Merge "Add AudioTrack and AudioRecord flag checks" into nyc-mr1-dev
diff --git a/camera/aidl/android/hardware/camera2/ICameraDeviceUser.aidl b/camera/aidl/android/hardware/camera2/ICameraDeviceUser.aidl
index 1e8744b..96ecfa0 100644
--- a/camera/aidl/android/hardware/camera2/ICameraDeviceUser.aidl
+++ b/camera/aidl/android/hardware/camera2/ICameraDeviceUser.aidl
@@ -121,4 +121,6 @@
     void tearDown(int streamId);
 
     void prepare2(int maxCount, int streamId);
+
+    void setDeferredConfiguration(int streamId, in OutputConfiguration outputConfiguration);
 }
diff --git a/camera/camera2/OutputConfiguration.cpp b/camera/camera2/OutputConfiguration.cpp
index 3247d0d..38e1c01 100644
--- a/camera/camera2/OutputConfiguration.cpp
+++ b/camera/camera2/OutputConfiguration.cpp
@@ -42,9 +42,24 @@
     return mSurfaceSetID;
 }
 
+int OutputConfiguration::getSurfaceType() const {
+    return mSurfaceType;
+}
+
+int OutputConfiguration::getWidth() const {
+    return mWidth;
+}
+
+int OutputConfiguration::getHeight() const {
+    return mHeight;
+}
+
 OutputConfiguration::OutputConfiguration() :
         mRotation(INVALID_ROTATION),
-        mSurfaceSetID(INVALID_SET_ID) {
+        mSurfaceSetID(INVALID_SET_ID),
+        mSurfaceType(SURFACE_TYPE_UNKNOWN),
+        mWidth(0),
+        mHeight(0) {
 }
 
 OutputConfiguration::OutputConfiguration(const Parcel& parcel) :
@@ -70,18 +85,48 @@
         return err;
     }
 
+    int surfaceType = SURFACE_TYPE_UNKNOWN;
+    if ((err = parcel->readInt32(&surfaceType)) != OK) {
+        ALOGE("%s: Failed to read surface type from parcel", __FUNCTION__);
+        return err;
+    }
+
+    int width = 0;
+    if ((err = parcel->readInt32(&width)) != OK) {
+        ALOGE("%s: Failed to read surface width from parcel", __FUNCTION__);
+        return err;
+    }
+
+    int height = 0;
+    if ((err = parcel->readInt32(&height)) != OK) {
+        ALOGE("%s: Failed to read surface height from parcel", __FUNCTION__);
+        return err;
+    }
+
     view::Surface surfaceShim;
     if ((err = surfaceShim.readFromParcel(parcel)) != OK) {
-        ALOGE("%s: Failed to read surface from parcel", __FUNCTION__);
-        return err;
+        // Failure to read the surface is expected for a deferred surface configuration.
+        if (surfaceType == SURFACE_TYPE_SURFACE_VIEW ||
+                surfaceType == SURFACE_TYPE_SURFACE_TEXTURE) {
+            ALOGV("%s: Get null surface from a deferred surface configuration (%dx%d)",
+                    __FUNCTION__, width, height);
+            err = OK;
+        } else {
+            ALOGE("%s: Failed to read surface from parcel", __FUNCTION__);
+            return err;
+        }
     }
 
     mGbp = surfaceShim.graphicBufferProducer;
     mRotation = rotation;
     mSurfaceSetID = setID;
+    mSurfaceType = surfaceType;
+    mWidth = width;
+    mHeight = height;
 
-    ALOGV("%s: OutputConfiguration: bp = %p, name = %s, rotation = %d, setId = %d", __FUNCTION__,
-            mGbp.get(), String8(surfaceShim.name).string(), mRotation, mSurfaceSetID);
+    ALOGV("%s: OutputConfiguration: bp = %p, name = %s, rotation = %d, setId = %d,"
+            "surfaceType = %d", __FUNCTION__, mGbp.get(), String8(surfaceShim.name).string(),
+            mRotation, mSurfaceSetID, mSurfaceType);
 
     return err;
 }
@@ -104,6 +149,15 @@
     err = parcel->writeInt32(mSurfaceSetID);
     if (err != OK) return err;
 
+    err = parcel->writeInt32(mSurfaceType);
+    if (err != OK) return err;
+
+    err = parcel->writeInt32(mWidth);
+    if (err != OK) return err;
+
+    err = parcel->writeInt32(mHeight);
+    if (err != OK) return err;
+
     view::Surface surfaceShim;
     surfaceShim.name = String16("unknown_name"); // name of surface
     surfaceShim.graphicBufferProducer = mGbp;
diff --git a/include/camera/camera2/OutputConfiguration.h b/include/camera/camera2/OutputConfiguration.h
index 72a3753..cf8f3c6 100644
--- a/include/camera/camera2/OutputConfiguration.h
+++ b/include/camera/camera2/OutputConfiguration.h
@@ -33,10 +33,17 @@
 
     static const int INVALID_ROTATION;
     static const int INVALID_SET_ID;
+    enum SurfaceType{
+        SURFACE_TYPE_UNKNOWN = -1,
+        SURFACE_TYPE_SURFACE_VIEW = 0,
+        SURFACE_TYPE_SURFACE_TEXTURE = 1
+    };
     sp<IGraphicBufferProducer> getGraphicBufferProducer() const;
     int                        getRotation() const;
     int                        getSurfaceSetID() const;
-
+    int                        getSurfaceType() const;
+    int                        getWidth() const;
+    int                        getHeight() const;
     /**
      * Keep impl up-to-date with OutputConfiguration.java in frameworks/base
      */
@@ -60,7 +67,10 @@
     bool operator == (const OutputConfiguration& other) const {
         return (mGbp == other.mGbp &&
                 mRotation == other.mRotation &&
-                mSurfaceSetID == other.mSurfaceSetID);
+                mSurfaceSetID == other.mSurfaceSetID &&
+                mSurfaceType == other.mSurfaceType &&
+                mWidth == other.mWidth &&
+                mHeight == other.mHeight);
     }
     bool operator != (const OutputConfiguration& other) const {
         return !(*this == other);
@@ -71,6 +81,16 @@
         if (mSurfaceSetID != other.mSurfaceSetID) {
             return mSurfaceSetID < other.mSurfaceSetID;
         }
+        if (mSurfaceType != other.mSurfaceType) {
+            return mSurfaceType < other.mSurfaceType;
+        }
+        if (mWidth != other.mWidth) {
+            return mWidth < other.mWidth;
+        }
+        if (mHeight != other.mHeight) {
+            return mHeight < other.mHeight;
+        }
+
         return mRotation < other.mRotation;
     }
     bool operator > (const OutputConfiguration& other) const {
@@ -81,6 +101,9 @@
     sp<IGraphicBufferProducer> mGbp;
     int                        mRotation;
     int                        mSurfaceSetID;
+    int                        mSurfaceType;
+    int                        mWidth;
+    int                        mHeight;
     // helper function
     static String16 readMaybeEmptyString16(const Parcel* parcel);
 };
diff --git a/include/camera/ndk/NdkCameraMetadataTags.h b/include/camera/ndk/NdkCameraMetadataTags.h
index 7c0ff5d..e7f6989 100644
--- a/include/camera/ndk/NdkCameraMetadataTags.h
+++ b/include/camera/ndk/NdkCameraMetadataTags.h
@@ -3871,7 +3871,7 @@
      * camera device, and an identity lens shading map data will be provided
      * if <code>ACAMERA_STATISTICS_LENS_SHADING_MAP_MODE == ON</code>. For example, for lens
      * shading map with size of <code>[ 4, 3 ]</code>,
-     * the output ACAMERA_STATISTICS_LENS_SHADING_CORRECTION_MAP for this case will be an identity
+     * the output android.statistics.lensShadingCorrectionMap for this case will be an identity
      * map shown below:</p>
      * <pre><code>[ 1.0, 1.0, 1.0, 1.0,  1.0, 1.0, 1.0, 1.0,
      *  1.0, 1.0, 1.0, 1.0,  1.0, 1.0, 1.0, 1.0,
@@ -3883,7 +3883,7 @@
      * <p>When set to other modes, lens shading correction will be applied by the camera
      * device. Applications can request lens shading map data by setting
      * ACAMERA_STATISTICS_LENS_SHADING_MAP_MODE to ON, and then the camera device will provide lens
-     * shading map data in ACAMERA_STATISTICS_LENS_SHADING_CORRECTION_MAP; the returned shading map
+     * shading map data in android.statistics.lensShadingCorrectionMap; the returned shading map
      * data will be the one applied by the camera device for this capture request.</p>
      * <p>The shading map data may depend on the auto-exposure (AE) and AWB statistics, therefore
      * the reliability of the map data may be affected by the AE and AWB algorithms. When AE and
@@ -3893,7 +3893,6 @@
      *
      * @see ACAMERA_CONTROL_AE_MODE
      * @see ACAMERA_CONTROL_AWB_MODE
-     * @see ACAMERA_STATISTICS_LENS_SHADING_CORRECTION_MAP
      * @see ACAMERA_STATISTICS_LENS_SHADING_MAP_MODE
      */
     ACAMERA_SHADING_MODE =                                      // byte (enum)
@@ -4020,57 +4019,6 @@
             ACAMERA_STATISTICS_START + 7,
     /**
      * <p>The shading map is a low-resolution floating-point map
-     * that lists the coefficients used to correct for vignetting, for each
-     * Bayer color channel.</p>
-     *
-     * <p>This tag may appear in:</p>
-     * <ul>
-     *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
-     * </ul>
-     *
-     * <p>The least shaded section of the image should have a gain factor
-     * of 1; all other sections should have gains above 1.</p>
-     * <p>When ACAMERA_COLOR_CORRECTION_MODE = TRANSFORM_MATRIX, the map
-     * must take into account the colorCorrection settings.</p>
-     * <p>The shading map is for the entire active pixel array, and is not
-     * affected by the crop region specified in the request. Each shading map
-     * entry is the value of the shading compensation map over a specific
-     * pixel on the sensor.  Specifically, with a (N x M) resolution shading
-     * map, and an active pixel array size (W x H), shading map entry
-     * (x,y) ϵ (0 ... N-1, 0 ... M-1) is the value of the shading map at
-     * pixel ( ((W-1)/(N-1)) * x, ((H-1)/(M-1)) * y) for the four color channels.
-     * The map is assumed to be bilinearly interpolated between the sample points.</p>
-     * <p>The channel order is [R, Geven, Godd, B], where Geven is the green
-     * channel for the even rows of a Bayer pattern, and Godd is the odd rows.
-     * The shading map is stored in a fully interleaved format.</p>
-     * <p>The shading map should have on the order of 30-40 rows and columns,
-     * and must be smaller than 64x64.</p>
-     * <p>As an example, given a very small map defined as:</p>
-     * <pre><code>width,height = [ 4, 3 ]
-     * values =
-     * [ 1.3, 1.2, 1.15, 1.2,  1.2, 1.2, 1.15, 1.2,
-     *     1.1, 1.2, 1.2, 1.2,  1.3, 1.2, 1.3, 1.3,
-     *   1.2, 1.2, 1.25, 1.1,  1.1, 1.1, 1.1, 1.0,
-     *     1.0, 1.0, 1.0, 1.0,  1.2, 1.3, 1.25, 1.2,
-     *   1.3, 1.2, 1.2, 1.3,   1.2, 1.15, 1.1, 1.2,
-     *     1.2, 1.1, 1.0, 1.2,  1.3, 1.15, 1.2, 1.3 ]
-     * </code></pre>
-     * <p>The low-resolution scaling map images for each channel are
-     * (displayed using nearest-neighbor interpolation):</p>
-     * <p><img alt="Red lens shading map" src="../images/camera2/metadata/android.statistics.lensShadingMap/red_shading.png" />
-     * <img alt="Green (even rows) lens shading map" src="../images/camera2/metadata/android.statistics.lensShadingMap/green_e_shading.png" />
-     * <img alt="Green (odd rows) lens shading map" src="../images/camera2/metadata/android.statistics.lensShadingMap/green_o_shading.png" />
-     * <img alt="Blue lens shading map" src="../images/camera2/metadata/android.statistics.lensShadingMap/blue_shading.png" /></p>
-     * <p>As a visualization only, inverting the full-color map to recover an
-     * image of a gray wall (using bicubic interpolation for visual quality) as captured by the sensor gives:</p>
-     * <p><img alt="Image of a uniform white wall (inverse shading map)" src="../images/camera2/metadata/android.statistics.lensShadingMap/inv_shading.png" /></p>
-     *
-     * @see ACAMERA_COLOR_CORRECTION_MODE
-     */
-    ACAMERA_STATISTICS_LENS_SHADING_CORRECTION_MAP =            // byte
-            ACAMERA_STATISTICS_START + 10,
-    /**
-     * <p>The shading map is a low-resolution floating-point map
      * that lists the coefficients used to correct for vignetting and color shading,
      * for each Bayer color channel of RAW image data.</p>
      *
@@ -4079,20 +4027,21 @@
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
      * </ul>
      *
-     * <p>The lens shading correction is defined as a full shading correction that
-     * corrects both color shading for the output non-RAW images. After the
-     * shading map is applied, the output non-RAW images will be flat-field images
-     * for flat scenes under uniform illumination.</p>
-     * <p>When there is no lens shading correction applied to RAW output images
-     * (ACAMERA_SENSOR_INFO_LENS_SHADING_APPLIED <code>==</code> false), this map is a full lens
-     * shading correction map; when there is some lens shading correction applied
-     * to the RAW output image (ACAMERA_SENSOR_INFO_LENS_SHADING_APPLIED <code>==</code> true),
-     * this map reports the remaining lens shading correction map that needs to be
-     * applied to get fully shading corrected images.</p>
-     * <p>For a full shading correction map, the least shaded section of the image
-     * should have a gain factor of 1; all other sections should have gains above 1.</p>
+     * <p>The map provided here is the same map that is used by the camera device to
+     * correct both color shading and vignetting for output non-RAW images.</p>
+     * <p>When there is no lens shading correction applied to RAW
+     * output images (ACAMERA_SENSOR_INFO_LENS_SHADING_APPLIED <code>==</code>
+     * false), this map is the complete lens shading correction
+     * map; when there is some lens shading correction applied to
+     * the RAW output image (ACAMERA_SENSOR_INFO_LENS_SHADING_APPLIED <code>==</code> true), this map reports the remaining lens shading
+     * correction map that needs to be applied to get shading
+     * corrected images that match the camera device's output for
+     * non-RAW formats.</p>
+     * <p>For a complete shading correction map, the least shaded
+     * section of the image will have a gain factor of 1; all
+     * other sections will have gains above 1.</p>
      * <p>When ACAMERA_COLOR_CORRECTION_MODE = TRANSFORM_MATRIX, the map
-     * must take into account the colorCorrection settings.</p>
+     * will take into account the colorCorrection settings.</p>
      * <p>The shading map is for the entire active pixel array, and is not
      * affected by the crop region specified in the request. Each shading map
      * entry is the value of the shading compensation map over a specific
@@ -4105,8 +4054,8 @@
      * channel for the even rows of a Bayer pattern, and Godd is the odd rows.
      * The shading map is stored in a fully interleaved format, and its size
      * is provided in the camera static metadata by ACAMERA_LENS_INFO_SHADING_MAP_SIZE.</p>
-     * <p>The shading map should have on the order of 30-40 rows and columns,
-     * and must be smaller than 64x64.</p>
+     * <p>The shading map will generally have on the order of 30-40 rows and columns,
+     * and will be smaller than 64x64.</p>
      * <p>As an example, given a very small map defined as:</p>
      * <pre><code>ACAMERA_LENS_INFO_SHADING_MAP_SIZE = [ 4, 3 ]
      * ACAMERA_STATISTICS_LENS_SHADING_MAP =
diff --git a/include/media/AudioTimestamp.h b/include/media/AudioTimestamp.h
index 44d6c0b..498de8e 100644
--- a/include/media/AudioTimestamp.h
+++ b/include/media/AudioTimestamp.h
@@ -34,7 +34,7 @@
     struct timespec mTime;     // corresponding CLOCK_MONOTONIC when frame is expected to present
 };
 
-struct ExtendedTimestamp {
+struct alignas(8) /* bug 29096183, bug 29108507 */ ExtendedTimestamp {
     enum Location {
         LOCATION_INVALID = -1,
         // Locations in the audio playback / record pipeline.
diff --git a/media/libmedia/Android.mk b/media/libmedia/Android.mk
index 6586f41..2bdfd43 100644
--- a/media/libmedia/Android.mk
+++ b/media/libmedia/Android.mk
@@ -49,6 +49,7 @@
     IResourceManagerService.cpp \
     IStreamSource.cpp \
     MediaCodecInfo.cpp \
+    MediaUtils.cpp \
     Metadata.cpp \
     mediarecorder.cpp \
     IMediaMetadataRetriever.cpp \
diff --git a/media/libmedia/MediaUtils.cpp b/media/libmedia/MediaUtils.cpp
new file mode 100644
index 0000000..a02ca65
--- /dev/null
+++ b/media/libmedia/MediaUtils.cpp
@@ -0,0 +1,74 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "MediaUtils"
+#define LOG_NDEBUG 0
+#include <utils/Log.h>
+
+#include <cutils/properties.h>
+#include <sys/resource.h>
+#include <unistd.h>
+
+#include "MediaUtils.h"
+
+namespace android {
+
+void limitProcessMemory(
+    const char *property,
+    size_t numberOfBytes,
+    size_t percentageOfTotalMem) {
+
+    long pageSize = sysconf(_SC_PAGESIZE);
+    long numPages = sysconf(_SC_PHYS_PAGES);
+    size_t maxMem = SIZE_MAX;
+
+    if (pageSize > 0 && numPages > 0) {
+        if (size_t(numPages) < SIZE_MAX / size_t(pageSize)) {
+            maxMem = size_t(numPages) * size_t(pageSize);
+        }
+        ALOGV("physMem: %zu", maxMem);
+        if (percentageOfTotalMem > 100) {
+            ALOGW("requested %zu%% of total memory, using 100%%", percentageOfTotalMem);
+            percentageOfTotalMem = 100;
+        }
+        maxMem = maxMem / 100 * percentageOfTotalMem;
+        if (numberOfBytes < maxMem) {
+            maxMem = numberOfBytes;
+        }
+        ALOGV("requested limit: %zu", maxMem);
+    } else {
+        ALOGW("couldn't determine total RAM");
+    }
+
+    int64_t propVal = property_get_int64(property, maxMem);
+    if (propVal > 0 && uint64_t(propVal) <= SIZE_MAX) {
+        maxMem = propVal;
+    }
+    ALOGV("actual limit: %zu", maxMem);
+
+    struct rlimit limit;
+    getrlimit(RLIMIT_AS, &limit);
+    ALOGV("original limits: %lld/%lld", (long long)limit.rlim_cur, (long long)limit.rlim_max);
+    limit.rlim_cur = maxMem;
+    setrlimit(RLIMIT_AS, &limit);
+    limit.rlim_cur = -1;
+    limit.rlim_max = -1;
+    getrlimit(RLIMIT_AS, &limit);
+    ALOGV("new limits: %lld/%lld", (long long)limit.rlim_cur, (long long)limit.rlim_max);
+
+}
+
+} // namespace android
diff --git a/media/libmedia/MediaUtils.h b/media/libmedia/MediaUtils.h
new file mode 100644
index 0000000..f80dd30
--- /dev/null
+++ b/media/libmedia/MediaUtils.h
@@ -0,0 +1,35 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _MEDIA_UTILS_H
+#define _MEDIA_UTILS_H
+
+namespace android {
+
+/**
+   Limit the amount of memory a process can allocate using setrlimit(RLIMIT_AS).
+   The value to use will be read from the specified system property, or if the
+   property doesn't exist it will use the specified number of bytes or the
+   specified percentage of total memory, whichever is smaller.
+*/
+void limitProcessMemory(
+    const char *property,
+    size_t numberOfBytes,
+    size_t percentageOfTotalMem);
+
+}   // namespace android
+
+#endif  // _MEDIA_UTILS_H
diff --git a/media/libstagefright/codecs/aacdec/SoftAAC2.cpp b/media/libstagefright/codecs/aacdec/SoftAAC2.cpp
index e88dfa8..5723e0e 100644
--- a/media/libstagefright/codecs/aacdec/SoftAAC2.cpp
+++ b/media/libstagefright/codecs/aacdec/SoftAAC2.cpp
@@ -26,6 +26,7 @@
 #include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/foundation/hexdump.h>
 #include <media/stagefright/MediaErrors.h>
+#include <utils/misc.h>
 
 #include <math.h>
 
@@ -55,6 +56,14 @@
     params->nVersion.s.nStep = 0;
 }
 
+static const OMX_U32 kSupportedProfiles[] = {
+    OMX_AUDIO_AACObjectLC,
+    OMX_AUDIO_AACObjectHE,
+    OMX_AUDIO_AACObjectHE_PS,
+    OMX_AUDIO_AACObjectLD,
+    OMX_AUDIO_AACObjectELD,
+};
+
 SoftAAC2::SoftAAC2(
         const char *name,
         const OMX_CALLBACKTYPE *callbacks,
@@ -207,7 +216,7 @@
 
 OMX_ERRORTYPE SoftAAC2::internalGetParameter(
         OMX_INDEXTYPE index, OMX_PTR params) {
-    switch (index) {
+    switch ((OMX_U32) index) {
         case OMX_IndexParamAudioAac:
         {
             OMX_AUDIO_PARAM_AACPROFILETYPE *aacParams =
@@ -283,6 +292,29 @@
             return OMX_ErrorNone;
         }
 
+        case OMX_IndexParamAudioProfileQuerySupported:
+        {
+            OMX_AUDIO_PARAM_ANDROID_PROFILETYPE *profileParams =
+                (OMX_AUDIO_PARAM_ANDROID_PROFILETYPE *)params;
+
+            if (!isValidOMXParam(profileParams)) {
+                return OMX_ErrorBadParameter;
+            }
+
+            if (profileParams->nPortIndex != 0) {
+                return OMX_ErrorUndefined;
+            }
+
+            if (profileParams->nProfileIndex >= NELEM(kSupportedProfiles)) {
+                return OMX_ErrorNoMore;
+            }
+
+            profileParams->eProfile =
+                kSupportedProfiles[profileParams->nProfileIndex];
+
+            return OMX_ErrorNone;
+        }
+
         default:
             return SimpleSoftOMXComponent::internalGetParameter(index, params);
     }
diff --git a/media/libstagefright/codecs/amrwbenc/Android.mk b/media/libstagefright/codecs/amrwbenc/Android.mk
index 77a7b1e..026006e 100644
--- a/media/libstagefright/codecs/amrwbenc/Android.mk
+++ b/media/libstagefright/codecs/amrwbenc/Android.mk
@@ -80,7 +80,8 @@
         src/asm/ARMV7/Syn_filt_32_neon.s \
         src/asm/ARMV7/syn_filt_neon.s
 
-    LOCAL_CFLAGS_arm := -DARM -DARMV7 -DASM_OPT
+    # don't actually generate neon instructions, see bug 26932980
+    LOCAL_CFLAGS_arm := -DARM -DARMV7 -DASM_OPT -mfpu=vfpv3
     LOCAL_C_INCLUDES_arm := $(LOCAL_PATH)/src/asm/ARMV5E
     LOCAL_C_INCLUDES_arm += $(LOCAL_PATH)/src/asm/ARMV7
 endif
@@ -102,7 +103,7 @@
 
 LOCAL_CFLAGS += -Werror
 LOCAL_CLANG := true
-LOCAL_SANITIZE := signed-integer-overflow
+#LOCAL_SANITIZE := signed-integer-overflow
 
 include $(BUILD_STATIC_LIBRARY)
 
diff --git a/media/libstagefright/rtsp/MyHandler.h b/media/libstagefright/rtsp/MyHandler.h
index bef9c0a..f9a9ab9 100644
--- a/media/libstagefright/rtsp/MyHandler.h
+++ b/media/libstagefright/rtsp/MyHandler.h
@@ -64,6 +64,10 @@
 
 static int64_t kPauseDelayUs = 3000000ll;
 
+// The maximum allowed number of stale access units at the beginning of
+// a new sequence.
+static int32_t kMaxAllowedStaleAccessUnits = 20;
+
 namespace android {
 
 static bool GetAttribute(const char *s, const char *key, AString *value) {
@@ -1049,14 +1053,24 @@
                 }
 
                 if (track->mNewSegment) {
-                    // The sequence number from RTP packet has only 16 bits and is adjusted
-                    // by the client. Only the low 16 bits of seq in RTP-Info of reply of
-                    // RTSP "PLAY" command should be used to detect the first RTP patcket
+                    // The sequence number from RTP packet has only 16 bits and is extended
+                    // by ARTPSource. Only the low 16 bits of seq in RTP-Info of reply of
+                    // RTSP "PLAY" command should be used to detect the first RTP packet
                     // after seeking.
-                    if ((((seqNum ^ track->mFirstSeqNumInSegment) & 0xffff) != 0)) {
-                        // Not the first rtp packet of the stream after seeking, discarding.
-                        ALOGV("discarding stale access unit (0x%x : 0x%x)",
-                             seqNum, track->mFirstSeqNumInSegment);
+                    if (track->mAllowedStaleAccessUnits > 0) {
+                        if ((((seqNum ^ track->mFirstSeqNumInSegment) & 0xffff) != 0)) {
+                            // Not the first rtp packet of the stream after seeking, discarding.
+                            track->mAllowedStaleAccessUnits--;
+                            ALOGV("discarding stale access unit (0x%x : 0x%x)",
+                                 seqNum, track->mFirstSeqNumInSegment);
+                            break;
+                        }
+                    } else { // track->mAllowedStaleAccessUnits <= 0
+                        mNumAccessUnitsReceived = 0;
+                        ALOGW_IF(track->mAllowedStaleAccessUnits == 0,
+                             "Still no first rtp packet after %d stale ones",
+                             kMaxAllowedStaleAccessUnits);
+                        track->mAllowedStaleAccessUnits = -1;
                         break;
                     }
 
@@ -1516,6 +1530,7 @@
             TrackInfo *info = &mTracks.editItemAt(trackIndex);
             info->mFirstSeqNumInSegment = seq;
             info->mNewSegment = true;
+            info->mAllowedStaleAccessUnits = kMaxAllowedStaleAccessUnits;
 
             CHECK(GetAttribute((*it).c_str(), "rtptime", &val));
 
@@ -1559,6 +1574,7 @@
         bool mUsingInterleavedTCP;
         uint32_t mFirstSeqNumInSegment;
         bool mNewSegment;
+        int32_t mAllowedStaleAccessUnits;
 
         uint32_t mRTPAnchor;
         int64_t mNTPAnchorUs;
@@ -1642,6 +1658,7 @@
         info->mUsingInterleavedTCP = false;
         info->mFirstSeqNumInSegment = 0;
         info->mNewSegment = true;
+        info->mAllowedStaleAccessUnits = kMaxAllowedStaleAccessUnits;
         info->mRTPSocket = -1;
         info->mRTCPSocket = -1;
         info->mRTPAnchor = 0;
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/examples/Settings/device_for_input_source.pfw b/services/audiopolicy/engineconfigurable/parameter-framework/examples/Settings/device_for_input_source.pfw
index 9fd459a..a990879 100644
--- a/services/audiopolicy/engineconfigurable/parameter-framework/examples/Settings/device_for_input_source.pfw
+++ b/services/audiopolicy/engineconfigurable/parameter-framework/examples/Settings/device_for_input_source.pfw
@@ -242,6 +242,7 @@
 				loopback = 0
 				ip = 0
 				bus = 0
+				stub = 0
 
 	domain: DefaultAndMic
 		conf: A2dp
@@ -380,7 +381,7 @@
 				back_mic = 0
 				builtin_mic = 0
 
-	domain: VoiceRecognitionAndHotword
+	domain: VoiceRecognitionAndUnprocessedAndHotword
 		conf: ScoHeadset
 			ForceUseForRecord Is ForceBtSco
 			AvailableInputDevices Includes BluetoothScoHeadset
@@ -391,6 +392,11 @@
 					wired_headset = 0
 					usb_device = 0
 					builtin_mic = 0
+				component: unprocessed/applicable_input_device/mask
+					bluetooth_sco_headset = 1
+					wired_headset = 0
+					usb_device = 0
+					builtin_mic = 0
 				component: hotword/applicable_input_device/mask
 					bluetooth_sco_headset = 1
 					wired_headset = 0
@@ -406,6 +412,11 @@
 					wired_headset = 1
 					usb_device = 0
 					builtin_mic = 0
+				component: unprocessed/applicable_input_device/mask
+					bluetooth_sco_headset = 0
+					wired_headset = 1
+					usb_device = 0
+					builtin_mic = 0
 				component: hotword/applicable_input_device/mask
 					bluetooth_sco_headset = 0
 					wired_headset = 1
@@ -421,6 +432,11 @@
 					wired_headset = 0
 					usb_device = 1
 					builtin_mic = 0
+				component: unprocessed/applicable_input_device/mask
+					bluetooth_sco_headset = 0
+					wired_headset = 0
+					usb_device = 1
+					builtin_mic = 0
 				component: hotword/applicable_input_device/mask
 					bluetooth_sco_headset = 0
 					wired_headset = 0
@@ -436,6 +452,11 @@
 					wired_headset = 0
 					usb_device = 0
 					builtin_mic = 1
+				component: unprocessed/applicable_input_device/mask
+					bluetooth_sco_headset = 0
+					wired_headset = 0
+					usb_device = 0
+					builtin_mic = 1
 				component: hotword/applicable_input_device/mask
 					bluetooth_sco_headset = 0
 					wired_headset = 0
@@ -449,6 +470,11 @@
 					wired_headset = 0
 					usb_device = 0
 					builtin_mic = 0
+				component: unprocessed/applicable_input_device/mask
+					bluetooth_sco_headset = 0
+					wired_headset = 0
+					usb_device = 0
+					builtin_mic = 0
 				component: hotword/applicable_input_device/mask
 					bluetooth_sco_headset = 0
 					wired_headset = 0
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
index dbec34e..a7fe5e7 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
@@ -338,13 +338,15 @@
 
     status_t err = mDevice->configureStreams(isConstrainedHighSpeed);
     if (err == BAD_VALUE) {
-        res = STATUS_ERROR_FMT(CameraService::ERROR_ILLEGAL_ARGUMENT,
-                "Camera %d: Unsupported set of inputs/outputs provided",
+        String8 msg = String8::format("Camera %d: Unsupported set of inputs/outputs provided",
                 mCameraId);
+        ALOGE("%s: %s", __FUNCTION__, msg.string());
+        res = STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
     } else if (err != OK) {
-        res = STATUS_ERROR_FMT(CameraService::ERROR_INVALID_OPERATION,
-                "Camera %d: Error configuring streams: %s (%d)",
+        String8 msg = String8::format("Camera %d: Error configuring streams: %s (%d)",
                 mCameraId, strerror(-err), err);
+        ALOGE("%s: %s", __FUNCTION__, msg.string());
+        res = STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.string());
     }
 
     return res;
@@ -365,6 +367,7 @@
 
     bool isInput = false;
     ssize_t index = NAME_NOT_FOUND;
+    ssize_t dIndex = NAME_NOT_FOUND;
 
     if (mInputStream.configured && mInputStream.id == streamId) {
         isInput = true;
@@ -378,10 +381,19 @@
         }
 
         if (index == NAME_NOT_FOUND) {
-            String8 msg = String8::format("Camera %d: Invalid stream ID (%d) specified, no such "
-                    "stream created yet", mCameraId, streamId);
-            ALOGW("%s: %s", __FUNCTION__, msg.string());
-            return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
+            // See if this stream is one of the deferred streams.
+            for (size_t i = 0; i < mDeferredStreams.size(); ++i) {
+                if (streamId == mDeferredStreams[i]) {
+                    dIndex = i;
+                    break;
+                }
+            }
+            if (dIndex == NAME_NOT_FOUND) {
+                String8 msg = String8::format("Camera %d: Invalid stream ID (%d) specified, no such"
+                        " stream created yet", mCameraId, streamId);
+                ALOGW("%s: %s", __FUNCTION__, msg.string());
+                return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
+            }
         }
     }
 
@@ -396,8 +408,10 @@
     } else {
         if (isInput) {
             mInputStream.configured = false;
-        } else {
+        } else if (index != NAME_NOT_FOUND) {
             mStreamMap.removeItemsAt(index);
+        } else {
+            mDeferredStreams.removeItemsAt(dIndex);
         }
     }
 
@@ -416,14 +430,30 @@
     Mutex::Autolock icl(mBinderSerializationLock);
 
     sp<IGraphicBufferProducer> bufferProducer = outputConfiguration.getGraphicBufferProducer();
-    if (bufferProducer == NULL) {
-        ALOGE("%s: bufferProducer must not be null", __FUNCTION__);
+    bool deferredConsumer = bufferProducer == NULL;
+    int surfaceType = outputConfiguration.getSurfaceType();
+    bool validSurfaceType = ((surfaceType == OutputConfiguration::SURFACE_TYPE_SURFACE_VIEW) ||
+            (surfaceType == OutputConfiguration::SURFACE_TYPE_SURFACE_TEXTURE));
+    if (deferredConsumer && !validSurfaceType) {
+        ALOGE("%s: Target surface is invalid: bufferProducer = %p, surfaceType = %d.",
+                __FUNCTION__, bufferProducer.get(), surfaceType);
         return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, "Target Surface is invalid");
     }
+
     if (!mDevice.get()) {
         return STATUS_ERROR(CameraService::ERROR_DISCONNECTED, "Camera device no longer alive");
     }
 
+    int width, height, format;
+    int32_t consumerUsage;
+    android_dataspace dataSpace;
+    status_t err;
+
+    // Create stream for deferred surface case.
+    if (deferredConsumer) {
+        return createDeferredSurfaceStreamLocked(outputConfiguration, newStreamId);
+    }
+
     // Don't create multiple streams for the same target surface
     {
         ssize_t index = mStreamMap.indexOfKey(IInterface::asBinder(bufferProducer));
@@ -435,13 +465,10 @@
         }
     }
 
-    status_t err;
-
     // HACK b/10949105
     // Query consumer usage bits to set async operation mode for
     // GLConsumer using controlledByApp parameter.
     bool useAsync = false;
-    int32_t consumerUsage;
     if ((err = bufferProducer->query(NATIVE_WINDOW_CONSUMER_USAGE_BITS,
             &consumerUsage)) != OK) {
         String8 msg = String8::format("Camera %d: Failed to query Surface consumer usage: %s (%d)",
@@ -450,8 +477,8 @@
         return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.string());
     }
     if (consumerUsage & GraphicBuffer::USAGE_HW_TEXTURE) {
-        ALOGW("%s: Camera %d: Forcing asynchronous mode for stream",
-                __FUNCTION__, mCameraId);
+        ALOGW("%s: Camera %d with consumer usage flag: 0x%x: Forcing asynchronous mode for stream",
+                __FUNCTION__, mCameraId, consumerUsage);
         useAsync = true;
     }
 
@@ -467,9 +494,6 @@
     sp<Surface> surface = new Surface(bufferProducer, useAsync);
     ANativeWindow *anw = surface.get();
 
-    int width, height, format;
-    android_dataspace dataSpace;
-
     if ((err = anw->query(anw, NATIVE_WINDOW_WIDTH, &width)) != OK) {
         String8 msg = String8::format("Camera %d: Failed to query Surface width: %s (%d)",
                 mCameraId, strerror(-err), err);
@@ -526,29 +550,12 @@
     } else {
         mStreamMap.add(binder, streamId);
 
-        ALOGV("%s: Camera %d: Successfully created a new stream ID %d",
-              __FUNCTION__, mCameraId, streamId);
+        ALOGV("%s: Camera %d: Successfully created a new stream ID %d for output surface"
+                " (%d x %d) with format 0x%x.",
+              __FUNCTION__, mCameraId, streamId, width, height, format);
 
-        /**
-         * Set the stream transform flags to automatically
-         * rotate the camera stream for preview use cases.
-         */
-        int32_t transform = 0;
-        err = getRotationTransformLocked(&transform);
-
-        if (err != OK) {
-            // Error logged by getRotationTransformLocked.
-            return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION,
-                    "Unable to calculate rotation transform for new stream");
-        }
-
-        err = mDevice->setStreamTransform(streamId, transform);
-        if (err != OK) {
-            String8 msg = String8::format("Failed to set stream transform (stream id %d)",
-                    streamId);
-            ALOGE("%s: %s", __FUNCTION__, msg.string());
-            return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.string());
-        }
+        // Set transform flags to ensure preview to be rotated correctly.
+        res = setStreamTransformLocked(streamId);
 
         *newStreamId = streamId;
     }
@@ -556,6 +563,84 @@
     return res;
 }
 
+binder::Status CameraDeviceClient::createDeferredSurfaceStreamLocked(
+        const hardware::camera2::params::OutputConfiguration &outputConfiguration,
+        /*out*/
+        int* newStreamId) {
+    int width, height, format, surfaceType;
+    int32_t consumerUsage;
+    android_dataspace dataSpace;
+    status_t err;
+    binder::Status res;
+
+    if (!mDevice.get()) {
+        return STATUS_ERROR(CameraService::ERROR_DISCONNECTED, "Camera device no longer alive");
+    }
+
+    // Infer the surface info for deferred surface stream creation.
+    width = outputConfiguration.getWidth();
+    height = outputConfiguration.getHeight();
+    surfaceType = outputConfiguration.getSurfaceType();
+    format = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
+    dataSpace = android_dataspace_t::HAL_DATASPACE_UNKNOWN;
+    // Hardcode consumer usage flags: SurfaceView--0x900, SurfaceTexture--0x100.
+    consumerUsage = GraphicBuffer::USAGE_HW_TEXTURE;
+    if (surfaceType == OutputConfiguration::SURFACE_TYPE_SURFACE_VIEW) {
+        consumerUsage |= GraphicBuffer::USAGE_HW_COMPOSER;
+    }
+    int streamId = camera3::CAMERA3_STREAM_ID_INVALID;
+    err = mDevice->createStream(/*surface*/nullptr, width, height, format, dataSpace,
+            static_cast<camera3_stream_rotation_t>(outputConfiguration.getRotation()),
+            &streamId, outputConfiguration.getSurfaceSetID(), consumerUsage);
+
+    if (err != OK) {
+        res = STATUS_ERROR_FMT(CameraService::ERROR_INVALID_OPERATION,
+                "Camera %d: Error creating output stream (%d x %d, fmt %x, dataSpace %x): %s (%d)",
+                mCameraId, width, height, format, dataSpace, strerror(-err), err);
+    } else {
+        // Cannot add streamId to mStreamMap here, as the surface is deferred. Add it to
+        // a separate list to track. Once the deferred surface is set, this id will be
+        // relocated to mStreamMap.
+        mDeferredStreams.push_back(streamId);
+
+        ALOGV("%s: Camera %d: Successfully created a new stream ID %d for a deferred surface"
+                " (%d x %d) stream with format 0x%x.",
+              __FUNCTION__, mCameraId, streamId, width, height, format);
+
+        // Set transform flags to ensure preview to be rotated correctly.
+        res = setStreamTransformLocked(streamId);
+
+        *newStreamId = streamId;
+    }
+    return res;
+}
+
+binder::Status CameraDeviceClient::setStreamTransformLocked(int streamId) {
+    int32_t transform = 0;
+    status_t err;
+    binder::Status res;
+
+    if (!mDevice.get()) {
+        return STATUS_ERROR(CameraService::ERROR_DISCONNECTED, "Camera device no longer alive");
+    }
+
+    err = getRotationTransformLocked(&transform);
+    if (err != OK) {
+        // Error logged by getRotationTransformLocked.
+        return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION,
+                "Unable to calculate rotation transform for new stream");
+    }
+
+    err = mDevice->setStreamTransform(streamId, transform);
+    if (err != OK) {
+        String8 msg = String8::format("Failed to set stream transform (stream id %d)",
+                streamId);
+        ALOGE("%s: %s", __FUNCTION__, msg.string());
+        return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.string());
+    }
+
+    return res;
+}
 
 binder::Status CameraDeviceClient::createInputStream(
         int width, int height, int format,
@@ -934,6 +1019,76 @@
     return res;
 }
 
+binder::Status CameraDeviceClient::setDeferredConfiguration(int32_t streamId,
+        const hardware::camera2::params::OutputConfiguration &outputConfiguration) {
+    ATRACE_CALL();
+
+    binder::Status res;
+    if (!(res = checkPidStatus(__FUNCTION__)).isOk()) return res;
+
+    Mutex::Autolock icl(mBinderSerializationLock);
+
+    sp<IGraphicBufferProducer> bufferProducer = outputConfiguration.getGraphicBufferProducer();
+
+    // Client code should guarantee that the surface is from SurfaceView or SurfaceTexture.
+    if (bufferProducer == NULL) {
+        ALOGE("%s: bufferProducer must not be null", __FUNCTION__);
+        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, "Target Surface is invalid");
+    }
+    // Check if this stream id is one of the deferred streams
+    ssize_t index = NAME_NOT_FOUND;
+    for (size_t i = 0; i < mDeferredStreams.size(); i++) {
+        if (streamId == mDeferredStreams[i]) {
+            index = i;
+            break;
+        }
+    }
+    if (index == NAME_NOT_FOUND) {
+        String8 msg = String8::format("Camera %d: deferred surface is set to a unknown stream"
+                "(ID %d)", mCameraId, streamId);
+        ALOGW("%s: %s", __FUNCTION__, msg.string());
+        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
+    }
+
+    if (!mDevice.get()) {
+        return STATUS_ERROR(CameraService::ERROR_DISCONNECTED, "Camera device no longer alive");
+    }
+
+    // Don't create multiple streams for the same target surface
+    {
+        ssize_t index = mStreamMap.indexOfKey(IInterface::asBinder(bufferProducer));
+        if (index != NAME_NOT_FOUND) {
+            String8 msg = String8::format("Camera %d: Surface already has a stream created "
+                    " for it (ID %zd)", mCameraId, index);
+            ALOGW("%s: %s", __FUNCTION__, msg.string());
+            return STATUS_ERROR(CameraService::ERROR_ALREADY_EXISTS, msg.string());
+        }
+    }
+
+    status_t err;
+
+    // Always set to async, as we know the deferred surface is for preview streaming.
+    sp<Surface> consumerSurface = new Surface(bufferProducer, /*useAsync*/true);
+
+    // Finish the deferred stream configuration with the surface.
+    err = mDevice->setConsumerSurface(streamId, consumerSurface);
+    if (err == OK) {
+        sp<IBinder> binder = IInterface::asBinder(bufferProducer);
+        mStreamMap.add(binder, streamId);
+        mDeferredStreams.removeItemsAt(index);
+    } else if (err == NO_INIT) {
+        res = STATUS_ERROR_FMT(CameraService::ERROR_ILLEGAL_ARGUMENT,
+                "Camera %d: Deferred surface is invalid: %s (%d)",
+                mCameraId, strerror(-err), err);
+    } else {
+        res = STATUS_ERROR_FMT(CameraService::ERROR_INVALID_OPERATION,
+                "Camera %d: Error setting output stream deferred surface: %s (%d)",
+                mCameraId, strerror(-err), err);
+    }
+
+    return res;
+}
+
 status_t CameraDeviceClient::dump(int fd, const Vector<String16>& args) {
     return BasicClient::dump(fd, args);
 }
@@ -959,6 +1114,11 @@
         for (size_t i = 0; i < mStreamMap.size(); i++) {
             result.appendFormat("      Stream %d\n", mStreamMap.valueAt(i));
         }
+    } else if (!mDeferredStreams.isEmpty()) {
+        result.append("    Current deferred surface output stream IDs:\n");
+        for (auto& streamId : mDeferredStreams) {
+            result.appendFormat("      Stream %d\n", streamId);
+        }
     } else {
         result.append("    No output streams configured.\n");
     }
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.h b/services/camera/libcameraservice/api2/CameraDeviceClient.h
index d792b7d..dde23fb 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.h
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.h
@@ -131,6 +131,10 @@
     // Prepare stream by preallocating up to maxCount of its buffers
     virtual binder::Status prepare2(int32_t maxCount, int32_t streamId);
 
+    // Set the deferred surface for a stream.
+    virtual binder::Status setDeferredConfiguration(int32_t streamId,
+            const hardware::camera2::params::OutputConfiguration &outputConfiguration);
+
     /**
      * Interface used by CameraService
      */
@@ -188,6 +192,15 @@
     // Find the square of the euclidean distance between two points
     static int64_t euclidDistSquare(int32_t x0, int32_t y0, int32_t x1, int32_t y1);
 
+    // Create an output stream with surface deferred for future.
+    binder::Status createDeferredSurfaceStreamLocked(
+            const hardware::camera2::params::OutputConfiguration &outputConfiguration,
+            int* newStreamId = NULL);
+
+    // Set the stream transform flags to automatically rotate the camera stream for preview use
+    // cases.
+    binder::Status setStreamTransformLocked(int streamId);
+
     // Find the closest dimensions for a given format in available stream configurations with
     // a width <= ROUNDING_WIDTH_CAP
     static const int32_t ROUNDING_WIDTH_CAP = 1920;
@@ -213,6 +226,10 @@
 
     int32_t mRequestIdCounter;
 
+    // The list of output streams whose surfaces are deferred. We have to track them separately
+    // as there are no surfaces available and they cannot be put into mStreamMap. Once the
+    // deferred surface is configured, the stream id will be moved to mStreamMap.
+    Vector<int32_t> mDeferredStreams;
 };
 
 }; // namespace android
diff --git a/services/camera/libcameraservice/common/CameraDeviceBase.h b/services/camera/libcameraservice/common/CameraDeviceBase.h
index 35ec531..8707f2a 100644
--- a/services/camera/libcameraservice/common/CameraDeviceBase.h
+++ b/services/camera/libcameraservice/common/CameraDeviceBase.h
@@ -110,7 +110,8 @@
     virtual status_t createStream(sp<Surface> consumer,
             uint32_t width, uint32_t height, int format,
             android_dataspace dataSpace, camera3_stream_rotation_t rotation, int *id,
-            int streamSetId = camera3::CAMERA3_STREAM_SET_ID_INVALID) = 0;
+            int streamSetId = camera3::CAMERA3_STREAM_SET_ID_INVALID,
+            uint32_t consumerUsage = 0) = 0;
 
     /**
      * Create an input stream of width, height, and format.
@@ -312,6 +313,12 @@
      * Get the HAL device version.
      */
     virtual uint32_t getDeviceVersion() = 0;
+
+    /**
+     * Set the deferred consumer surface and finish the rest of the stream configuration.
+     */
+    virtual status_t setConsumerSurface(int streamId, sp<Surface> consumer) = 0;
+
 };
 
 }; // namespace android
diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp
index c0de95a..a21514a 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Device.cpp
@@ -997,12 +997,13 @@
 
 status_t Camera3Device::createStream(sp<Surface> consumer,
         uint32_t width, uint32_t height, int format, android_dataspace dataSpace,
-        camera3_stream_rotation_t rotation, int *id, int streamSetId) {
+        camera3_stream_rotation_t rotation, int *id, int streamSetId, uint32_t consumerUsage) {
     ATRACE_CALL();
     Mutex::Autolock il(mInterfaceLock);
     Mutex::Autolock l(mLock);
-    ALOGV("Camera %d: Creating new stream %d: %d x %d, format %d, dataspace %d rotation %d",
-            mId, mNextStreamId, width, height, format, dataSpace, rotation);
+    ALOGV("Camera %d: Creating new stream %d: %d x %d, format %d, dataspace %d rotation %d"
+            " consumer usage 0x%x", mId, mNextStreamId, width, height, format, dataSpace, rotation,
+            consumerUsage);
 
     status_t res;
     bool wasActive = false;
@@ -1039,6 +1040,19 @@
     if (mDeviceVersion <= CAMERA_DEVICE_API_VERSION_3_2) {
         streamSetId = CAMERA3_STREAM_SET_ID_INVALID;
     }
+
+    // HAL3.1 doesn't support deferred consumer stream creation as it requires buffer registration
+    // which requires a consumer surface to be available.
+    if (consumer == nullptr && mDeviceVersion < CAMERA_DEVICE_API_VERSION_3_2) {
+        ALOGE("HAL3.1 doesn't support deferred consumer stream creation");
+        return BAD_VALUE;
+    }
+
+    if (consumer == nullptr && format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
+        ALOGE("Deferred consumer stream creation only support IMPLEMENTATION_DEFINED format");
+        return BAD_VALUE;
+    }
+
     // Use legacy dataspace values for older HALs
     if (mDeviceVersion <= CAMERA_DEVICE_API_VERSION_3_3) {
         dataSpace = mapToLegacyDataspace(dataSpace);
@@ -1070,6 +1084,10 @@
         newStream = new Camera3OutputStream(mNextStreamId, consumer,
                 width, height, rawOpaqueBufferSize, format, dataSpace, rotation,
                 mTimestampOffset, streamSetId);
+    } else if (consumer == nullptr) {
+        newStream = new Camera3OutputStream(mNextStreamId,
+                width, height, format, consumerUsage, dataSpace, rotation,
+                mTimestampOffset, streamSetId);
     } else {
         newStream = new Camera3OutputStream(mNextStreamId, consumer,
                 width, height, format, dataSpace, rotation,
@@ -1729,6 +1747,44 @@
     }
 }
 
+status_t Camera3Device::setConsumerSurface(int streamId, sp<Surface> consumer) {
+    ATRACE_CALL();
+    ALOGV("%s: Camera %d: set consumer surface for stream %d", __FUNCTION__, mId, streamId);
+    Mutex::Autolock il(mInterfaceLock);
+    Mutex::Autolock l(mLock);
+
+    if (consumer == nullptr) {
+        CLOGE("Null consumer is passed!");
+        return BAD_VALUE;
+    }
+
+    ssize_t idx = mOutputStreams.indexOfKey(streamId);
+    if (idx == NAME_NOT_FOUND) {
+        CLOGE("Stream %d is unknown", streamId);
+        return idx;
+    }
+    sp<Camera3OutputStreamInterface> stream = mOutputStreams[idx];
+    status_t res = stream->setConsumer(consumer);
+    if (res != OK) {
+        CLOGE("Stream %d set consumer failed (error %d %s) ", streamId, res, strerror(-res));
+        return res;
+    }
+
+    if (!stream->isConfiguring()) {
+        CLOGE("Stream %d was already fully configured.", streamId);
+        return INVALID_OPERATION;
+    }
+
+    res = stream->finishConfiguration(mHal3Device);
+    if (res != OK) {
+        SET_ERR_L("Can't finish configuring output stream %d: %s (%d)",
+                stream->getId(), strerror(-res), res);
+        return res;
+    }
+
+    return OK;
+}
+
 /**
  * Camera3Device private methods
  */
@@ -1788,6 +1844,13 @@
         sp<Camera3OutputStreamInterface> stream =
                 mOutputStreams.editValueAt(idx);
 
+        // It is illegal to include a deferred consumer output stream into a request
+        if (stream->isConsumerConfigurationDeferred()) {
+            CLOGE("Stream %d hasn't finished configuration yet due to deferred consumer",
+                    stream->getId());
+            return NULL;
+        }
+
         // Lazy completion of stream configuration (allocation/registration)
         // on first use
         if (stream->isConfiguring()) {
@@ -1952,7 +2015,7 @@
     for (size_t i = 0; i < mOutputStreams.size(); i++) {
         sp<Camera3OutputStreamInterface> outputStream =
             mOutputStreams.editValueAt(i);
-        if (outputStream->isConfiguring()) {
+        if (outputStream->isConfiguring() && !outputStream->isConsumerConfigurationDeferred()) {
             res = outputStream->finishConfiguration(mHal3Device);
             if (res != OK) {
                 SET_ERR_L("Can't finish configuring output stream %d: %s (%d)",
@@ -1964,7 +2027,7 @@
 
     // Request thread needs to know to avoid using repeat-last-settings protocol
     // across configure_streams() calls
-    mRequestThread->configurationComplete();
+    mRequestThread->configurationComplete(mIsConstrainedHighSpeedConfiguration);
 
     // Boost priority of request thread for high speed recording to SCHED_FIFO
     if (mIsConstrainedHighSpeedConfiguration) {
@@ -2683,7 +2746,8 @@
         mCurrentPreCaptureTriggerId(0),
         mRepeatingLastFrameNumber(
             hardware::camera2::ICameraDeviceUser::NO_IN_FLIGHT_REPEATING_FRAMES),
-        mAeLockAvailable(aeLockAvailable) {
+        mAeLockAvailable(aeLockAvailable),
+        mPrepareVideoStream(false) {
     mStatusId = statusTracker->addComponent();
 }
 
@@ -2693,9 +2757,11 @@
     mListener = listener;
 }
 
-void Camera3Device::RequestThread::configurationComplete() {
+void Camera3Device::RequestThread::configurationComplete(bool isConstrainedHighSpeed) {
     Mutex::Autolock l(mRequestLock);
     mReconfigured = true;
+    // Prepare video stream for high speed recording.
+    mPrepareVideoStream = isConstrainedHighSpeed;
 }
 
 status_t Camera3Device::RequestThread::queueRequestList(
@@ -3197,8 +3263,25 @@
                 captureRequest->mOutputStreams.size());
         halRequest->output_buffers = outputBuffers->array();
         for (size_t i = 0; i < captureRequest->mOutputStreams.size(); i++) {
-            res = captureRequest->mOutputStreams.editItemAt(i)->
-                    getBuffer(&outputBuffers->editItemAt(i));
+            sp<Camera3OutputStreamInterface> outputStream = captureRequest->mOutputStreams.editItemAt(i);
+
+            // Prepare video buffers for high speed recording on the first video request.
+            if (mPrepareVideoStream && outputStream->isVideoStream()) {
+                // Only try to prepare video stream on the first video request.
+                mPrepareVideoStream = false;
+
+                res = outputStream->startPrepare(Camera3StreamInterface::ALLOCATE_PIPELINE_MAX);
+                while (res == NOT_ENOUGH_DATA) {
+                    res = outputStream->prepareNextBuffer();
+                }
+                if (res != OK) {
+                    ALOGW("%s: Preparing video buffers for high speed failed: %s (%d)",
+                        __FUNCTION__, strerror(-res), res);
+                    outputStream->cancelPrepare();
+                }
+            }
+
+            res = outputStream->getBuffer(&outputBuffers->editItemAt(i));
             if (res != OK) {
                 // Can't get output buffer from gralloc queue - this could be due to
                 // abandoned queue or other consumer misbehavior, so not a fatal
diff --git a/services/camera/libcameraservice/device3/Camera3Device.h b/services/camera/libcameraservice/device3/Camera3Device.h
index 0366ef6..4b99dab 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.h
+++ b/services/camera/libcameraservice/device3/Camera3Device.h
@@ -95,11 +95,14 @@
 
     // Actual stream creation/deletion is delayed until first request is submitted
     // If adding streams while actively capturing, will pause device before adding
-    // stream, reconfiguring device, and unpausing.
+    // stream, reconfiguring device, and unpausing. If the client creates a stream
+    // with nullptr consumer surface, the client must then call setConsumer()
+    // and finish the stream configuration before starting output streaming.
     virtual status_t createStream(sp<Surface> consumer,
             uint32_t width, uint32_t height, int format,
             android_dataspace dataSpace, camera3_stream_rotation_t rotation, int *id,
-            int streamSetId = camera3::CAMERA3_STREAM_SET_ID_INVALID);
+            int streamSetId = camera3::CAMERA3_STREAM_SET_ID_INVALID,
+            uint32_t consumerUsage = 0);
     virtual status_t createInputStream(
             uint32_t width, uint32_t height, int format,
             int *id);
@@ -160,6 +163,12 @@
     // Methods called by subclasses
     void             notifyStatus(bool idle); // updates from StatusTracker
 
+    /**
+     * Set the deferred consumer surface to the output stream and finish the deferred
+     * consumer configuration.
+     */
+    virtual status_t setConsumerSurface(int streamId, sp<Surface> consumer);
+
   private:
     static const size_t        kDumpLockAttempts  = 10;
     static const size_t        kDumpSleepDuration = 100000; // 0.10 sec
@@ -450,7 +459,7 @@
         /**
          * Call after stream (re)-configuration is completed.
          */
-        void     configurationComplete();
+        void     configurationComplete(bool isConstrainedHighSpeed);
 
         /**
          * Set or clear the list of repeating requests. Does not block
@@ -638,6 +647,9 @@
 
         // Whether the device supports AE lock
         bool               mAeLockAvailable;
+
+        // Flag indicating if we should prepare video stream for video requests.
+        bool               mPrepareVideoStream;
     };
     sp<RequestThread> mRequestThread;
 
diff --git a/services/camera/libcameraservice/device3/Camera3DummyStream.cpp b/services/camera/libcameraservice/device3/Camera3DummyStream.cpp
index 5bf76bd..c74038b 100644
--- a/services/camera/libcameraservice/device3/Camera3DummyStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3DummyStream.cpp
@@ -96,6 +96,15 @@
     return false;
 }
 
+bool Camera3DummyStream::isConsumerConfigurationDeferred() const {
+    return false;
+}
+
+status_t Camera3DummyStream::setConsumer(sp<Surface> consumer) {
+    ALOGE("%s: Stream %d: Dummy stream doesn't support set consumer surface %p!",
+            __FUNCTION__, mId, consumer.get());
+    return INVALID_OPERATION;
+}
 }; // namespace camera3
 
 }; // namespace android
diff --git a/services/camera/libcameraservice/device3/Camera3DummyStream.h b/services/camera/libcameraservice/device3/Camera3DummyStream.h
index 97c0c96..6c8859c 100644
--- a/services/camera/libcameraservice/device3/Camera3DummyStream.h
+++ b/services/camera/libcameraservice/device3/Camera3DummyStream.h
@@ -59,6 +59,16 @@
      */
     bool isVideoStream() const;
 
+    /**
+     * Return if the consumer configuration of this stream is deferred.
+     */
+    virtual bool isConsumerConfigurationDeferred() const;
+
+    /**
+     * Set the consumer surface to the output stream.
+     */
+    virtual status_t setConsumer(sp<Surface> consumer);
+
   protected:
 
     /**
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
index d2b98e6..dcadf36 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
@@ -42,7 +42,8 @@
         mTransform(0),
         mTraceFirstBuffer(true),
         mUseBufferManager(false),
-        mTimestampOffset(timestampOffset) {
+        mTimestampOffset(timestampOffset),
+        mConsumerUsage(0) {
 
     if (mConsumer == NULL) {
         ALOGE("%s: Consumer is NULL!", __FUNCTION__);
@@ -66,7 +67,8 @@
         mTraceFirstBuffer(true),
         mUseMonoTimestamp(false),
         mUseBufferManager(false),
-        mTimestampOffset(timestampOffset) {
+        mTimestampOffset(timestampOffset),
+        mConsumerUsage(0) {
 
     if (format != HAL_PIXEL_FORMAT_BLOB && format != HAL_PIXEL_FORMAT_RAW_OPAQUE) {
         ALOGE("%s: Bad format for size-only stream: %d", __FUNCTION__,
@@ -84,6 +86,39 @@
     }
 }
 
+Camera3OutputStream::Camera3OutputStream(int id,
+        uint32_t width, uint32_t height, int format,
+        uint32_t consumerUsage, android_dataspace dataSpace,
+        camera3_stream_rotation_t rotation, nsecs_t timestampOffset, int setId) :
+        Camera3IOStreamBase(id, CAMERA3_STREAM_OUTPUT, width, height,
+                            /*maxSize*/0, format, dataSpace, rotation, setId),
+        mConsumer(nullptr),
+        mTransform(0),
+        mTraceFirstBuffer(true),
+        mUseBufferManager(false),
+        mTimestampOffset(timestampOffset),
+        mConsumerUsage(consumerUsage) {
+    // Deferred consumer only supports preview surface format now.
+    if (format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
+        ALOGE("%s: Deferred consumer only supports IMPLEMENTATION_DEFINED format now!",
+                __FUNCTION__);
+        mState = STATE_ERROR;
+    }
+
+    // Sanity check for the consumer usage flag.
+    if ((consumerUsage & GraphicBuffer::USAGE_HW_TEXTURE) == 0 &&
+            (consumerUsage & GraphicBuffer::USAGE_HW_COMPOSER) == 0) {
+        ALOGE("%s: Deferred consumer usage flag is illegal (0x%x)!", __FUNCTION__, consumerUsage);
+        mState = STATE_ERROR;
+    }
+
+    mConsumerName = String8("Deferred");
+    if (setId > CAMERA3_STREAM_SET_ID_INVALID) {
+        mBufferReleasedListener = new BufferReleasedListener(this);
+    }
+
+}
+
 Camera3OutputStream::Camera3OutputStream(int id, camera3_stream_type_t type,
                                          uint32_t width, uint32_t height,
                                          int format,
@@ -96,7 +131,8 @@
         mTransform(0),
         mTraceFirstBuffer(true),
         mUseMonoTimestamp(false),
-        mUseBufferManager(false) {
+        mUseBufferManager(false),
+        mConsumerUsage(0) {
 
     if (setId > CAMERA3_STREAM_SET_ID_INVALID) {
         mBufferReleasedListener = new BufferReleasedListener(this);
@@ -459,11 +495,16 @@
         return res;
     }
 
+    // Stream configuration was not finished (can only be in STATE_IN_CONFIG or STATE_CONSTRUCTED
+    // state); no need to change the stream state, return OK.
+    if (mConsumer == nullptr) {
+        return OK;
+    }
+
     ALOGV("%s: disconnecting stream %d from native window", __FUNCTION__, getId());
 
     res = native_window_api_disconnect(mConsumer.get(),
                                        NATIVE_WINDOW_API_CAMERA);
-
     /**
      * This is not an error. if client calling process dies, the window will
      * also die and all calls to it will return DEAD_OBJECT, thus it's already
@@ -505,6 +546,12 @@
 
     status_t res;
     int32_t u = 0;
+    if (mConsumer == nullptr) {
+        // mConsumerUsage was sanitized before the Camera3OutputStream was constructed.
+        *usage = mConsumerUsage;
+        return OK;
+    }
+
     res = static_cast<ANativeWindow*>(mConsumer.get())->query(mConsumer.get(),
             NATIVE_WINDOW_CONSUMER_USAGE_BITS, &u);
 
@@ -540,7 +587,7 @@
 status_t Camera3OutputStream::setBufferManager(sp<Camera3BufferManager> bufferManager) {
     Mutex::Autolock l(mLock);
     if (mState != STATE_CONSTRUCTED) {
-        ALOGE("%s: this method can only be called when stream in in CONSTRUCTED state.",
+        ALOGE("%s: this method can only be called when stream in CONSTRUCTED state.",
                 __FUNCTION__);
         return INVALID_OPERATION;
     }
@@ -591,6 +638,26 @@
     }
 }
 
+bool Camera3OutputStream::isConsumerConfigurationDeferred() const {
+    Mutex::Autolock l(mLock);
+    return mConsumer == nullptr;
+}
+
+status_t Camera3OutputStream::setConsumer(sp<Surface> consumer) {
+    if (consumer == nullptr) {
+        ALOGE("%s: it's illegal to set a null consumer surface!", __FUNCTION__);
+        return INVALID_OPERATION;
+    }
+
+    if (mConsumer != nullptr) {
+        ALOGE("%s: consumer surface was already set!", __FUNCTION__);
+        return INVALID_OPERATION;
+    }
+
+    mConsumer = consumer;
+    return OK;
+}
+
 bool Camera3OutputStream::isConsumedByHWComposer() const {
     uint32_t usage = 0;
     status_t res = getEndpointUsage(&usage);
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.h b/services/camera/libcameraservice/device3/Camera3OutputStream.h
index a883448..2feca27 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStream.h
+++ b/services/camera/libcameraservice/device3/Camera3OutputStream.h
@@ -94,6 +94,16 @@
             android_dataspace dataSpace, camera3_stream_rotation_t rotation,
             nsecs_t timestampOffset, int setId = CAMERA3_STREAM_SET_ID_INVALID);
 
+    /**
+     * Set up a stream with deferred consumer for formats that have 2 dimensions, such as
+     * RAW and YUV. The consumer must be set before using this stream for output. A valid
+     * stream set id needs to be set to support buffer sharing between multiple streams.
+     */
+    Camera3OutputStream(int id, uint32_t width, uint32_t height, int format,
+            uint32_t consumerUsage, android_dataspace dataSpace,
+            camera3_stream_rotation_t rotation, nsecs_t timestampOffset,
+            int setId = CAMERA3_STREAM_SET_ID_INVALID);
+
     virtual ~Camera3OutputStream();
 
     /**
@@ -117,6 +127,16 @@
      */
     bool isConsumedByHWComposer() const;
 
+    /**
+     * Return whether the consumer configuration of this stream is deferred.
+     */
+    virtual bool isConsumerConfigurationDeferred() const;
+
+    /**
+     * Set the consumer surface to the output stream.
+     */
+    virtual status_t setConsumer(sp<Surface> consumer);
+
     class BufferReleasedListener : public BnProducerListener {
         public:
           BufferReleasedListener(wp<Camera3OutputStream> parent) : mParent(parent) {}
@@ -157,6 +177,7 @@
     virtual status_t disconnectLocked();
 
     sp<Surface> mConsumer;
+
   private:
     int               mTransform;
 
@@ -193,6 +214,12 @@
     nsecs_t mTimestampOffset;
 
     /**
+     * Consumer end point usage flag set by the constructor for the deferred
+     * consumer case.
+     */
+    uint32_t    mConsumerUsage;
+
+    /**
      * Internal Camera3Stream interface
      */
     virtual status_t getBufferLocked(camera3_stream_buffer *buffer);
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStreamInterface.h b/services/camera/libcameraservice/device3/Camera3OutputStreamInterface.h
index df89b34..7c09c40 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStreamInterface.h
+++ b/services/camera/libcameraservice/device3/Camera3OutputStreamInterface.h
@@ -39,6 +39,16 @@
      * Return if this output stream is for video encoding.
      */
     virtual bool isVideoStream() const = 0;
+
+    /**
+     * Return whether the consumer configuration of this stream is deferred.
+     */
+    virtual bool isConsumerConfigurationDeferred() const = 0;
+
+    /**
+     * Set the consumer surface to the output stream.
+     */
+    virtual status_t setConsumer(sp<Surface> consumer) = 0;
 };
 
 } // namespace camera3
diff --git a/services/camera/libcameraservice/device3/Camera3Stream.cpp b/services/camera/libcameraservice/device3/Camera3Stream.cpp
index 96d62d4..3ffd9d1 100644
--- a/services/camera/libcameraservice/device3/Camera3Stream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Stream.cpp
@@ -55,7 +55,7 @@
     mMaxSize(maxSize),
     mState(STATE_CONSTRUCTED),
     mStatusId(StatusTracker::NO_STATUS_ID),
-    mStreamUnpreparable(false),
+    mStreamUnpreparable(true),
     mOldUsage(0),
     mOldMaxBuffers(0),
     mPrepared(false),
diff --git a/services/camera/libcameraservice/device3/Camera3Stream.h b/services/camera/libcameraservice/device3/Camera3Stream.h
index 0755700..1ff215d 100644
--- a/services/camera/libcameraservice/device3/Camera3Stream.h
+++ b/services/camera/libcameraservice/device3/Camera3Stream.h
@@ -175,7 +175,8 @@
      *   OK on a successful configuration
      *   NO_INIT in case of a serious error from the HAL device
      *   NO_MEMORY in case of an error registering buffers
-     *   INVALID_OPERATION in case connecting to the consumer failed
+     *   INVALID_OPERATION in case connecting to the consumer failed or consumer
+     *       doesn't exist yet.
      */
     status_t         finishConfiguration(camera3_device *hal3Device);
 
diff --git a/services/mediaextractor/Android.mk b/services/mediaextractor/Android.mk
index bc2b641..a9a2d3c 100644
--- a/services/mediaextractor/Android.mk
+++ b/services/mediaextractor/Android.mk
@@ -19,6 +19,7 @@
 LOCAL_MODULE:= mediaextractor
 LOCAL_32_BIT_ONLY := true
 LOCAL_INIT_RC := mediaextractor.rc
+LOCAL_C_INCLUDES := frameworks/av/media/libmedia
 include $(BUILD_EXECUTABLE)
 
 include $(call all-makefiles-under, $(LOCAL_PATH))
diff --git a/services/mediaextractor/main_extractorservice.cpp b/services/mediaextractor/main_extractorservice.cpp
index a7f3fbe..b829c9a 100644
--- a/services/mediaextractor/main_extractorservice.cpp
+++ b/services/mediaextractor/main_extractorservice.cpp
@@ -29,12 +29,18 @@
 // from LOCAL_C_INCLUDES
 #include "IcuUtils.h"
 #include "MediaExtractorService.h"
+#include "MediaUtils.h"
 #include "minijail/minijail.h"
 
 using namespace android;
 
 int main(int argc __unused, char** argv)
 {
+    limitProcessMemory(
+        "ro.media.maxmem", /* property that defines limit */
+        SIZE_MAX, /* upper limit in bytes */
+        10 /* upper limit as percentage of physical RAM */);
+
     signal(SIGPIPE, SIG_IGN);
     MiniJail();