Merge "Fix for Bundled Effects level compensation" into lmp-mr1-dev
diff --git a/include/media/AudioPolicy.h b/include/media/AudioPolicy.h
new file mode 100644
index 0000000..a755e1e
--- /dev/null
+++ b/include/media/AudioPolicy.h
@@ -0,0 +1,82 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+#ifndef ANDROID_AUDIO_POLICY_H
+#define ANDROID_AUDIO_POLICY_H
+
+#include <system/audio.h>
+#include <system/audio_policy.h>
+#include <binder/Parcel.h>
+#include <utils/String8.h>
+#include <utils/Vector.h>
+
+namespace android {
+
+// Keep in sync with AudioMix.java, AudioMixingRule.java, AudioPolicyConfig.java
+#define RULE_EXCLUSION_MASK 0x8000
+#define RULE_MATCH_ATTRIBUTE_USAGE 0x1
+#define RULE_MATCH_ATTRIBUTE_CAPTURE_PRESET (0x1 << 1)
+#define RULE_EXCLUDE_ATTRIBUTE_USAGE (RULE_EXCLUSION_MASK|RULE_MATCH_ATTRIBUTE_USAGE)
+#define RULE_EXCLUDE_ATTRIBUTE_CAPTURE_PRESET \
+    (RULE_EXCLUSION_MASK|RULE_MATCH_ATTRIBUTE_CAPTURE_PRESET)
+
+#define MIX_TYPE_INVALID -1
+#define MIX_TYPE_PLAYERS 0
+#define MIX_TYPE_RECORDERS 1
+
+#define ROUTE_FLAG_RENDER 0x1
+#define ROUTE_FLAG_LOOP_BACK (0x1 << 1)
+
+#define MAX_MIXES_PER_POLICY 10
+#define MAX_CRITERIA_PER_MIX 20
+
+class AttributeMatchCriterion {
+public:
+    AttributeMatchCriterion() {}
+    AttributeMatchCriterion(audio_usage_t usage, audio_source_t source, uint32_t rule);
+
+    status_t readFromParcel(Parcel *parcel);
+    status_t writeToParcel(Parcel *parcel) const;
+
+    union {
+        audio_usage_t   mUsage;
+        audio_source_t  mSource;
+    } mAttr;
+    uint32_t        mRule;
+};
+
+class AudioMix {
+public:
+    AudioMix() {}
+    AudioMix(Vector<AttributeMatchCriterion> criteria, uint32_t mixType, audio_config_t format,
+             uint32_t routeFlags, String8 registrationId) :
+        mCriteria(criteria), mMixType(mixType), mFormat(format),
+        mRouteFlags(routeFlags), mRegistrationId(registrationId) {}
+
+    status_t readFromParcel(Parcel *parcel);
+    status_t writeToParcel(Parcel *parcel) const;
+
+    Vector<AttributeMatchCriterion> mCriteria;
+    uint32_t        mMixType;
+    audio_config_t  mFormat;
+    uint32_t        mRouteFlags;
+    String8         mRegistrationId;
+};
+
+}; // namespace android
+
+#endif  // ANDROID_AUDIO_POLICY_H
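For illustration, a minimal sketch of building one of these mixes with the types declared above (all concrete values are examples, not part of this change; the registration id is normally assigned by the policy manager):

    // Sketch: a loopback mix that matches players with USAGE_MEDIA.
    AttributeMatchCriterion criterion(
            AUDIO_USAGE_MEDIA, AUDIO_SOURCE_DEFAULT, RULE_MATCH_ATTRIBUTE_USAGE);
    Vector<AttributeMatchCriterion> criteria;
    criteria.add(criterion);

    audio_config_t config;
    memset(&config, 0, sizeof(config));
    config.sample_rate = 48000;
    config.channel_mask = AUDIO_CHANNEL_OUT_STEREO;
    config.format = AUDIO_FORMAT_PCM_16_BIT;

    AudioMix mix(criteria, MIX_TYPE_PLAYERS, config,
                 ROUTE_FLAG_LOOP_BACK, String8("example-mix"));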
diff --git a/include/media/AudioPolicyHelper.h b/include/media/AudioPolicyHelper.h
index 3ed0b74..79231be 100644
--- a/include/media/AudioPolicyHelper.h
+++ b/include/media/AudioPolicyHelper.h
@@ -63,7 +63,7 @@
 
 static void stream_type_to_audio_attributes(audio_stream_type_t streamType,
                                      audio_attributes_t *attr) {
-    attr->flags = 0x0;
+    memset(attr, 0, sizeof(audio_attributes_t));
 
     switch (streamType) {
     case AUDIO_STREAM_DEFAULT:
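The change above matters because attr->flags = 0x0 left every other field of audio_attributes_t (usage, content_type, source, and the tags array) uninitialized, and the switch below only populates some of them. A small illustration of the guarantee callers now get:

    // Sketch: after the memset fix, every field of attr is deterministic,
    // including the ones the switch never touches (e.g. the tags[] array,
    // which previously carried uninitialized stack bytes).
    audio_attributes_t attr;
    stream_type_to_audio_attributes(AUDIO_STREAM_MUSIC, &attr);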
diff --git a/include/media/AudioSystem.h b/include/media/AudioSystem.h
index bf1fc1c..843a354 100644
--- a/include/media/AudioSystem.h
+++ b/include/media/AudioSystem.h
@@ -18,6 +18,7 @@
 #define ANDROID_AUDIOSYSTEM_H_
 
 #include <hardware/audio_effect.h>
+#include <media/AudioPolicy.h>
 #include <media/IAudioFlingerClient.h>
 #include <media/IAudioPolicyServiceClient.h>
 #include <system/audio.h>
@@ -324,6 +325,8 @@
 
     static audio_mode_t getPhoneState();
 
+    static status_t registerPolicyMixes(Vector<AudioMix> mixes, bool registration);
+
     // ----------------------------------------------------------------------------
 
     class AudioPortCallback : public RefBase
diff --git a/include/media/IAudioPolicyService.h b/include/media/IAudioPolicyService.h
index 3e4b873..c98c475 100644
--- a/include/media/IAudioPolicyService.h
+++ b/include/media/IAudioPolicyService.h
@@ -25,6 +25,7 @@
 #include <utils/Errors.h>
 #include <binder/IInterface.h>
 #include <media/AudioSystem.h>
+#include <media/AudioPolicy.h>
 #include <media/IAudioPolicyServiceClient.h>
 
 #include <system/audio_policy.h>
@@ -150,6 +151,8 @@
     virtual status_t releaseSoundTriggerSession(audio_session_t session) = 0;
 
     virtual audio_mode_t getPhoneState() = 0;
+
+    virtual status_t registerPolicyMixes(Vector<AudioMix> mixes, bool registration) = 0;
 };
 
 
diff --git a/include/media/stagefright/ACodec.h b/include/media/stagefright/ACodec.h
index fcccc6d..4d4ce90 100644
--- a/include/media/stagefright/ACodec.h
+++ b/include/media/stagefright/ACodec.h
@@ -182,6 +182,7 @@
     sp<ANativeWindow> mNativeWindow;
     sp<AMessage> mInputFormat;
     sp<AMessage> mOutputFormat;
+    sp<AMessage> mBaseOutputFormat;
 
     Vector<BufferInfo> mBuffers[2];
     bool mPortEOS[2];
diff --git a/include/media/stagefright/MediaCodecList.h b/include/media/stagefright/MediaCodecList.h
index 8605d99..c2bbe4d 100644
--- a/include/media/stagefright/MediaCodecList.h
+++ b/include/media/stagefright/MediaCodecList.h
@@ -52,6 +52,12 @@
     static sp<IMediaCodecList> getLocalInstance();
 
 private:
+    class BinderDeathObserver : public IBinder::DeathRecipient {
+        void binderDied(const wp<IBinder> &the_late_who __unused);
+    };
+
+    static sp<BinderDeathObserver> sBinderDeathObserver;
+
     enum Section {
         SECTION_TOPLEVEL,
         SECTION_DECODERS,
diff --git a/include/media/stagefright/foundation/AUtils.h b/include/media/stagefright/foundation/AUtils.h
index 3a73a39..d7ecf50 100644
--- a/include/media/stagefright/foundation/AUtils.h
+++ b/include/media/stagefright/foundation/AUtils.h
@@ -40,6 +40,12 @@
     }
 }
 
+/* == ceil(nom / den) * den. T must be an integer type; den (the alignment) must be a positive power of 2 */
+template<class T, class U>
+inline static const T align(const T &nom, const U &den) {
+    return (nom + (T)(den - 1)) & (T)~(den - 1);
+}
+
 template<class T>
 inline static T abs(const T &a) {
     return a < 0 ? -a : a;
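Worked examples for the align() helper above (a sketch; any integer T with a positive power-of-2 den behaves the same):

    align(1080, 16);   // == 1088: rounded up to the next multiple of 16
    align(1920, 16);   // == 1920: already aligned, returned unchanged
    align(0, 16);      // == 0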
diff --git a/media/libmedia/Android.mk b/media/libmedia/Android.mk
index e012116..a2e0909 100644
--- a/media/libmedia/Android.mk
+++ b/media/libmedia/Android.mk
@@ -59,7 +59,8 @@
     MemoryLeakTrackUtil.cpp \
     SoundPool.cpp \
     SoundPoolThread.cpp \
-    StringArray.cpp
+    StringArray.cpp \
+    AudioPolicy.cpp
 
 LOCAL_SRC_FILES += ../libnbaio/roundup.c
 
diff --git a/media/libmedia/AudioPolicy.cpp b/media/libmedia/AudioPolicy.cpp
new file mode 100644
index 0000000..d2d0971
--- /dev/null
+++ b/media/libmedia/AudioPolicy.cpp
@@ -0,0 +1,115 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "AudioPolicy"
+//#define LOG_NDEBUG 0
+#include <utils/Log.h>
+#include <media/AudioPolicy.h>
+
+namespace android {
+
+//
+//  AttributeMatchCriterion implementation
+//
+AttributeMatchCriterion::AttributeMatchCriterion(audio_usage_t usage,
+                                                 audio_source_t source,
+                                                 uint32_t rule)
+: mRule(rule)
+{
+    if (mRule == RULE_MATCH_ATTRIBUTE_USAGE ||
+            mRule == RULE_EXCLUDE_ATTRIBUTE_USAGE) {
+        mAttr.mUsage = usage;
+    } else {
+        mAttr.mSource = source;
+    }
+}
+
+status_t AttributeMatchCriterion::readFromParcel(Parcel *parcel)
+{
+    mRule = parcel->readInt32();
+    if (mRule == RULE_MATCH_ATTRIBUTE_USAGE ||
+            mRule == RULE_EXCLUDE_ATTRIBUTE_USAGE) {
+        mAttr.mUsage = (audio_usage_t)parcel->readInt32();
+    } else {
+        mAttr.mSource = (audio_source_t)parcel->readInt32();
+    }
+    return NO_ERROR;
+}
+
+status_t AttributeMatchCriterion::writeToParcel(Parcel *parcel) const
+{
+    parcel->writeInt32(mRule);
+    parcel->writeInt32(mAttr.mUsage);
+    return NO_ERROR;
+}
+
+//
+//  AudioMix implementation
+//
+
+status_t AudioMix::readFromParcel(Parcel *parcel)
+{
+    mMixType = parcel->readInt32();
+    mFormat.sample_rate = (uint32_t)parcel->readInt32();
+    mFormat.channel_mask = (audio_channel_mask_t)parcel->readInt32();
+    mFormat.format = (audio_format_t)parcel->readInt32();
+    mRouteFlags = parcel->readInt32();
+    mRegistrationId = parcel->readString8();
+    size_t size = (size_t)parcel->readInt32();
+    if (size > MAX_CRITERIA_PER_MIX) {
+        size = MAX_CRITERIA_PER_MIX;
+    }
+    for (size_t i = 0; i < size; i++) {
+        AttributeMatchCriterion criterion;
+        if (criterion.readFromParcel(parcel) == NO_ERROR) {
+            mCriteria.add(criterion);
+        }
+    }
+    return NO_ERROR;
+}
+
+status_t AudioMix::writeToParcel(Parcel *parcel) const
+{
+    parcel->writeInt32(mMixType);
+    parcel->writeInt32(mFormat.sample_rate);
+    parcel->writeInt32(mFormat.channel_mask);
+    parcel->writeInt32(mFormat.format);
+    parcel->writeInt32(mRouteFlags);
+    parcel->writeString8(mRegistrationId);
+    size_t size = mCriteria.size();
+    if (size > MAX_CRITERIA_PER_MIX) {
+        size = MAX_CRITERIA_PER_MIX;
+    }
+    size_t sizePosition = parcel->dataPosition();
+    parcel->writeInt32(size);
+    size_t finalSize = size;
+    for (size_t i = 0; i < size; i++) {
+        size_t position = parcel->dataPosition();
+        if (mCriteria[i].writeToParcel(parcel) != NO_ERROR) {
+            parcel->setDataPosition(position);
+            finalSize--;
+        }
+    }
+    if (size != finalSize) {
+        size_t position = parcel->dataPosition();
+        parcel->setDataPosition(sizePosition);
+        parcel->writeInt32(finalSize);
+        parcel->setDataPosition(position);
+    }
+    return NO_ERROR;
+}
+
+}; // namespace android
diff --git a/media/libmedia/AudioSystem.cpp b/media/libmedia/AudioSystem.cpp
index 96f1ade..9cae21c 100644
--- a/media/libmedia/AudioSystem.cpp
+++ b/media/libmedia/AudioSystem.cpp
@@ -958,6 +958,12 @@
     return aps->getPhoneState();
 }
 
+status_t AudioSystem::registerPolicyMixes(Vector<AudioMix> mixes, bool registration)
+{
+    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    if (aps == 0) return PERMISSION_DENIED;
+    return aps->registerPolicyMixes(mixes, registration);
+}
 
 // ---------------------------------------------------------------------------
 
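Client-side usage of the new entry point could look like the following sketch (the mix is built as in the AudioPolicy.h note above; the second argument selects register vs. unregister):

    Vector<AudioMix> mixes;
    mixes.add(mix);

    // Register the mixes with the policy service...
    status_t err = AudioSystem::registerPolicyMixes(mixes, true /* registration */);
    // ...and later unregister the same set.
    AudioSystem::registerPolicyMixes(mixes, false /* registration */);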
diff --git a/media/libmedia/AudioTrack.cpp b/media/libmedia/AudioTrack.cpp
index 4a6df6d..d9c3177 100644
--- a/media/libmedia/AudioTrack.cpp
+++ b/media/libmedia/AudioTrack.cpp
@@ -851,6 +851,10 @@
         // due to hardware latency. We leave this behavior for now.
         *position = dspFrames;
     } else {
+        if (mCblk->mFlags & CBLK_INVALID) {
+            restoreTrack_l("getPosition");
+        }
+
         // IAudioTrack::stop() isn't synchronous; we don't know when presentation completes
         *position = (mState == STATE_STOPPED || mState == STATE_FLUSHED) ?
                 0 : updateAndGetPosition_l();
@@ -1226,7 +1230,11 @@
         mStaticProxy = new StaticAudioTrackClientProxy(cblk, buffers, frameCount, mFrameSizeAF);
         mProxy = mStaticProxy;
     }
-    mProxy->setVolumeLR(GAIN_MINIFLOAT_PACKED_UNITY);
+
+    mProxy->setVolumeLR(gain_minifloat_pack(
+            gain_from_float(mVolume[AUDIO_INTERLEAVE_LEFT]),
+            gain_from_float(mVolume[AUDIO_INTERLEAVE_RIGHT])));
+
     mProxy->setSendLevel(mSendLevel);
     mProxy->setSampleRate(mSampleRate);
     mProxy->setMinimum(mNotificationFramesAct);
@@ -1942,6 +1950,10 @@
         break;
     }
 
+    if (mCblk->mFlags & CBLK_INVALID) {
+        restoreTrack_l("getTimestamp");
+    }
+
     // The presented frame count must always lag behind the consumed frame count.
     // To avoid a race, read the presented frames first.  This ensures that presented <= consumed.
     status_t status = mAudioTrack->getTimestamp(timestamp);
diff --git a/media/libmedia/AudioTrackShared.cpp b/media/libmedia/AudioTrackShared.cpp
index 4a783b3..ff24475 100644
--- a/media/libmedia/AudioTrackShared.cpp
+++ b/media/libmedia/AudioTrackShared.cpp
@@ -307,6 +307,7 @@
 {
     audio_track_cblk_t* cblk = mCblk;
     if (!(android_atomic_or(CBLK_INVALID, &cblk->mFlags) & CBLK_INVALID)) {
+        android_atomic_or(CBLK_FUTEX_WAKE, &cblk->mFutex);
         // it seems that a FUTEX_WAKE_PRIVATE will not wake a FUTEX_WAIT, even within same process
         (void) syscall(__NR_futex, &cblk->mFutex, mClientInServer ? FUTEX_WAKE_PRIVATE : FUTEX_WAKE,
                 1);
@@ -317,6 +318,7 @@
 {
     audio_track_cblk_t* cblk = mCblk;
     if (!(android_atomic_or(CBLK_INTERRUPT, &cblk->mFlags) & CBLK_INTERRUPT)) {
+        android_atomic_or(CBLK_FUTEX_WAKE, &cblk->mFutex);
         (void) syscall(__NR_futex, &cblk->mFutex, mClientInServer ? FUTEX_WAKE_PRIVATE : FUTEX_WAKE,
                 1);
     }
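The added android_atomic_or closes a lost-wakeup window: FUTEX_WAKE only wakes threads already sleeping on the word, so a waiter that has read mFutex but not yet called FUTEX_WAIT would otherwise miss this wake. Setting CBLK_FUTEX_WAKE first changes the word's value, so that late FUTEX_WAIT fails with EAGAIN instead of blocking. A simplified waiter-side sketch of the protocol (the real loop lives in obtainBuffer; this is not the verbatim code):

    int32_t old = android_atomic_and(~CBLK_FUTEX_WAKE, &cblk->mFutex);
    if (!(old & CBLK_FUTEX_WAKE)) {
        // If the waker set CBLK_FUTEX_WAKE after 'old' was read, the futex
        // word no longer matches and FUTEX_WAIT returns EAGAIN immediately.
        (void) syscall(__NR_futex, &cblk->mFutex,
                mClientInServer ? FUTEX_WAIT_PRIVATE : FUTEX_WAIT,
                old & ~CBLK_FUTEX_WAKE, NULL /* no timeout */);
    }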
diff --git a/media/libmedia/IAudioPolicyService.cpp b/media/libmedia/IAudioPolicyService.cpp
index 5873a30..70551c4 100644
--- a/media/libmedia/IAudioPolicyService.cpp
+++ b/media/libmedia/IAudioPolicyService.cpp
@@ -69,7 +69,8 @@
     GET_OUTPUT_FOR_ATTR,
     ACQUIRE_SOUNDTRIGGER_SESSION,
     RELEASE_SOUNDTRIGGER_SESSION,
-    GET_PHONE_STATE
+    GET_PHONE_STATE,
+    REGISTER_POLICY_MIXES,
 };
 
 class BpAudioPolicyService : public BpInterface<IAudioPolicyService>
@@ -675,6 +676,38 @@
         }
         return (audio_mode_t)reply.readInt32();
     }
+
+    virtual status_t registerPolicyMixes(Vector<AudioMix> mixes, bool registration)
+    {
+        Parcel data, reply;
+        data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
+        data.writeInt32(registration ? 1 : 0);
+        size_t size = mixes.size();
+        if (size > MAX_MIXES_PER_POLICY) {
+            size = MAX_MIXES_PER_POLICY;
+        }
+        size_t sizePosition = data.dataPosition();
+        data.writeInt32(size);
+        size_t finalSize = size;
+        for (size_t i = 0; i < size; i++) {
+            size_t position = data.dataPosition();
+            if (mixes[i].writeToParcel(&data) != NO_ERROR) {
+                data.setDataPosition(position);
+                finalSize--;
+            }
+        }
+        if (size != finalSize) {
+            size_t position = data.dataPosition();
+            data.setDataPosition(sizePosition);
+            data.writeInt32(finalSize);
+            data.setDataPosition(position);
+        }
+        status_t status = remote()->transact(REGISTER_POLICY_MIXES, data, &reply);
+        if (status == NO_ERROR) {
+            status = (status_t)reply.readInt32();
+        }
+        return status;
+    }
 };
 
 IMPLEMENT_META_INTERFACE(AudioPolicyService, "android.media.IAudioPolicyService");
@@ -1147,6 +1180,25 @@
             return NO_ERROR;
         } break;
 
+        case REGISTER_POLICY_MIXES: {
+            CHECK_INTERFACE(IAudioPolicyService, data, reply);
+            bool registration = data.readInt32() == 1;
+            Vector<AudioMix> mixes;
+            size_t size = (size_t)data.readInt32();
+            if (size > MAX_MIXES_PER_POLICY) {
+                size = MAX_MIXES_PER_POLICY;
+            }
+            for (size_t i = 0; i < size; i++) {
+                AudioMix mix;
+                if (mix.readFromParcel((Parcel*)&data) == NO_ERROR) {
+                    mixes.add(mix);
+                }
+            }
+            status_t status = registerPolicyMixes(mixes, registration);
+            reply->writeInt32(status);
+            return NO_ERROR;
+        } break;
+
         default:
             return BBinder::onTransact(code, data, reply, flags);
     }
diff --git a/media/libmediaplayerservice/MediaPlayerFactory.cpp b/media/libmediaplayerservice/MediaPlayerFactory.cpp
index 3e0fc0d..aeefb4c 100644
--- a/media/libmediaplayerservice/MediaPlayerFactory.cpp
+++ b/media/libmediaplayerservice/MediaPlayerFactory.cpp
@@ -20,9 +20,12 @@
 
 #include <cutils/properties.h>
 #include <media/IMediaPlayer.h>
+#include <media/stagefright/DataSource.h>
+#include <media/stagefright/FileSource.h>
 #include <media/stagefright/foundation/ADebug.h>
 #include <utils/Errors.h>
 #include <utils/misc.h>
+#include <../libstagefright/include/WVMExtractor.h>
 
 #include "MediaPlayerFactory.h"
 
@@ -179,10 +182,18 @@
     virtual float scoreFactory(const sp<IMediaPlayer>& /*client*/,
                                int fd,
                                int64_t offset,
-                               int64_t /*length*/,
+                               int64_t length,
                                float /*curScore*/) {
-        if (getDefaultPlayerType()
-                == STAGEFRIGHT_PLAYER) {
+        if (legacyDrm()) {
+            sp<DataSource> source = new FileSource(dup(fd), offset, length);
+            String8 mimeType;
+            float confidence;
+            if (SniffWVM(source, &mimeType, &confidence, NULL /* format */)) {
+                return 1.0;
+            }
+        }
+
+        if (getDefaultPlayerType() == STAGEFRIGHT_PLAYER) {
             char buf[20];
             lseek(fd, offset, SEEK_SET);
             read(fd, buf, sizeof(buf));
@@ -198,10 +209,28 @@
         return 0.0;
     }
 
+    virtual float scoreFactory(const sp<IMediaPlayer>& /*client*/,
+                               const char* url,
+                               float /*curScore*/) {
+        if (legacyDrm() && !strncasecmp("widevine://", url, 11)) {
+            return 1.0;
+        }
+        return 0.0;
+    }
+
     virtual sp<MediaPlayerBase> createPlayer() {
         ALOGV(" create StagefrightPlayer");
         return new StagefrightPlayer();
     }
+  private:
+    bool legacyDrm() {
+        char value[PROPERTY_VALUE_MAX];
+        if (property_get("persist.sys.media.legacy-drm", value, NULL)
+                && (!strcmp("1", value) || !strcasecmp("true", value))) {
+            return true;
+        }
+        return false;
+    }
 };
 
 class NuPlayerFactory : public MediaPlayerFactory::IFactory {
diff --git a/media/libmediaplayerservice/nuplayer/GenericSource.cpp b/media/libmediaplayerservice/nuplayer/GenericSource.cpp
index e7a26b6..dd79b50 100644
--- a/media/libmediaplayerservice/nuplayer/GenericSource.cpp
+++ b/media/libmediaplayerservice/nuplayer/GenericSource.cpp
@@ -54,6 +54,7 @@
       mDurationUs(0ll),
       mAudioIsVorbis(false),
       mIsWidevine(false),
+      mIsSecure(false),
       mUIDValid(uidValid),
       mUID(uid),
       mFd(-1),
@@ -163,6 +164,25 @@
         if (mFileMeta->findInt64(kKeyDuration, &duration)) {
             mDurationUs = duration;
         }
+
+        if (!mIsWidevine) {
+            // Check the mime type to see if we actually have a Widevine source.
+            // If the data source is not URL-based (e.g. a file source), we
+            // can't tell until now.
+            const char *fileMime;
+            if (mFileMeta->findCString(kKeyMIMEType, &fileMime)
+                    && !strncasecmp(fileMime, "video/wvm", 9)) {
+                mIsWidevine = true;
+                if (!mUri.empty()) {
+                    // streaming, but the app forgot to specify a widevine:// url
+                    mWVMExtractor = static_cast<WVMExtractor *>(extractor.get());
+                    mWVMExtractor->setAdaptiveStreamingMode(true);
+                    if (mUIDValid) {
+                        mWVMExtractor->setUID(mUID);
+                    }
+                }
+            }
+        }
     }
 
     int32_t totalBitrate = 0;
@@ -208,7 +228,7 @@
                 int32_t secure;
                 if (meta->findInt32(kKeyRequiresSecureBuffers, &secure)
                         && secure) {
-                    mIsWidevine = true;
+                    mIsSecure = true;
                     if (mUIDValid) {
                         extractor->setUID(mUID);
                     }
@@ -263,7 +283,7 @@
 
 status_t NuPlayer::GenericSource::setBuffers(
         bool audio, Vector<MediaBuffer *> &buffers) {
-    if (mIsWidevine && !audio) {
+    if (mIsSecure && !audio) {
         return mVideoTrack.mSource->setBuffers(buffers);
     }
     return INVALID_OPERATION;
@@ -293,6 +313,10 @@
 void NuPlayer::GenericSource::onPrepareAsync() {
     // delayed data source creation
     if (mDataSource == NULL) {
+        // set to false first; if the extractor
+        // comes back as secure, then set it to true.
+        mIsSecure = false;
+
         if (!mUri.empty()) {
             const char* uri = mUri.c_str();
             mIsWidevine = !strncasecmp(uri, "widevine://", 11);
@@ -312,8 +336,6 @@
                    mHTTPService, uri, &mUriHeaders, &mContentType,
                    static_cast<HTTPBase *>(mHttpSource.get()));
         } else {
-            // set to false first, if the extractor
-            // comes back as secure, set it to true then.
             mIsWidevine = false;
 
             mDataSource = new FileSource(mFd, mOffset, mLength);
@@ -368,7 +390,7 @@
     }
 
     notifyFlagsChanged(
-            (mIsWidevine ? FLAG_SECURE : 0)
+            (mIsSecure ? FLAG_SECURE : 0)
             | FLAG_CAN_PAUSE
             | FLAG_CAN_SEEK_BACKWARD
             | FLAG_CAN_SEEK_FORWARD
@@ -485,8 +507,8 @@
     // nothing to do, just account for DRM playback status
     setDrmPlaybackStatusIfNeeded(Playback::STOP, 0);
     mStarted = false;
-    if (mIsWidevine) {
-        // For a widevine source we need to prevent any further reads.
+    if (mIsWidevine || mIsSecure) {
+        // For Widevine or secure sources, we need to prevent any further reads.
         sp<AMessage> msg = new AMessage(kWhatStopWidevine, id());
         sp<AMessage> response;
         (void) msg->postAndAwaitResponse(&response);
@@ -846,7 +868,12 @@
 
     status_t finalResult;
     if (!track->mPackets->hasBufferAvailable(&finalResult)) {
-        return (finalResult == OK ? -EWOULDBLOCK : finalResult);
+        if (finalResult == OK) {
+            postReadBuffer(
+                    audio ? MEDIA_TRACK_TYPE_AUDIO : MEDIA_TRACK_TYPE_VIDEO);
+            return -EWOULDBLOCK;
+        }
+        return finalResult;
     }
 
     status_t result = track->mPackets->dequeueAccessUnit(accessUnit);
@@ -1179,6 +1206,7 @@
 sp<ABuffer> NuPlayer::GenericSource::mediaBufferToABuffer(
         MediaBuffer* mb,
         media_track_type trackType,
+        int64_t /* seekTimeUs */,
         int64_t *actualTimeUs) {
     bool audio = trackType == MEDIA_TRACK_TYPE_AUDIO;
     size_t outLength = mb->range_length();
@@ -1188,7 +1216,7 @@
     }
 
     sp<ABuffer> ab;
-    if (mIsWidevine && !audio) {
+    if (mIsSecure && !audio) {
         // data is already provided in the buffer
         ab = new ABuffer(NULL, mb->range_length());
         mb->add_ref();
@@ -1216,6 +1244,16 @@
     CHECK(mb->meta_data()->findInt64(kKeyTime, &timeUs));
     meta->setInt64("timeUs", timeUs);
 
+#if 0
+    // Temporarily disable pre-roll until we have a full solution to handle
+    // both single seek and continuous seek gracefully.
+    if (seekTimeUs > timeUs) {
+        sp<AMessage> extra = new AMessage;
+        extra->setInt64("resume-at-mediaTimeUs", seekTimeUs);
+        meta->setMessage("extra", extra);
+    }
+#endif
+
     if (trackType == MEDIA_TRACK_TYPE_TIMEDTEXT) {
         const char *mime;
         CHECK(mTimedTextTrack.mSource != NULL
@@ -1257,14 +1295,13 @@
     int32_t tmpType;
     CHECK(msg->findInt32("trackType", &tmpType));
     media_track_type trackType = (media_track_type)tmpType;
+    readBuffer(trackType);
     {
         // only protect the variable change, as readBuffer may
-        // take considerable time.  This may result in one extra
-        // read being processed, but that is benign.
+        // take considerable time.
         Mutex::Autolock _l(mReadBufferLock);
         mPendingReadBufferTypes &= ~(1 << trackType);
     }
-    readBuffer(trackType);
 }
 
 void NuPlayer::GenericSource::readBuffer(
@@ -1317,7 +1354,7 @@
         seeking = true;
     }
 
-    if (mIsWidevine && trackType != MEDIA_TRACK_TYPE_AUDIO) {
+    if (mIsWidevine) {
         options.setNonBlocking();
     }
 
@@ -1348,7 +1385,8 @@
                 track->mPackets->queueDiscontinuity( type, NULL, true /* discard */);
             }
 
-            sp<ABuffer> buffer = mediaBufferToABuffer(mbuf, trackType, actualTimeUs);
+            sp<ABuffer> buffer = mediaBufferToABuffer(
+                    mbuf, trackType, seekTimeUs, actualTimeUs);
             track->mPackets->queueAccessUnit(buffer);
             formatChange = false;
             seeking = false;
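Two behavioral notes on this file: mIsSecure now tracks "the decoder requires secure buffers" separately from mIsWidevine ("the source is a Widevine extractor"), since a Widevine source does not always require secure buffers. And dequeueAccessUnit no longer merely reports -EWOULDBLOCK; it re-posts the readBuffer itself, so a caller can treat -EWOULDBLOCK as "poll again". A hypothetical caller sketch (helper names are illustrative only):

    sp<ABuffer> accessUnit;
    status_t err = source->dequeueAccessUnit(audio, &accessUnit);
    if (err == -EWOULDBLOCK) {
        // A refill has already been scheduled internally; just retry later.
        msg->post(10000ll /* 10 ms */);
    } else if (err != OK) {
        handleEosOrError(err);  // hypothetical
    }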
diff --git a/media/libmediaplayerservice/nuplayer/GenericSource.h b/media/libmediaplayerservice/nuplayer/GenericSource.h
index f2528a9..1b63a1f 100644
--- a/media/libmediaplayerservice/nuplayer/GenericSource.h
+++ b/media/libmediaplayerservice/nuplayer/GenericSource.h
@@ -118,6 +118,7 @@
     int64_t mDurationUs;
     bool mAudioIsVorbis;
     bool mIsWidevine;
+    bool mIsSecure;
     bool mUIDValid;
     uid_t mUID;
     sp<IMediaHTTPService> mHTTPService;
@@ -182,6 +183,7 @@
     sp<ABuffer> mediaBufferToABuffer(
             MediaBuffer *mbuf,
             media_track_type trackType,
+            int64_t seekTimeUs,
             int64_t *actualTimeUs = NULL);
 
     void postReadBuffer(media_track_type trackType);
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
index d433a4d..a28591e 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
@@ -537,7 +537,7 @@
             sp<RefBase> obj;
             CHECK(msg->findObject("native-window", &obj));
 
-            if (mSource->getFormat(false /* audio */) == NULL) {
+            if (mSource == NULL || mSource->getFormat(false /* audio */) == NULL) {
                 performSetSurface(static_cast<NativeWindowWrapper *>(obj.get()));
                 break;
             }
@@ -1667,6 +1667,10 @@
 
             sp<NuPlayerDriver> driver = mDriver.promote();
             if (driver != NULL) {
+                if ((flags & NuPlayer::Source::FLAG_CAN_SEEK) == 0) {
+                    driver->notifyListener(
+                            MEDIA_INFO, MEDIA_INFO_NOT_SEEKABLE, 0);
+                }
                 driver->notifyFlagsChanged(flags);
             }
 
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp
index 6ad28b5..2abd9d6 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp
@@ -476,6 +476,7 @@
     buffer->meta()->setInt64("timeUs", timeUs);
     if (flags & MediaCodec::BUFFER_FLAG_EOS) {
         buffer->meta()->setInt32("eos", true);
+        notifyResumeCompleteIfNecessary();
     }
     // we do not expect CODECCONFIG or SYNCFRAME for decoder
 
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
index a1e1aec..21b74ee 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
@@ -771,6 +771,7 @@
 
         if (mAnchorTimeMediaUs < 0) {
             setAnchorTime(mediaTimeUs, nowUs);
+            mAnchorMaxMediaUs = mediaTimeUs;
             realTimeUs = nowUs;
         } else {
             realTimeUs = getRealTimeUs(mediaTimeUs, nowUs);
diff --git a/media/libstagefright/AACWriter.cpp b/media/libstagefright/AACWriter.cpp
index 353920e..7cc9430 100644
--- a/media/libstagefright/AACWriter.cpp
+++ b/media/libstagefright/AACWriter.cpp
@@ -61,7 +61,8 @@
       mPaused(false),
       mResumed(false),
       mChannelCount(-1),
-      mSampleRate(-1) {
+      mSampleRate(-1),
+      mAACProfile(OMX_AUDIO_AACObjectLC) {
 }
 
 AACWriter::~AACWriter() {
diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp
index 1413635..e1b3b4d 100644
--- a/media/libstagefright/ACodec.cpp
+++ b/media/libstagefright/ACodec.cpp
@@ -1158,7 +1158,7 @@
     }
 
     sp<AMessage> inputFormat = new AMessage();
-    sp<AMessage> outputFormat = new AMessage();
+    sp<AMessage> outputFormat = mNotify->dup(); // will use this for kWhatOutputFormatChanged
 
     mIsEncoder = encoder;
 
@@ -1303,7 +1303,21 @@
                 return err;
             }
 
-            inputFormat->setInt32("adaptive-playback", true);
+            int32_t maxWidth = 0, maxHeight = 0;
+            if (msg->findInt32("max-width", &maxWidth) &&
+                    msg->findInt32("max-height", &maxHeight)) {
+
+                err = mOMX->prepareForAdaptivePlayback(
+                        mNode, kPortIndexOutput, OMX_TRUE, maxWidth, maxHeight);
+                if (err != OK) {
+                    ALOGW("[%s] prepareForAdaptivePlayback failed w/ err %d",
+                            mComponentName.c_str(), err);
+                } else {
+                    inputFormat->setInt32("max-width", maxWidth);
+                    inputFormat->setInt32("max-height", maxHeight);
+                    inputFormat->setInt32("adaptive-playback", true);
+                }
+            }
         } else {
             ALOGV("Configuring CPU controlled video playback.");
             mTunneled = false;
@@ -1543,6 +1557,8 @@
         err = setMinBufferSize(kPortIndexInput, 8192);  // XXX
     }
 
+    mBaseOutputFormat = outputFormat;
+
     CHECK_EQ(getPortFormat(kPortIndexInput, inputFormat), (status_t)OK);
     CHECK_EQ(getPortFormat(kPortIndexOutput, outputFormat), (status_t)OK);
     mInputFormat = inputFormat;
@@ -3533,7 +3549,7 @@
 }
 
 void ACodec::sendFormatChange(const sp<AMessage> &reply) {
-    sp<AMessage> notify = mNotify->dup();
+    sp<AMessage> notify = mBaseOutputFormat->dup();
     notify->setInt32("what", kWhatOutputFormatChanged);
 
     CHECK_EQ(getPortFormat(kPortIndexOutput, notify), (status_t)OK);
@@ -4238,7 +4254,8 @@
 
         case RESUBMIT_BUFFERS:
         {
-            if (rangeLength == 0 && !(flags & OMX_BUFFERFLAG_EOS)) {
+            if (rangeLength == 0 && (!(flags & OMX_BUFFERFLAG_EOS)
+                    || mCodec->mPortEOS[kPortIndexOutput])) {
                 ALOGV("[%s] calling fillBuffer %u",
                      mCodec->mComponentName.c_str(), info->mBufferID);
 
@@ -4648,6 +4665,7 @@
     mCodec->mRepeatFrameDelayUs = -1ll;
     mCodec->mInputFormat.clear();
     mCodec->mOutputFormat.clear();
+    mCodec->mBaseOutputFormat.clear();
 
     if (mCodec->mShutdownInProgress) {
         bool keepComponentAllocated = mCodec->mKeepComponentAllocated;
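With the adaptive-playback change above, prepareForAdaptivePlayback is only attempted when the configuring format carries the maximum dimensions. A sketch of the format a client would hand to ACodec (keys exactly as read by the code above, values illustrative):

    sp<AMessage> format = new AMessage;
    format->setString("mime", "video/avc");
    format->setInt32("width", 1280);
    format->setInt32("height", 720);
    // Without these two keys adaptive playback is no longer enabled:
    format->setInt32("max-width", 1920);
    format->setInt32("max-height", 1080);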
diff --git a/media/libstagefright/MediaCodecList.cpp b/media/libstagefright/MediaCodecList.cpp
index 5b8be46..cf6e937 100644
--- a/media/libstagefright/MediaCodecList.cpp
+++ b/media/libstagefright/MediaCodecList.cpp
@@ -62,6 +62,14 @@
 
 sp<IMediaCodecList> MediaCodecList::sRemoteList;
 
+sp<MediaCodecList::BinderDeathObserver> MediaCodecList::sBinderDeathObserver;
+
+void MediaCodecList::BinderDeathObserver::binderDied(const wp<IBinder> &who __unused) {
+    Mutex::Autolock _l(sRemoteInitMutex);
+    sRemoteList.clear();
+    sBinderDeathObserver.clear();
+}
+
 // static
 sp<IMediaCodecList> MediaCodecList::getInstance() {
     Mutex::Autolock _l(sRemoteInitMutex);
@@ -72,8 +80,11 @@
             interface_cast<IMediaPlayerService>(binder);
         if (service.get() != NULL) {
             sRemoteList = service->getCodecList();
+            if (sRemoteList != NULL) {
+                sBinderDeathObserver = new BinderDeathObserver();
+                binder->linkToDeath(sBinderDeathObserver.get());
+            }
         }
-
         if (sRemoteList == NULL) {
             // if failed to get remote list, create local list
             sRemoteList = getLocalInstance();
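The death observer exists so a cached remote codec list cannot outlive mediaserver: when the binder dies, the cache is cleared and the next lookup transparently refetches (or falls back to the local list). Callers are unaffected:

    // After a mediaserver restart the stale cache has been dropped,
    // so this simply fetches a fresh list.
    sp<IMediaCodecList> list = MediaCodecList::getInstance();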
diff --git a/media/libstagefright/codecs/avc/enc/SoftAVCEncoder.cpp b/media/libstagefright/codecs/avc/enc/SoftAVCEncoder.cpp
index bb55871..24dfc29 100644
--- a/media/libstagefright/codecs/avc/enc/SoftAVCEncoder.cpp
+++ b/media/libstagefright/codecs/avc/enc/SoftAVCEncoder.cpp
@@ -17,6 +17,7 @@
 //#define LOG_NDEBUG 0
 #define LOG_TAG "SoftAVCEncoder"
 #include <utils/Log.h>
+#include <utils/misc.h>
 
 #include "avcenc_api.h"
 #include "avcenc_int.h"
@@ -25,6 +26,7 @@
 #include <HardwareAPI.h>
 #include <MetadataBufferType.h>
 #include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AUtils.h>
 #include <media/stagefright/MediaDefs.h>
 #include <media/stagefright/MediaErrors.h>
 #include <media/stagefright/MetaData.h>
@@ -51,31 +53,36 @@
     params->nVersion.s.nStep = 0;
 }
 
+static const CodecProfileLevel kProfileLevels[] = {
+    { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel2  },
+};
+
 typedef struct LevelConversion {
     OMX_U32 omxLevel;
     AVCLevel avcLevel;
+    uint32_t maxMacroBlocks;
 } LevelConversion;
 
 static LevelConversion ConversionTable[] = {
-    { OMX_VIDEO_AVCLevel1,  AVC_LEVEL1_B },
-    { OMX_VIDEO_AVCLevel1b, AVC_LEVEL1   },
-    { OMX_VIDEO_AVCLevel11, AVC_LEVEL1_1 },
-    { OMX_VIDEO_AVCLevel12, AVC_LEVEL1_2 },
-    { OMX_VIDEO_AVCLevel13, AVC_LEVEL1_3 },
-    { OMX_VIDEO_AVCLevel2,  AVC_LEVEL2 },
+    { OMX_VIDEO_AVCLevel1,  AVC_LEVEL1_B, 99 },
+    { OMX_VIDEO_AVCLevel1b, AVC_LEVEL1,   99 },
+    { OMX_VIDEO_AVCLevel11, AVC_LEVEL1_1, 396 },
+    { OMX_VIDEO_AVCLevel12, AVC_LEVEL1_2, 396 },
+    { OMX_VIDEO_AVCLevel13, AVC_LEVEL1_3, 396 },
+    { OMX_VIDEO_AVCLevel2,  AVC_LEVEL2,   396 },
 #if 0
-    // encoding speed is very poor if video
-    // resolution is higher than CIF
-    { OMX_VIDEO_AVCLevel21, AVC_LEVEL2_1 },
-    { OMX_VIDEO_AVCLevel22, AVC_LEVEL2_2 },
-    { OMX_VIDEO_AVCLevel3,  AVC_LEVEL3   },
-    { OMX_VIDEO_AVCLevel31, AVC_LEVEL3_1 },
-    { OMX_VIDEO_AVCLevel32, AVC_LEVEL3_2 },
-    { OMX_VIDEO_AVCLevel4,  AVC_LEVEL4   },
-    { OMX_VIDEO_AVCLevel41, AVC_LEVEL4_1 },
-    { OMX_VIDEO_AVCLevel42, AVC_LEVEL4_2 },
-    { OMX_VIDEO_AVCLevel5,  AVC_LEVEL5   },
-    { OMX_VIDEO_AVCLevel51, AVC_LEVEL5_1 },
+    // encoding speed is very poor if video resolution
+    // is higher than CIF or if level is higher than 2
+    { OMX_VIDEO_AVCLevel21, AVC_LEVEL2_1, 792 },
+    { OMX_VIDEO_AVCLevel22, AVC_LEVEL2_2, 1620 },
+    { OMX_VIDEO_AVCLevel3,  AVC_LEVEL3,   1620 },
+    { OMX_VIDEO_AVCLevel31, AVC_LEVEL3_1, 3600 },
+    { OMX_VIDEO_AVCLevel32, AVC_LEVEL3_2, 5120 },
+    { OMX_VIDEO_AVCLevel4,  AVC_LEVEL4,   8192 },
+    { OMX_VIDEO_AVCLevel41, AVC_LEVEL4_1, 8192 },
+    { OMX_VIDEO_AVCLevel42, AVC_LEVEL4_2, 8704 },
+    { OMX_VIDEO_AVCLevel5,  AVC_LEVEL5,   22080 },
+    { OMX_VIDEO_AVCLevel51, AVC_LEVEL5_1, 36864 },
 #endif
 };
 
@@ -148,13 +155,11 @@
             const OMX_CALLBACKTYPE *callbacks,
             OMX_PTR appData,
             OMX_COMPONENTTYPE **component)
-    : SoftVideoEncoderOMXComponent(name, callbacks, appData, component),
-      mVideoWidth(176),
-      mVideoHeight(144),
-      mVideoFrameRate(30),
-      mVideoBitRate(192000),
-      mVideoColorFormat(OMX_COLOR_FormatYUV420Planar),
-      mStoreMetaDataInBuffers(false),
+    : SoftVideoEncoderOMXComponent(
+            name, "video_encoder.avc", OMX_VIDEO_CodingAVC,
+            kProfileLevels, NELEM(kProfileLevels),
+            176 /* width */, 144 /* height */,
+            callbacks, appData, component),
       mIDRFrameRefreshIntervalInSec(1),
       mAVCEncProfile(AVC_BASELINE),
       mAVCEncLevel(AVC_LEVEL2),
@@ -168,7 +173,13 @@
       mInputFrameData(NULL),
       mSliceGroup(NULL) {
 
-    initPorts();
+    const size_t kOutputBufferSize =
+        320 * ConversionTable[NELEM(ConversionTable) - 1].maxMacroBlocks;
+
+    initPorts(
+            kNumBuffers, kNumBuffers, kOutputBufferSize,
+            MEDIA_MIMETYPE_VIDEO_AVC, 2 /* minCompressionRatio */);
+
     ALOGI("Construct SoftAVCEncoder");
 }
 
@@ -230,30 +241,28 @@
 
     mEncParams->use_overrun_buffer = AVC_OFF;
 
-    if (mVideoColorFormat != OMX_COLOR_FormatYUV420Planar
-            || mStoreMetaDataInBuffers) {
+    if (mColorFormat != OMX_COLOR_FormatYUV420Planar || mInputDataIsMeta) {
         // Color conversion is needed.
         free(mInputFrameData);
         mInputFrameData =
-            (uint8_t *) malloc((mVideoWidth * mVideoHeight * 3 ) >> 1);
+            (uint8_t *) malloc((mWidth * mHeight * 3 ) >> 1);
         CHECK(mInputFrameData != NULL);
     }
 
     // PV's AVC encoder requires the video dimensions to be a multiple of 16
-    if (mVideoWidth % 16 != 0 || mVideoHeight % 16 != 0) {
+    if (mWidth % 16 != 0 || mHeight % 16 != 0) {
         ALOGE("Video frame size %dx%d must be a multiple of 16",
-            mVideoWidth, mVideoHeight);
+            mWidth, mHeight);
         return OMX_ErrorBadParameter;
     }
 
-    mEncParams->width = mVideoWidth;
-    mEncParams->height = mVideoHeight;
-    mEncParams->bitrate = mVideoBitRate;
-    mEncParams->frame_rate = 1000 * mVideoFrameRate;  // In frames/ms!
-    mEncParams->CPB_size = (uint32_t) (mVideoBitRate >> 1);
+    mEncParams->width = mWidth;
+    mEncParams->height = mHeight;
+    mEncParams->bitrate = mBitrate;
+    mEncParams->frame_rate = (1000 * mFramerate) >> 16;  // in frames/ms! (mFramerate is in Q16)
+    mEncParams->CPB_size = (uint32_t) (mBitrate >> 1);
 
-    int32_t nMacroBlocks = ((((mVideoWidth + 15) >> 4) << 4) *
-            (((mVideoHeight + 15) >> 4) << 4)) >> 8;
+    int32_t nMacroBlocks = divUp(mWidth, 16) * divUp(mHeight, 16);
     CHECK(mSliceGroup == NULL);
     mSliceGroup = (uint32_t *) malloc(sizeof(uint32_t) * nMacroBlocks);
     CHECK(mSliceGroup != NULL);
@@ -272,7 +281,7 @@
         mEncParams->idr_period = 1;  // All I frames
     } else {
         mEncParams->idr_period =
-            (mIDRFrameRefreshIntervalInSec * mVideoFrameRate);
+            (mIDRFrameRefreshIntervalInSec * mFramerate) >> 16; // mFramerate is in Q16
     }
 
     // Set profile and level
@@ -345,71 +354,9 @@
     mOutputBuffers.clear();
 }
 
-void SoftAVCEncoder::initPorts() {
-    OMX_PARAM_PORTDEFINITIONTYPE def;
-    InitOMXParams(&def);
-
-    const size_t kInputBufferSize = (mVideoWidth * mVideoHeight * 3) >> 1;
-
-    // 31584 is PV's magic number.  Not sure why.
-    const size_t kOutputBufferSize =
-            (kInputBufferSize > 31584) ? kInputBufferSize: 31584;
-
-    def.nPortIndex = 0;
-    def.eDir = OMX_DirInput;
-    def.nBufferCountMin = kNumBuffers;
-    def.nBufferCountActual = def.nBufferCountMin;
-    def.nBufferSize = kInputBufferSize;
-    def.bEnabled = OMX_TRUE;
-    def.bPopulated = OMX_FALSE;
-    def.eDomain = OMX_PortDomainVideo;
-    def.bBuffersContiguous = OMX_FALSE;
-    def.nBufferAlignment = 1;
-
-    def.format.video.cMIMEType = const_cast<char *>("video/raw");
-    def.format.video.eCompressionFormat = OMX_VIDEO_CodingUnused;
-    def.format.video.eColorFormat = OMX_COLOR_FormatYUV420Planar;
-    def.format.video.xFramerate = (mVideoFrameRate << 16);  // Q16 format
-    def.format.video.nBitrate = mVideoBitRate;
-    def.format.video.nFrameWidth = mVideoWidth;
-    def.format.video.nFrameHeight = mVideoHeight;
-    def.format.video.nStride = mVideoWidth;
-    def.format.video.nSliceHeight = mVideoHeight;
-
-    addPort(def);
-
-    def.nPortIndex = 1;
-    def.eDir = OMX_DirOutput;
-    def.nBufferCountMin = kNumBuffers;
-    def.nBufferCountActual = def.nBufferCountMin;
-    def.nBufferSize = kOutputBufferSize;
-    def.bEnabled = OMX_TRUE;
-    def.bPopulated = OMX_FALSE;
-    def.eDomain = OMX_PortDomainVideo;
-    def.bBuffersContiguous = OMX_FALSE;
-    def.nBufferAlignment = 2;
-
-    def.format.video.cMIMEType = const_cast<char *>("video/avc");
-    def.format.video.eCompressionFormat = OMX_VIDEO_CodingAVC;
-    def.format.video.eColorFormat = OMX_COLOR_FormatUnused;
-    def.format.video.xFramerate = (0 << 16);  // Q16 format
-    def.format.video.nBitrate = mVideoBitRate;
-    def.format.video.nFrameWidth = mVideoWidth;
-    def.format.video.nFrameHeight = mVideoHeight;
-    def.format.video.nStride = mVideoWidth;
-    def.format.video.nSliceHeight = mVideoHeight;
-
-    addPort(def);
-}
-
 OMX_ERRORTYPE SoftAVCEncoder::internalGetParameter(
         OMX_INDEXTYPE index, OMX_PTR params) {
     switch (index) {
-        case OMX_IndexParamVideoErrorCorrection:
-        {
-            return OMX_ErrorNotImplemented;
-        }
-
         case OMX_IndexParamVideoBitrate:
         {
             OMX_VIDEO_PARAM_BITRATETYPE *bitRate =
@@ -420,37 +367,7 @@
             }
 
             bitRate->eControlRate = OMX_Video_ControlRateVariable;
-            bitRate->nTargetBitrate = mVideoBitRate;
-            return OMX_ErrorNone;
-        }
-
-        case OMX_IndexParamVideoPortFormat:
-        {
-            OMX_VIDEO_PARAM_PORTFORMATTYPE *formatParams =
-                (OMX_VIDEO_PARAM_PORTFORMATTYPE *)params;
-
-            if (formatParams->nPortIndex > 1) {
-                return OMX_ErrorUndefined;
-            }
-
-            if (formatParams->nIndex > 2) {
-                return OMX_ErrorNoMore;
-            }
-
-            if (formatParams->nPortIndex == 0) {
-                formatParams->eCompressionFormat = OMX_VIDEO_CodingUnused;
-                if (formatParams->nIndex == 0) {
-                    formatParams->eColorFormat = OMX_COLOR_FormatYUV420Planar;
-                } else if (formatParams->nIndex == 1) {
-                    formatParams->eColorFormat = OMX_COLOR_FormatYUV420SemiPlanar;
-                } else {
-                    formatParams->eColorFormat = OMX_COLOR_FormatAndroidOpaque;
-                }
-            } else {
-                formatParams->eCompressionFormat = OMX_VIDEO_CodingAVC;
-                formatParams->eColorFormat = OMX_COLOR_FormatUnused;
-            }
-
+            bitRate->nTargetBitrate = mBitrate;
             return OMX_ErrorNone;
         }
 
@@ -487,30 +404,8 @@
             return OMX_ErrorNone;
         }
 
-        case OMX_IndexParamVideoProfileLevelQuerySupported:
-        {
-            OMX_VIDEO_PARAM_PROFILELEVELTYPE *profileLevel =
-                (OMX_VIDEO_PARAM_PROFILELEVELTYPE *)params;
-
-            if (profileLevel->nPortIndex != 1) {
-                return OMX_ErrorUndefined;
-            }
-
-            const size_t size =
-                    sizeof(ConversionTable) / sizeof(ConversionTable[0]);
-
-            if (profileLevel->nProfileIndex >= size) {
-                return OMX_ErrorNoMore;
-            }
-
-            profileLevel->eProfile = OMX_VIDEO_AVCProfileBaseline;
-            profileLevel->eLevel = ConversionTable[profileLevel->nProfileIndex].omxLevel;
-
-            return OMX_ErrorNone;
-        }
-
         default:
-            return SimpleSoftOMXComponent::internalGetParameter(index, params);
+            return SoftVideoEncoderOMXComponent::internalGetParameter(index, params);
     }
 }
 
@@ -519,11 +414,6 @@
     int32_t indexFull = index;
 
     switch (indexFull) {
-        case OMX_IndexParamVideoErrorCorrection:
-        {
-            return OMX_ErrorNotImplemented;
-        }
-
         case OMX_IndexParamVideoBitrate:
         {
             OMX_VIDEO_PARAM_BITRATETYPE *bitRate =
@@ -534,109 +424,7 @@
                 return OMX_ErrorUndefined;
             }
 
-            mVideoBitRate = bitRate->nTargetBitrate;
-            return OMX_ErrorNone;
-        }
-
-        case OMX_IndexParamPortDefinition:
-        {
-            OMX_PARAM_PORTDEFINITIONTYPE *def =
-                (OMX_PARAM_PORTDEFINITIONTYPE *)params;
-            if (def->nPortIndex > 1) {
-                return OMX_ErrorUndefined;
-            }
-
-            if (def->nPortIndex == 0) {
-                if (def->format.video.eCompressionFormat != OMX_VIDEO_CodingUnused ||
-                    (def->format.video.eColorFormat != OMX_COLOR_FormatYUV420Planar &&
-                     def->format.video.eColorFormat != OMX_COLOR_FormatYUV420SemiPlanar &&
-                     def->format.video.eColorFormat != OMX_COLOR_FormatAndroidOpaque)) {
-                    return OMX_ErrorUndefined;
-                }
-            } else {
-                if (def->format.video.eCompressionFormat != OMX_VIDEO_CodingAVC ||
-                    (def->format.video.eColorFormat != OMX_COLOR_FormatUnused)) {
-                    return OMX_ErrorUndefined;
-                }
-            }
-
-            OMX_ERRORTYPE err = SimpleSoftOMXComponent::internalSetParameter(index, params);
-            if (OMX_ErrorNone != err) {
-                return err;
-            }
-
-            if (def->nPortIndex == 0) {
-                mVideoWidth = def->format.video.nFrameWidth;
-                mVideoHeight = def->format.video.nFrameHeight;
-                mVideoFrameRate = def->format.video.xFramerate >> 16;
-                mVideoColorFormat = def->format.video.eColorFormat;
-
-                OMX_PARAM_PORTDEFINITIONTYPE *portDef =
-                    &editPortInfo(0)->mDef;
-                portDef->format.video.nFrameWidth = mVideoWidth;
-                portDef->format.video.nFrameHeight = mVideoHeight;
-                portDef->format.video.nStride = portDef->format.video.nFrameWidth;
-                portDef->format.video.nSliceHeight = portDef->format.video.nFrameHeight;
-                portDef->format.video.xFramerate = def->format.video.xFramerate;
-                portDef->format.video.eColorFormat =
-                    (OMX_COLOR_FORMATTYPE) mVideoColorFormat;
-                portDef->nBufferSize =
-                    (portDef->format.video.nStride * portDef->format.video.nSliceHeight * 3) / 2;
-                portDef = &editPortInfo(1)->mDef;
-                portDef->format.video.nFrameWidth = mVideoWidth;
-                portDef->format.video.nFrameHeight = mVideoHeight;
-            } else {
-                mVideoBitRate = def->format.video.nBitrate;
-            }
-
-            return OMX_ErrorNone;
-        }
-
-        case OMX_IndexParamStandardComponentRole:
-        {
-            const OMX_PARAM_COMPONENTROLETYPE *roleParams =
-                (const OMX_PARAM_COMPONENTROLETYPE *)params;
-
-            if (strncmp((const char *)roleParams->cRole,
-                        "video_encoder.avc",
-                        OMX_MAX_STRINGNAME_SIZE - 1)) {
-                return OMX_ErrorUndefined;
-            }
-
-            return OMX_ErrorNone;
-        }
-
-        case OMX_IndexParamVideoPortFormat:
-        {
-            const OMX_VIDEO_PARAM_PORTFORMATTYPE *formatParams =
-                (const OMX_VIDEO_PARAM_PORTFORMATTYPE *)params;
-
-            if (formatParams->nPortIndex > 1) {
-                return OMX_ErrorUndefined;
-            }
-
-            if (formatParams->nIndex > 2) {
-                return OMX_ErrorNoMore;
-            }
-
-            if (formatParams->nPortIndex == 0) {
-                if (formatParams->eCompressionFormat != OMX_VIDEO_CodingUnused ||
-                    ((formatParams->nIndex == 0 &&
-                      formatParams->eColorFormat != OMX_COLOR_FormatYUV420Planar) ||
-                    (formatParams->nIndex == 1 &&
-                     formatParams->eColorFormat != OMX_COLOR_FormatYUV420SemiPlanar) ||
-                    (formatParams->nIndex == 2 &&
-                     formatParams->eColorFormat != OMX_COLOR_FormatAndroidOpaque) )) {
-                    return OMX_ErrorUndefined;
-                }
-                mVideoColorFormat = formatParams->eColorFormat;
-            } else {
-                if (formatParams->eCompressionFormat != OMX_VIDEO_CodingAVC ||
-                    formatParams->eColorFormat != OMX_COLOR_FormatUnused) {
-                    return OMX_ErrorUndefined;
-                }
-            }
-
+            mBitrate = bitRate->nTargetBitrate;
             return OMX_ErrorNone;
         }
 
@@ -673,29 +461,8 @@
             return OMX_ErrorNone;
         }
 
-        case kStoreMetaDataExtensionIndex:
-        {
-            StoreMetaDataInBuffersParams *storeParams =
-                    (StoreMetaDataInBuffersParams*)params;
-            if (storeParams->nPortIndex != 0) {
-                ALOGE("%s: StoreMetadataInBuffersParams.nPortIndex not zero!",
-                        __FUNCTION__);
-                return OMX_ErrorUndefined;
-            }
-
-            mStoreMetaDataInBuffers = storeParams->bStoreMetaData;
-            ALOGV("StoreMetaDataInBuffers set to: %s",
-                    mStoreMetaDataInBuffers ? " true" : "false");
-
-            if (mStoreMetaDataInBuffers) {
-                mVideoColorFormat = OMX_COLOR_FormatAndroidOpaque;
-            }
-
-            return OMX_ErrorNone;
-        }
-
         default:
-            return SimpleSoftOMXComponent::internalSetParameter(index, params);
+            return SoftVideoEncoderOMXComponent::internalSetParameter(index, params);
     }
 }
 
@@ -789,11 +556,11 @@
             if (inHeader->nFilledLen > 0) {
                 AVCFrameIO videoInput;
                 memset(&videoInput, 0, sizeof(videoInput));
-                videoInput.height = ((mVideoHeight  + 15) >> 4) << 4;
-                videoInput.pitch = ((mVideoWidth + 15) >> 4) << 4;
+                videoInput.height = align(mHeight, 16);
+                videoInput.pitch = align(mWidth, 16);
                 videoInput.coding_timestamp = (inHeader->nTimeStamp + 500) / 1000;  // in ms
                 const uint8_t *inputData = NULL;
-                if (mStoreMetaDataInBuffers) {
+                if (mInputDataIsMeta) {
                     if (inHeader->nFilledLen != 8) {
                         ALOGE("MetaData buffer is wrong size! "
                                 "(got %u bytes, expected 8)", inHeader->nFilledLen);
@@ -803,9 +570,9 @@
                     }
                     inputData =
                         extractGraphicBuffer(
-                                mInputFrameData, (mVideoWidth * mVideoHeight * 3) >> 1,
+                                mInputFrameData, (mWidth * mHeight * 3) >> 1,
                                 inHeader->pBuffer + inHeader->nOffset, inHeader->nFilledLen,
-                                mVideoWidth, mVideoHeight);
+                                mWidth, mHeight);
                     if (inputData == NULL) {
                         ALOGE("Unable to extract gralloc buffer in metadata mode");
                         mSignalledError = true;
@@ -815,9 +582,9 @@
                     // TODO: Verify/convert pixel format enum
                 } else {
                     inputData = (const uint8_t *)inHeader->pBuffer + inHeader->nOffset;
-                    if (mVideoColorFormat != OMX_COLOR_FormatYUV420Planar) {
+                    if (mColorFormat != OMX_COLOR_FormatYUV420Planar) {
                         ConvertYUV420SemiPlanarToYUV420Planar(
-                            inputData, mInputFrameData, mVideoWidth, mVideoHeight);
+                            inputData, mInputFrameData, mWidth, mHeight);
                         inputData = mInputFrameData;
                     }
                 }
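Several conversions above rely on mFramerate being Q16 fixed point (frames per second scaled by 65536). Worked numbers for 30 fps, as a sanity check:

    const int32_t framerateQ16 = 30 << 16;       // 30 fps in Q16 == 1966080
    (1000 * framerateQ16) >> 16;                 // == 30000, fps * 1000 (PV's "frames/ms" unit)
    (1 /* sec */ * framerateQ16) >> 16;          // == 30 frames per IDR period
    ((int64_t)1000 << 16) / framerateQ16;        // == 33 ticks per source frame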
diff --git a/media/libstagefright/codecs/avc/enc/SoftAVCEncoder.h b/media/libstagefright/codecs/avc/enc/SoftAVCEncoder.h
index 130593f..f31c1f4 100644
--- a/media/libstagefright/codecs/avc/enc/SoftAVCEncoder.h
+++ b/media/libstagefright/codecs/avc/enc/SoftAVCEncoder.h
@@ -68,12 +68,6 @@
         int32_t mFlags;
     } InputBufferInfo;
 
-    int32_t  mVideoWidth;
-    int32_t  mVideoHeight;
-    int32_t  mVideoFrameRate;
-    int32_t  mVideoBitRate;
-    int32_t  mVideoColorFormat;
-    bool     mStoreMetaDataInBuffers;
     int32_t  mIDRFrameRefreshIntervalInSec;
     AVCProfile mAVCEncProfile;
     AVCLevel   mAVCEncLevel;
@@ -94,7 +88,6 @@
     Vector<MediaBuffer *> mOutputBuffers;
     Vector<InputBufferInfo> mInputBufferInfoVec;
 
-    void initPorts();
     OMX_ERRORTYPE initEncParams();
     OMX_ERRORTYPE initEncoder();
     OMX_ERRORTYPE releaseEncoder();
diff --git a/media/libstagefright/codecs/hevcdec/SoftHEVC.cpp b/media/libstagefright/codecs/hevcdec/SoftHEVC.cpp
index f4cba54..cddd176 100644
--- a/media/libstagefright/codecs/hevcdec/SoftHEVC.cpp
+++ b/media/libstagefright/codecs/hevcdec/SoftHEVC.cpp
@@ -26,6 +26,7 @@
 #include "SoftHEVC.h"
 
 #include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AUtils.h>
 #include <media/stagefright/MediaDefs.h>
 #include <OMX_VideoExt.h>
 
@@ -75,8 +76,12 @@
       mNewWidth(mWidth),
       mNewHeight(mHeight),
       mChangingResolution(false) {
-    initPorts(kNumBuffers, INPUT_BUF_SIZE, kNumBuffers,
-            CODEC_MIME_TYPE);
+    const size_t kMinCompressionRatio = 4 /* compressionRatio (for Level 4+) */;
+    const size_t kMaxOutputBufferSize = 2048 * 2048 * 3 / 2;
+    // INPUT_BUF_SIZE is given by the HEVC codec as the minimum input size
+    initPorts(
+            kNumBuffers, max(kMaxOutputBufferSize / kMinCompressionRatio, (size_t)INPUT_BUF_SIZE),
+            kNumBuffers, CODEC_MIME_TYPE, kMinCompressionRatio);
     CHECK_EQ(initDecoder(), (status_t)OK);
 }
 
@@ -644,7 +649,7 @@
             // The decoder should be fixed so that |u4_error_code| instead of |status| returns
             // IHEVCD_UNSUPPORTED_DIMENSIONS.
             bool unsupportedDimensions =
-                ((IHEVCD_UNSUPPORTED_DIMENSIONS == status)
+                ((IHEVCD_UNSUPPORTED_DIMENSIONS == (IHEVCD_CXA_ERROR_CODES_T)status)
                     || (IHEVCD_UNSUPPORTED_DIMENSIONS == s_dec_op.u4_error_code));
             bool resChanged = (IVD_RES_CHANGED == (s_dec_op.u4_error_code & 0xFF));
 
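Worked numbers for the new input-size formula: the largest supported frame is 2048x2048 YUV420, i.e. 2048 * 2048 * 3 / 2 = 6291456 bytes, so at the assumed minimum 4:1 compression the worst-case access unit is 6291456 / 4 = 1572864 bytes; the max() keeps INPUT_BUF_SIZE as a floor in case the codec's stated minimum is larger:

    const size_t kMaxOutputBufferSize = 2048 * 2048 * 3 / 2;  // 6291456 bytes
    const size_t minInput = kMaxOutputBufferSize / 4;         // 1572864 bytes at 4:1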
diff --git a/media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.cpp b/media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.cpp
index e399984..ede645c 100644
--- a/media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.cpp
+++ b/media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.cpp
@@ -21,6 +21,7 @@
 #include "SoftMPEG4.h"
 
 #include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AUtils.h>
 #include <media/stagefright/MediaDefs.h>
 #include <media/stagefright/MediaErrors.h>
 #include <media/IOMX.h>
@@ -70,7 +71,7 @@
       mPvTime(0) {
     initPorts(
             kNumInputBuffers,
-            8192 /* inputBufferSize */,
+            352 * 288 * 3 / 2 /* minInputBufferSize */,
             kNumOutputBuffers,
             (mMode == MODE_MPEG4)
             ? MEDIA_MIMETYPE_VIDEO_MPEG4 : MEDIA_MIMETYPE_VIDEO_H263);
@@ -353,14 +354,14 @@
     }
 }
 
-void SoftMPEG4::updatePortDefinitions() {
-    SoftVideoDecoderOMXComponent::updatePortDefinitions();
+void SoftMPEG4::updatePortDefinitions(bool updateCrop, bool updateInputSize) {
+    SoftVideoDecoderOMXComponent::updatePortDefinitions(updateCrop, updateInputSize);
 
     /* We have to align our width and height - this should affect stride! */
     OMX_PARAM_PORTDEFINITIONTYPE *def = &editPortInfo(kOutputPortIndex)->mDef;
-    def->nBufferSize =
-        (((def->format.video.nFrameWidth + 15) & -16)
-            * ((def->format.video.nFrameHeight + 15) & -16) * 3) / 2;
+    def->format.video.nStride = align(def->format.video.nStride, 16);
+    def->format.video.nSliceHeight = align(def->format.video.nSliceHeight, 16);
+    def->nBufferSize = (def->format.video.nStride * def->format.video.nSliceHeight * 3) / 2;
 }
 
 }  // namespace android
diff --git a/media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.h b/media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.h
index 8a06a00..4114e7d 100644
--- a/media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.h
+++ b/media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.h
@@ -66,7 +66,7 @@
 
     status_t initDecoder();
 
-    virtual void updatePortDefinitions();
+    virtual void updatePortDefinitions(bool updateCrop = true, bool updateInputSize = false);
     bool handlePortSettingsChange();
 
     DISALLOW_EVIL_CONSTRUCTORS(SoftMPEG4);
diff --git a/media/libstagefright/codecs/m4v_h263/enc/SoftMPEG4Encoder.cpp b/media/libstagefright/codecs/m4v_h263/enc/SoftMPEG4Encoder.cpp
index 400f320..fa3486c 100644
--- a/media/libstagefright/codecs/m4v_h263/enc/SoftMPEG4Encoder.cpp
+++ b/media/libstagefright/codecs/m4v_h263/enc/SoftMPEG4Encoder.cpp
@@ -17,6 +17,7 @@
 //#define LOG_NDEBUG 0
 #define LOG_TAG "SoftMPEG4Encoder"
 #include <utils/Log.h>
+#include <utils/misc.h>
 
 #include "mp4enc_api.h"
 #include "OMX_Video.h"
@@ -24,6 +25,7 @@
 #include <HardwareAPI.h>
 #include <MetadataBufferType.h>
 #include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AUtils.h>
 #include <media/stagefright/MediaDefs.h>
 #include <media/stagefright/MediaErrors.h>
 #include <media/stagefright/MetaData.h>
@@ -46,19 +48,30 @@
     params->nVersion.s.nStep = 0;
 }
 
+static const CodecProfileLevel kMPEG4ProfileLevels[] = {
+    { OMX_VIDEO_MPEG4ProfileCore, OMX_VIDEO_MPEG4Level2 },
+};
+
+static const CodecProfileLevel kH263ProfileLevels[] = {
+    { OMX_VIDEO_H263ProfileBaseline, OMX_VIDEO_H263Level45 },
+};
+
 SoftMPEG4Encoder::SoftMPEG4Encoder(
             const char *name,
+            const char *componentRole,
+            OMX_VIDEO_CODINGTYPE codingType,
+            const char *mime,
+            const CodecProfileLevel *profileLevels,
+            size_t numProfileLevels,
             const OMX_CALLBACKTYPE *callbacks,
             OMX_PTR appData,
             OMX_COMPONENTTYPE **component)
-    : SoftVideoEncoderOMXComponent(name, callbacks, appData, component),
+    : SoftVideoEncoderOMXComponent(
+            name, componentRole, codingType,
+            profileLevels, numProfileLevels,
+            176 /* width */, 144 /* height */,
+            callbacks, appData, component),
       mEncodeMode(COMBINE_MODE_WITH_ERR_RES),
-      mVideoWidth(176),
-      mVideoHeight(144),
-      mVideoFrameRate(30),
-      mVideoBitRate(192000),
-      mVideoColorFormat(OMX_COLOR_FormatYUV420Planar),
-      mStoreMetaDataInBuffers(false),
       mIDRFrameRefreshIntervalInSec(1),
       mNumInputFrames(-1),
       mStarted(false),
@@ -68,13 +81,15 @@
       mEncParams(new tagvideoEncOptions),
       mInputFrameData(NULL) {
 
-   if (!strcmp(name, "OMX.google.h263.encoder")) {
+    if (codingType == OMX_VIDEO_CodingH263) {
         mEncodeMode = H263_MODE;
-    } else {
-        CHECK(!strcmp(name, "OMX.google.mpeg4.encoder"));
     }
 
-    initPorts();
+    // 256 * 1024 is a magic number for PV's encoder, not sure why
+    const size_t kOutputBufferSize = 256 * 1024;
+
+    initPorts(kNumBuffers, kNumBuffers, kOutputBufferSize, mime);
+
     ALOGI("Construct SoftMPEG4Encoder");
 }
 
@@ -98,9 +113,9 @@
         return OMX_ErrorUndefined;
     }
     mEncParams->encMode = mEncodeMode;
-    mEncParams->encWidth[0] = mVideoWidth;
-    mEncParams->encHeight[0] = mVideoHeight;
-    mEncParams->encFrameRate[0] = mVideoFrameRate;
+    mEncParams->encWidth[0] = mWidth;
+    mEncParams->encHeight[0] = mHeight;
+    mEncParams->encFrameRate[0] = mFramerate >> 16; // mFramerate is in Q16 format
     mEncParams->rcType = VBR_1;
     mEncParams->vbvDelay = 5.0f;
 
@@ -111,27 +126,26 @@
     mEncParams->rvlcEnable = PV_OFF;
     mEncParams->numLayers = 1;
     mEncParams->timeIncRes = 1000;
-    mEncParams->tickPerSrc = mEncParams->timeIncRes / mVideoFrameRate;
+    mEncParams->tickPerSrc = ((int64_t)mEncParams->timeIncRes << 16) / mFramerate;
 
-    mEncParams->bitRate[0] = mVideoBitRate;
+    mEncParams->bitRate[0] = mBitrate;
     mEncParams->iQuant[0] = 15;
     mEncParams->pQuant[0] = 12;
     mEncParams->quantType[0] = 0;
     mEncParams->noFrameSkipped = PV_OFF;
 
-    if (mVideoColorFormat != OMX_COLOR_FormatYUV420Planar
-            || mStoreMetaDataInBuffers) {
+    if (mColorFormat != OMX_COLOR_FormatYUV420Planar || mInputDataIsMeta) {
         // Color conversion is needed.
         free(mInputFrameData);
         mInputFrameData =
-            (uint8_t *) malloc((mVideoWidth * mVideoHeight * 3 ) >> 1);
+            (uint8_t *) malloc((mWidth * mHeight * 3 ) >> 1);
         CHECK(mInputFrameData != NULL);
     }
 
     // PV's MPEG4 encoder requires the video dimensions to be a multiple of 16
-    if (mVideoWidth % 16 != 0 || mVideoHeight % 16 != 0) {
+    if (mWidth % 16 != 0 || mHeight % 16 != 0) {
         ALOGE("Video frame size %dx%d must be a multiple of 16",
-            mVideoWidth, mVideoHeight);
+            mWidth, mHeight);
         return OMX_ErrorBadParameter;
     }
 
@@ -142,7 +156,7 @@
         mEncParams->intraPeriod = 1;  // All I frames
     } else {
         mEncParams->intraPeriod =
-            (mIDRFrameRefreshIntervalInSec * mVideoFrameRate);
+            (mIDRFrameRefreshIntervalInSec * mFramerate) >> 16;
     }
 
     mEncParams->numIntraMB = 0;
@@ -201,81 +215,9 @@
     return OMX_ErrorNone;
 }
 
-void SoftMPEG4Encoder::initPorts() {
-    OMX_PARAM_PORTDEFINITIONTYPE def;
-    InitOMXParams(&def);
-
-    const size_t kInputBufferSize = (mVideoWidth * mVideoHeight * 3) >> 1;
-
-    // 256 * 1024 is a magic number for PV's encoder, not sure why
-    const size_t kOutputBufferSize =
-        (kInputBufferSize > 256 * 1024)
-            ? kInputBufferSize: 256 * 1024;
-
-    def.nPortIndex = 0;
-    def.eDir = OMX_DirInput;
-    def.nBufferCountMin = kNumBuffers;
-    def.nBufferCountActual = def.nBufferCountMin;
-    def.nBufferSize = kInputBufferSize;
-    def.bEnabled = OMX_TRUE;
-    def.bPopulated = OMX_FALSE;
-    def.eDomain = OMX_PortDomainVideo;
-    def.bBuffersContiguous = OMX_FALSE;
-    def.nBufferAlignment = 1;
-
-    def.format.video.cMIMEType = const_cast<char *>("video/raw");
-
-    def.format.video.eCompressionFormat = OMX_VIDEO_CodingUnused;
-    def.format.video.eColorFormat = OMX_COLOR_FormatYUV420Planar;
-    def.format.video.xFramerate = (mVideoFrameRate << 16);  // Q16 format
-    def.format.video.nBitrate = mVideoBitRate;
-    def.format.video.nFrameWidth = mVideoWidth;
-    def.format.video.nFrameHeight = mVideoHeight;
-    def.format.video.nStride = mVideoWidth;
-    def.format.video.nSliceHeight = mVideoHeight;
-
-    addPort(def);
-
-    def.nPortIndex = 1;
-    def.eDir = OMX_DirOutput;
-    def.nBufferCountMin = kNumBuffers;
-    def.nBufferCountActual = def.nBufferCountMin;
-    def.nBufferSize = kOutputBufferSize;
-    def.bEnabled = OMX_TRUE;
-    def.bPopulated = OMX_FALSE;
-    def.eDomain = OMX_PortDomainVideo;
-    def.bBuffersContiguous = OMX_FALSE;
-    def.nBufferAlignment = 2;
-
-    def.format.video.cMIMEType =
-        (mEncodeMode == COMBINE_MODE_WITH_ERR_RES)
-            ? const_cast<char *>(MEDIA_MIMETYPE_VIDEO_MPEG4)
-            : const_cast<char *>(MEDIA_MIMETYPE_VIDEO_H263);
-
-    def.format.video.eCompressionFormat =
-        (mEncodeMode == COMBINE_MODE_WITH_ERR_RES)
-            ? OMX_VIDEO_CodingMPEG4
-            : OMX_VIDEO_CodingH263;
-
-    def.format.video.eColorFormat = OMX_COLOR_FormatUnused;
-    def.format.video.xFramerate = (0 << 16);  // Q16 format
-    def.format.video.nBitrate = mVideoBitRate;
-    def.format.video.nFrameWidth = mVideoWidth;
-    def.format.video.nFrameHeight = mVideoHeight;
-    def.format.video.nStride = mVideoWidth;
-    def.format.video.nSliceHeight = mVideoHeight;
-
-    addPort(def);
-}
-
 OMX_ERRORTYPE SoftMPEG4Encoder::internalGetParameter(
         OMX_INDEXTYPE index, OMX_PTR params) {
     switch (index) {
-        case OMX_IndexParamVideoErrorCorrection:
-        {
-            return OMX_ErrorNotImplemented;
-        }
-
         case OMX_IndexParamVideoBitrate:
         {
             OMX_VIDEO_PARAM_BITRATETYPE *bitRate =
@@ -286,41 +228,7 @@
             }
 
             bitRate->eControlRate = OMX_Video_ControlRateVariable;
-            bitRate->nTargetBitrate = mVideoBitRate;
-            return OMX_ErrorNone;
-        }
-
-        case OMX_IndexParamVideoPortFormat:
-        {
-            OMX_VIDEO_PARAM_PORTFORMATTYPE *formatParams =
-                (OMX_VIDEO_PARAM_PORTFORMATTYPE *)params;
-
-            if (formatParams->nPortIndex > 1) {
-                return OMX_ErrorUndefined;
-            }
-
-            if (formatParams->nIndex > 2) {
-                return OMX_ErrorNoMore;
-            }
-
-            if (formatParams->nPortIndex == 0) {
-                formatParams->eCompressionFormat = OMX_VIDEO_CodingUnused;
-                if (formatParams->nIndex == 0) {
-                    formatParams->eColorFormat = OMX_COLOR_FormatYUV420Planar;
-                } else if (formatParams->nIndex == 1) {
-                    formatParams->eColorFormat = OMX_COLOR_FormatYUV420SemiPlanar;
-                } else {
-                    formatParams->eColorFormat = OMX_COLOR_FormatAndroidOpaque;
-                }
-            } else {
-                formatParams->eCompressionFormat =
-                    (mEncodeMode == COMBINE_MODE_WITH_ERR_RES)
-                        ? OMX_VIDEO_CodingMPEG4
-                        : OMX_VIDEO_CodingH263;
-
-                formatParams->eColorFormat = OMX_COLOR_FormatUnused;
-            }
-
+            bitRate->nTargetBitrate = mBitrate;
             return OMX_ErrorNone;
         }
 
@@ -369,32 +277,8 @@
             return OMX_ErrorNone;
         }
 
-        case OMX_IndexParamVideoProfileLevelQuerySupported:
-        {
-            OMX_VIDEO_PARAM_PROFILELEVELTYPE *profileLevel =
-                (OMX_VIDEO_PARAM_PROFILELEVELTYPE *)params;
-
-            if (profileLevel->nPortIndex != 1) {
-                return OMX_ErrorUndefined;
-            }
-
-            if (profileLevel->nProfileIndex > 0) {
-                return OMX_ErrorNoMore;
-            }
-
-            if (mEncodeMode == H263_MODE) {
-                profileLevel->eProfile = OMX_VIDEO_H263ProfileBaseline;
-                profileLevel->eLevel = OMX_VIDEO_H263Level45;
-            } else {
-                profileLevel->eProfile = OMX_VIDEO_MPEG4ProfileCore;
-                profileLevel->eLevel = OMX_VIDEO_MPEG4Level2;
-            }
-
-            return OMX_ErrorNone;
-        }
-
         default:
-            return SimpleSoftOMXComponent::internalGetParameter(index, params);
+            return SoftVideoEncoderOMXComponent::internalGetParameter(index, params);
     }
 }
 
@@ -403,11 +287,6 @@
     int32_t indexFull = index;
 
     switch (indexFull) {
-        case OMX_IndexParamVideoErrorCorrection:
-        {
-            return OMX_ErrorNotImplemented;
-        }
-
         case OMX_IndexParamVideoBitrate:
         {
             OMX_VIDEO_PARAM_BITRATETYPE *bitRate =
@@ -418,116 +297,7 @@
                 return OMX_ErrorUndefined;
             }
 
-            mVideoBitRate = bitRate->nTargetBitrate;
-            return OMX_ErrorNone;
-        }
-
-        case OMX_IndexParamPortDefinition:
-        {
-            OMX_PARAM_PORTDEFINITIONTYPE *def =
-                (OMX_PARAM_PORTDEFINITIONTYPE *)params;
-            if (def->nPortIndex > 1) {
-                return OMX_ErrorUndefined;
-            }
-
-            if (def->nPortIndex == 0) {
-                if (def->format.video.eCompressionFormat != OMX_VIDEO_CodingUnused ||
-                    (def->format.video.eColorFormat != OMX_COLOR_FormatYUV420Planar &&
-                     def->format.video.eColorFormat != OMX_COLOR_FormatYUV420SemiPlanar &&
-                     def->format.video.eColorFormat != OMX_COLOR_FormatAndroidOpaque)) {
-                    return OMX_ErrorUndefined;
-                }
-            } else {
-                if ((mEncodeMode == COMBINE_MODE_WITH_ERR_RES &&
-                        def->format.video.eCompressionFormat != OMX_VIDEO_CodingMPEG4) ||
-                    (mEncodeMode == H263_MODE &&
-                        def->format.video.eCompressionFormat != OMX_VIDEO_CodingH263) ||
-                    (def->format.video.eColorFormat != OMX_COLOR_FormatUnused)) {
-                    return OMX_ErrorUndefined;
-                }
-            }
-
-            OMX_ERRORTYPE err = SimpleSoftOMXComponent::internalSetParameter(index, params);
-            if (OMX_ErrorNone != err) {
-                return err;
-            }
-
-            if (def->nPortIndex == 0) {
-                mVideoWidth = def->format.video.nFrameWidth;
-                mVideoHeight = def->format.video.nFrameHeight;
-                mVideoFrameRate = def->format.video.xFramerate >> 16;
-                mVideoColorFormat = def->format.video.eColorFormat;
-
-                OMX_PARAM_PORTDEFINITIONTYPE *portDef =
-                    &editPortInfo(0)->mDef;
-                portDef->format.video.nFrameWidth = mVideoWidth;
-                portDef->format.video.nFrameHeight = mVideoHeight;
-                portDef->format.video.nStride = portDef->format.video.nFrameWidth;
-                portDef->format.video.nSliceHeight = portDef->format.video.nFrameHeight;
-                portDef->format.video.xFramerate = def->format.video.xFramerate;
-                portDef->format.video.eColorFormat =
-                    (OMX_COLOR_FORMATTYPE) mVideoColorFormat;
-                portDef->nBufferSize =
-                    (portDef->format.video.nStride * portDef->format.video.nSliceHeight * 3) / 2;
-                portDef = &editPortInfo(1)->mDef;
-                portDef->format.video.nFrameWidth = mVideoWidth;
-                portDef->format.video.nFrameHeight = mVideoHeight;
-            } else {
-                mVideoBitRate = def->format.video.nBitrate;
-            }
-
-            return OMX_ErrorNone;
-        }
-
-        case OMX_IndexParamStandardComponentRole:
-        {
-            const OMX_PARAM_COMPONENTROLETYPE *roleParams =
-                (const OMX_PARAM_COMPONENTROLETYPE *)params;
-
-            if (strncmp((const char *)roleParams->cRole,
-                        (mEncodeMode == H263_MODE)
-                            ? "video_encoder.h263": "video_encoder.mpeg4",
-                        OMX_MAX_STRINGNAME_SIZE - 1)) {
-                return OMX_ErrorUndefined;
-            }
-
-            return OMX_ErrorNone;
-        }
-
-        case OMX_IndexParamVideoPortFormat:
-        {
-            const OMX_VIDEO_PARAM_PORTFORMATTYPE *formatParams =
-                (const OMX_VIDEO_PARAM_PORTFORMATTYPE *)params;
-
-            if (formatParams->nPortIndex > 1) {
-                return OMX_ErrorUndefined;
-            }
-
-            if (formatParams->nIndex > 2) {
-                return OMX_ErrorNoMore;
-            }
-
-            if (formatParams->nPortIndex == 0) {
-                if (formatParams->eCompressionFormat != OMX_VIDEO_CodingUnused ||
-                    ((formatParams->nIndex == 0 &&
-                      formatParams->eColorFormat != OMX_COLOR_FormatYUV420Planar) ||
-                    (formatParams->nIndex == 1 &&
-                     formatParams->eColorFormat != OMX_COLOR_FormatYUV420SemiPlanar) ||
-                    (formatParams->nIndex == 2 &&
-                     formatParams->eColorFormat != OMX_COLOR_FormatAndroidOpaque) )) {
-                    return OMX_ErrorUndefined;
-                }
-                mVideoColorFormat = formatParams->eColorFormat;
-            } else {
-                if ((mEncodeMode == H263_MODE &&
-                        formatParams->eCompressionFormat != OMX_VIDEO_CodingH263) ||
-                    (mEncodeMode == COMBINE_MODE_WITH_ERR_RES &&
-                        formatParams->eCompressionFormat != OMX_VIDEO_CodingMPEG4) ||
-                    formatParams->eColorFormat != OMX_COLOR_FormatUnused) {
-                    return OMX_ErrorUndefined;
-                }
-            }
-
+            mBitrate = bitRate->nTargetBitrate;
             return OMX_ErrorNone;
         }
 
@@ -578,29 +348,8 @@
             return OMX_ErrorNone;
         }
 
-        case kStoreMetaDataExtensionIndex:
-        {
-            StoreMetaDataInBuffersParams *storeParams =
-                    (StoreMetaDataInBuffersParams*)params;
-            if (storeParams->nPortIndex != 0) {
-                ALOGE("%s: StoreMetadataInBuffersParams.nPortIndex not zero!",
-                        __FUNCTION__);
-                return OMX_ErrorUndefined;
-            }
-
-            mStoreMetaDataInBuffers = storeParams->bStoreMetaData;
-            ALOGV("StoreMetaDataInBuffers set to: %s",
-                    mStoreMetaDataInBuffers ? " true" : "false");
-
-            if (mStoreMetaDataInBuffers) {
-                mVideoColorFormat = OMX_COLOR_FormatAndroidOpaque;
-            }
-
-            return OMX_ErrorNone;
-        }
-
         default:
-            return SimpleSoftOMXComponent::internalSetParameter(index, params);
+            return SoftVideoEncoderOMXComponent::internalSetParameter(index, params);
     }
 }
 
@@ -663,7 +412,7 @@
 
         if (inHeader->nFilledLen > 0) {
             const uint8_t *inputData = NULL;
-            if (mStoreMetaDataInBuffers) {
+            if (mInputDataIsMeta) {
                 if (inHeader->nFilledLen != 8) {
                     ALOGE("MetaData buffer is wrong size! "
                             "(got %u bytes, expected 8)", inHeader->nFilledLen);
@@ -673,9 +422,9 @@
                 }
                 inputData =
                     extractGraphicBuffer(
-                            mInputFrameData, (mVideoWidth * mVideoHeight * 3) >> 1,
+                            mInputFrameData, (mWidth * mHeight * 3) >> 1,
                             inHeader->pBuffer + inHeader->nOffset, inHeader->nFilledLen,
-                            mVideoWidth, mVideoHeight);
+                            mWidth, mHeight);
                 if (inputData == NULL) {
                     ALOGE("Unable to extract gralloc buffer in metadata mode");
                     mSignalledError = true;
@@ -684,9 +433,9 @@
                 }
             } else {
                 inputData = (const uint8_t *)inHeader->pBuffer + inHeader->nOffset;
-                if (mVideoColorFormat != OMX_COLOR_FormatYUV420Planar) {
+                if (mColorFormat != OMX_COLOR_FormatYUV420Planar) {
                     ConvertYUV420SemiPlanarToYUV420Planar(
-                        inputData, mInputFrameData, mVideoWidth, mVideoHeight);
+                        inputData, mInputFrameData, mWidth, mHeight);
                     inputData = mInputFrameData;
                 }
             }
@@ -696,8 +445,8 @@
             VideoEncFrameIO vin, vout;
             memset(&vin, 0, sizeof(vin));
             memset(&vout, 0, sizeof(vout));
-            vin.height = ((mVideoHeight  + 15) >> 4) << 4;
-            vin.pitch = ((mVideoWidth + 15) >> 4) << 4;
+            vin.height = align(mHeight, 16);
+            vin.pitch = align(mWidth, 16);
             vin.timestamp = (inHeader->nTimeStamp + 500) / 1000;  // in ms
             vin.yChan = (uint8_t *)inputData;
             vin.uChan = vin.yChan + vin.height * vin.pitch;
@@ -745,5 +494,19 @@
 android::SoftOMXComponent *createSoftOMXComponent(
         const char *name, const OMX_CALLBACKTYPE *callbacks,
         OMX_PTR appData, OMX_COMPONENTTYPE **component) {
-    return new android::SoftMPEG4Encoder(name, callbacks, appData, component);
+    using namespace android;
+    if (!strcmp(name, "OMX.google.h263.encoder")) {
+        return new android::SoftMPEG4Encoder(
+                name, "video_encoder.h263", OMX_VIDEO_CodingH263, MEDIA_MIMETYPE_VIDEO_H263,
+                kH263ProfileLevels, NELEM(kH263ProfileLevels),
+                callbacks, appData, component);
+    } else if (!strcmp(name, "OMX.google.mpeg4.encoder")) {
+        return new android::SoftMPEG4Encoder(
+                name, "video_encoder.mpeg4", OMX_VIDEO_CodingMPEG4, MEDIA_MIMETYPE_VIDEO_MPEG4,
+                kMPEG4ProfileLevels, NELEM(kMPEG4ProfileLevels),
+                callbacks, appData, component);
+    } else {
+        CHECK(!"Unknown component");
+    }
+    return NULL;
 }
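
The encoder now inherits mFramerate from the base class in Q16 fixed point, so the integer-fps conversions above are explicit shifts. A self-contained sketch of those conversions (names are illustrative, not from the patch):

    #include <stdint.h>

    // fps <-> Q16 fixed point, as stored in mFramerate.
    static inline uint32_t fpsToQ16(uint32_t fps) { return fps << 16; }
    static inline uint32_t q16ToFps(uint32_t q16) { return q16 >> 16; }

    // Ticks per source frame, mirroring
    //   tickPerSrc = ((int64_t)timeIncRes << 16) / mFramerate;
    // widened to 64 bit so the left shift cannot overflow.
    static inline int32_t ticksPerFrame(int32_t timeIncRes, uint32_t fpsQ16) {
        return (int32_t)(((int64_t)timeIncRes << 16) / fpsQ16);
    }
    // e.g. ticksPerFrame(1000, fpsToQ16(30)) == 33
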
diff --git a/media/libstagefright/codecs/m4v_h263/enc/SoftMPEG4Encoder.h b/media/libstagefright/codecs/m4v_h263/enc/SoftMPEG4Encoder.h
index b0605b4..25ecdc9 100644
--- a/media/libstagefright/codecs/m4v_h263/enc/SoftMPEG4Encoder.h
+++ b/media/libstagefright/codecs/m4v_h263/enc/SoftMPEG4Encoder.h
@@ -25,11 +25,17 @@
 
 namespace android {
 
+struct CodecProfileLevel;
 struct MediaBuffer;
 
 struct SoftMPEG4Encoder : public SoftVideoEncoderOMXComponent {
     SoftMPEG4Encoder(
             const char *name,
+            const char *componentRole,
+            OMX_VIDEO_CODINGTYPE codingType,
+            const char *mime,
+            const CodecProfileLevel *profileLevels,
+            size_t numProfileLevels,
             const OMX_CALLBACKTYPE *callbacks,
             OMX_PTR appData,
             OMX_COMPONENTTYPE **component);
@@ -58,12 +64,6 @@
     } InputBufferInfo;
 
     MP4EncodingMode mEncodeMode;
-    int32_t  mVideoWidth;
-    int32_t  mVideoHeight;
-    int32_t  mVideoFrameRate;
-    int32_t  mVideoBitRate;
-    int32_t  mVideoColorFormat;
-    bool     mStoreMetaDataInBuffers;
     int32_t  mIDRFrameRefreshIntervalInSec;
 
     int64_t  mNumInputFrames;
@@ -76,7 +76,6 @@
     uint8_t               *mInputFrameData;
     Vector<InputBufferInfo> mInputBufferInfoVec;
 
-    void initPorts();
     OMX_ERRORTYPE initEncParams();
     OMX_ERRORTYPE initEncoder();
     OMX_ERRORTYPE releaseEncoder();
diff --git a/media/libstagefright/codecs/on2/dec/SoftVPX.cpp b/media/libstagefright/codecs/on2/dec/SoftVPX.cpp
index 87d6961..8a95643 100644
--- a/media/libstagefright/codecs/on2/dec/SoftVPX.cpp
+++ b/media/libstagefright/codecs/on2/dec/SoftVPX.cpp
@@ -40,10 +40,13 @@
       mMode(codingType == OMX_VIDEO_CodingVP8 ? MODE_VP8 : MODE_VP9),
       mCtx(NULL),
       mImg(NULL) {
-    initPorts(kNumBuffers, 768 * 1024 /* inputBufferSize */,
-            kNumBuffers,
-            codingType == OMX_VIDEO_CodingVP8 ? MEDIA_MIMETYPE_VIDEO_VP8 : MEDIA_MIMETYPE_VIDEO_VP9);
-
+    // arbitrary, borrowed from AVC/HEVC, as VPx does not specify a min compression ratio
+    const size_t kMinCompressionRatio = mMode == MODE_VP8 ? 2 : 4;
+    const char *mime = mMode == MODE_VP8 ? MEDIA_MIMETYPE_VIDEO_VP8 : MEDIA_MIMETYPE_VIDEO_VP9;
+    const size_t kMaxOutputBufferSize = 2048 * 2048 * 3 / 2;
+    initPorts(
+            kNumBuffers, kMaxOutputBufferSize / kMinCompressionRatio /* inputBufferSize */,
+            kNumBuffers, mime, kMinCompressionRatio);
     CHECK_EQ(initDecoder(), (status_t)OK);
 }
 
diff --git a/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.cpp b/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.cpp
index 0285feb..970acf3 100644
--- a/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.cpp
+++ b/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.cpp
@@ -19,6 +19,7 @@
 #include "SoftVPXEncoder.h"
 
 #include <utils/Log.h>
+#include <utils/misc.h>
 
 #include <media/hardware/HardwareAPI.h>
 #include <media/hardware/MetadataBufferType.h>
@@ -50,23 +51,29 @@
     return cpuCoreCount;
 }
 
+static const CodecProfileLevel kProfileLevels[] = {
+    { OMX_VIDEO_VP8ProfileMain, OMX_VIDEO_VP8Level_Version0 },
+    { OMX_VIDEO_VP8ProfileMain, OMX_VIDEO_VP8Level_Version1 },
+    { OMX_VIDEO_VP8ProfileMain, OMX_VIDEO_VP8Level_Version2 },
+    { OMX_VIDEO_VP8ProfileMain, OMX_VIDEO_VP8Level_Version3 },
+};
+
 SoftVPXEncoder::SoftVPXEncoder(const char *name,
                                const OMX_CALLBACKTYPE *callbacks,
                                OMX_PTR appData,
                                OMX_COMPONENTTYPE **component)
-    : SoftVideoEncoderOMXComponent(name, callbacks, appData, component),
+    : SoftVideoEncoderOMXComponent(
+            name, "video_encoder.vp8", OMX_VIDEO_CodingVP8,
+            kProfileLevels, NELEM(kProfileLevels),
+            176 /* width */, 144 /* height */,
+            callbacks, appData, component),
       mCodecContext(NULL),
       mCodecConfiguration(NULL),
       mCodecInterface(NULL),
-      mWidth(176),
-      mHeight(144),
-      mBitrate(192000),  // in bps
-      mFramerate(30 << 16), // in Q16 format
       mBitrateUpdated(false),
       mBitrateControlMode(VPX_VBR),  // variable bitrate
       mDCTPartitions(0),
       mErrorResilience(OMX_FALSE),
-      mColorFormat(OMX_COLOR_FormatYUV420Planar),
       mLevel(OMX_VIDEO_VP8Level_Version0),
       mKeyFrameInterval(0),
       mMinQuantizer(0),
@@ -77,11 +84,15 @@
       mTemporalPatternIdx(0),
       mLastTimestamp(0x7FFFFFFFFFFFFFFFLL),
       mConversionBuffer(NULL),
-      mInputDataIsMeta(false),
       mKeyFrameRequested(false) {
     memset(mTemporalLayerBitrateRatio, 0, sizeof(mTemporalLayerBitrateRatio));
     mTemporalLayerBitrateRatio[0] = 100;
-    initPorts();
+
+    const size_t kMinOutputBufferSize = 1024 * 1024; // arbitrary
+
+    initPorts(
+            kNumBuffers, kNumBuffers, kMinOutputBufferSize,
+            MEDIA_MIMETYPE_VIDEO_VP8, 2 /* minCompressionRatio */);
 }
 
 
@@ -89,71 +100,6 @@
     releaseEncoder();
 }
 
-
-void SoftVPXEncoder::initPorts() {
-    OMX_PARAM_PORTDEFINITIONTYPE inputPort;
-    OMX_PARAM_PORTDEFINITIONTYPE outputPort;
-
-    InitOMXParams(&inputPort);
-    InitOMXParams(&outputPort);
-
-    inputPort.nBufferCountMin = kNumBuffers;
-    inputPort.nBufferCountActual = inputPort.nBufferCountMin;
-    inputPort.bEnabled = OMX_TRUE;
-    inputPort.bPopulated = OMX_FALSE;
-    inputPort.eDomain = OMX_PortDomainVideo;
-    inputPort.bBuffersContiguous = OMX_FALSE;
-    inputPort.format.video.pNativeRender = NULL;
-    inputPort.format.video.nFrameWidth = mWidth;
-    inputPort.format.video.nFrameHeight = mHeight;
-    inputPort.format.video.nStride = inputPort.format.video.nFrameWidth;
-    inputPort.format.video.nSliceHeight = inputPort.format.video.nFrameHeight;
-    inputPort.format.video.nBitrate = 0;
-    // frameRate is in Q16 format.
-    inputPort.format.video.xFramerate = mFramerate;
-    inputPort.format.video.bFlagErrorConcealment = OMX_FALSE;
-    inputPort.nPortIndex = kInputPortIndex;
-    inputPort.eDir = OMX_DirInput;
-    inputPort.nBufferAlignment = kInputBufferAlignment;
-    inputPort.format.video.cMIMEType =
-        const_cast<char *>(MEDIA_MIMETYPE_VIDEO_RAW);
-    inputPort.format.video.eCompressionFormat = OMX_VIDEO_CodingUnused;
-    inputPort.format.video.eColorFormat = mColorFormat;
-    inputPort.format.video.pNativeWindow = NULL;
-    inputPort.nBufferSize =
-        (inputPort.format.video.nStride *
-        inputPort.format.video.nSliceHeight * 3) / 2;
-
-    addPort(inputPort);
-
-    outputPort.nBufferCountMin = kNumBuffers;
-    outputPort.nBufferCountActual = outputPort.nBufferCountMin;
-    outputPort.bEnabled = OMX_TRUE;
-    outputPort.bPopulated = OMX_FALSE;
-    outputPort.eDomain = OMX_PortDomainVideo;
-    outputPort.bBuffersContiguous = OMX_FALSE;
-    outputPort.format.video.pNativeRender = NULL;
-    outputPort.format.video.nFrameWidth = mWidth;
-    outputPort.format.video.nFrameHeight = mHeight;
-    outputPort.format.video.nStride = outputPort.format.video.nFrameWidth;
-    outputPort.format.video.nSliceHeight = outputPort.format.video.nFrameHeight;
-    outputPort.format.video.nBitrate = mBitrate;
-    outputPort.format.video.xFramerate = 0;
-    outputPort.format.video.bFlagErrorConcealment = OMX_FALSE;
-    outputPort.nPortIndex = kOutputPortIndex;
-    outputPort.eDir = OMX_DirOutput;
-    outputPort.nBufferAlignment = kOutputBufferAlignment;
-    outputPort.format.video.cMIMEType =
-        const_cast<char *>(MEDIA_MIMETYPE_VIDEO_VP8);
-    outputPort.format.video.eCompressionFormat = OMX_VIDEO_CodingVP8;
-    outputPort.format.video.eColorFormat = OMX_COLOR_FormatUnused;
-    outputPort.format.video.pNativeWindow = NULL;
-    outputPort.nBufferSize = 1024 * 1024; // arbitrary
-
-    addPort(outputPort);
-}
-
-
 status_t SoftVPXEncoder::initEncoder() {
     vpx_codec_err_t codec_return;
 
@@ -409,38 +355,6 @@
     const int32_t indexFull = index;
 
     switch (indexFull) {
-        case OMX_IndexParamVideoPortFormat: {
-            OMX_VIDEO_PARAM_PORTFORMATTYPE *formatParams =
-                (OMX_VIDEO_PARAM_PORTFORMATTYPE *)param;
-
-            if (formatParams->nPortIndex == kInputPortIndex) {
-                if (formatParams->nIndex >= kNumberOfSupportedColorFormats) {
-                    return OMX_ErrorNoMore;
-                }
-
-                // Color formats, in order of preference
-                if (formatParams->nIndex == 0) {
-                    formatParams->eColorFormat = OMX_COLOR_FormatYUV420Planar;
-                } else if (formatParams->nIndex == 1) {
-                    formatParams->eColorFormat =
-                        OMX_COLOR_FormatYUV420SemiPlanar;
-                } else {
-                    formatParams->eColorFormat = OMX_COLOR_FormatAndroidOpaque;
-                }
-
-                formatParams->eCompressionFormat = OMX_VIDEO_CodingUnused;
-                formatParams->xFramerate = mFramerate;
-                return OMX_ErrorNone;
-            } else if (formatParams->nPortIndex == kOutputPortIndex) {
-                formatParams->eCompressionFormat = OMX_VIDEO_CodingVP8;
-                formatParams->eColorFormat = OMX_COLOR_FormatUnused;
-                formatParams->xFramerate = 0;
-                return OMX_ErrorNone;
-            } else {
-                return OMX_ErrorBadPortIndex;
-            }
-        }
-
         case OMX_IndexParamVideoBitrate: {
             OMX_VIDEO_PARAM_BITRATETYPE *bitrate =
                 (OMX_VIDEO_PARAM_BITRATETYPE *)param;
@@ -495,54 +409,8 @@
                 return OMX_ErrorNone;
         }
 
-        case OMX_IndexParamVideoProfileLevelQuerySupported: {
-            OMX_VIDEO_PARAM_PROFILELEVELTYPE *profileAndLevel =
-                (OMX_VIDEO_PARAM_PROFILELEVELTYPE *)param;
-
-            if (profileAndLevel->nPortIndex != kOutputPortIndex) {
-                return OMX_ErrorUnsupportedIndex;
-            }
-
-            switch (profileAndLevel->nProfileIndex) {
-                case 0:
-                    profileAndLevel->eLevel = OMX_VIDEO_VP8Level_Version0;
-                    break;
-
-                case 1:
-                    profileAndLevel->eLevel = OMX_VIDEO_VP8Level_Version1;
-                    break;
-
-                case 2:
-                    profileAndLevel->eLevel = OMX_VIDEO_VP8Level_Version2;
-                    break;
-
-                case 3:
-                    profileAndLevel->eLevel = OMX_VIDEO_VP8Level_Version3;
-                    break;
-
-                default:
-                    return OMX_ErrorNoMore;
-            }
-
-            profileAndLevel->eProfile = OMX_VIDEO_VP8ProfileMain;
-            return OMX_ErrorNone;
-        }
-
-        case OMX_IndexParamVideoProfileLevelCurrent: {
-            OMX_VIDEO_PARAM_PROFILELEVELTYPE *profileAndLevel =
-                (OMX_VIDEO_PARAM_PROFILELEVELTYPE *)param;
-
-            if (profileAndLevel->nPortIndex != kOutputPortIndex) {
-                return OMX_ErrorUnsupportedIndex;
-            }
-
-            profileAndLevel->eLevel = mLevel;
-            profileAndLevel->eProfile = OMX_VIDEO_VP8ProfileMain;
-            return OMX_ErrorNone;
-        }
-
         default:
-            return SimpleSoftOMXComponent::internalGetParameter(index, param);
+            return SoftVideoEncoderOMXComponent::internalGetParameter(index, param);
     }
 }
 
@@ -553,30 +421,10 @@
     const int32_t indexFull = index;
 
     switch (indexFull) {
-        case OMX_IndexParamStandardComponentRole:
-            return internalSetRoleParams(
-                (const OMX_PARAM_COMPONENTROLETYPE *)param);
-
         case OMX_IndexParamVideoBitrate:
             return internalSetBitrateParams(
                 (const OMX_VIDEO_PARAM_BITRATETYPE *)param);
 
-        case OMX_IndexParamPortDefinition:
-        {
-            OMX_ERRORTYPE err = internalSetPortParams(
-                (const OMX_PARAM_PORTDEFINITIONTYPE *)param);
-
-            if (err != OMX_ErrorNone) {
-                return err;
-            }
-
-            return SimpleSoftOMXComponent::internalSetParameter(index, param);
-        }
-
-        case OMX_IndexParamVideoPortFormat:
-            return internalSetFormatParams(
-                (const OMX_VIDEO_PARAM_PORTFORMATTYPE *)param);
-
         case OMX_IndexParamVideoVp8:
             return internalSetVp8Params(
                 (const OMX_VIDEO_PARAM_VP8TYPE *)param);
@@ -585,27 +433,8 @@
             return internalSetAndroidVp8Params(
                 (const OMX_VIDEO_PARAM_ANDROID_VP8ENCODERTYPE *)param);
 
-        case OMX_IndexParamVideoProfileLevelCurrent:
-            return internalSetProfileLevel(
-                (const OMX_VIDEO_PARAM_PROFILELEVELTYPE *)param);
-
-        case kStoreMetaDataExtensionIndex:
-        {
-            // storeMetaDataInBuffers
-            const StoreMetaDataInBuffersParams *storeParam =
-                (const StoreMetaDataInBuffersParams *)param;
-
-            if (storeParam->nPortIndex != kInputPortIndex) {
-                return OMX_ErrorBadPortIndex;
-            }
-
-            mInputDataIsMeta = (storeParam->bStoreMetaData == OMX_TRUE);
-
-            return OMX_ErrorNone;
-        }
-
         default:
-            return SimpleSoftOMXComponent::internalSetParameter(index, param);
+            return SoftVideoEncoderOMXComponent::internalSetParameter(index, param);
     }
 }
 
@@ -646,29 +475,6 @@
     }
 }
 
-OMX_ERRORTYPE SoftVPXEncoder::internalSetProfileLevel(
-        const OMX_VIDEO_PARAM_PROFILELEVELTYPE* profileAndLevel) {
-    if (profileAndLevel->nPortIndex != kOutputPortIndex) {
-        return OMX_ErrorUnsupportedIndex;
-    }
-
-    if (profileAndLevel->eProfile != OMX_VIDEO_VP8ProfileMain) {
-        return OMX_ErrorBadParameter;
-    }
-
-    if (profileAndLevel->eLevel == OMX_VIDEO_VP8Level_Version0 ||
-        profileAndLevel->eLevel == OMX_VIDEO_VP8Level_Version1 ||
-        profileAndLevel->eLevel == OMX_VIDEO_VP8Level_Version2 ||
-        profileAndLevel->eLevel == OMX_VIDEO_VP8Level_Version3) {
-        mLevel = (OMX_VIDEO_VP8LEVELTYPE)profileAndLevel->eLevel;
-    } else {
-        return OMX_ErrorBadParameter;
-    }
-
-    return OMX_ErrorNone;
-}
-
-
 OMX_ERRORTYPE SoftVPXEncoder::internalSetVp8Params(
         const OMX_VIDEO_PARAM_VP8TYPE* vp8Params) {
     if (vp8Params->nPortIndex != kOutputPortIndex) {
@@ -743,95 +549,6 @@
     return OMX_ErrorNone;
 }
 
-OMX_ERRORTYPE SoftVPXEncoder::internalSetFormatParams(
-        const OMX_VIDEO_PARAM_PORTFORMATTYPE* format) {
-    if (format->nPortIndex == kInputPortIndex) {
-        if (format->eColorFormat == OMX_COLOR_FormatYUV420Planar ||
-            format->eColorFormat == OMX_COLOR_FormatYUV420SemiPlanar ||
-            format->eColorFormat == OMX_COLOR_FormatAndroidOpaque) {
-            mColorFormat = format->eColorFormat;
-
-            OMX_PARAM_PORTDEFINITIONTYPE *def = &editPortInfo(kInputPortIndex)->mDef;
-            def->format.video.eColorFormat = mColorFormat;
-
-            return OMX_ErrorNone;
-        } else {
-            ALOGE("Unsupported color format %i", format->eColorFormat);
-            return OMX_ErrorUnsupportedSetting;
-        }
-    } else if (format->nPortIndex == kOutputPortIndex) {
-        if (format->eCompressionFormat == OMX_VIDEO_CodingVP8) {
-            return OMX_ErrorNone;
-        } else {
-            return OMX_ErrorUnsupportedSetting;
-        }
-    } else {
-        return OMX_ErrorBadPortIndex;
-    }
-}
-
-
-OMX_ERRORTYPE SoftVPXEncoder::internalSetRoleParams(
-        const OMX_PARAM_COMPONENTROLETYPE* role) {
-    const char* roleText = (const char*)role->cRole;
-    const size_t roleTextMaxSize = OMX_MAX_STRINGNAME_SIZE - 1;
-
-    if (strncmp(roleText, "video_encoder.vp8", roleTextMaxSize)) {
-        ALOGE("Unsupported component role");
-        return OMX_ErrorBadParameter;
-    }
-
-    return OMX_ErrorNone;
-}
-
-
-OMX_ERRORTYPE SoftVPXEncoder::internalSetPortParams(
-        const OMX_PARAM_PORTDEFINITIONTYPE* port) {
-    if (port->nPortIndex == kInputPortIndex) {
-        mWidth = port->format.video.nFrameWidth;
-        mHeight = port->format.video.nFrameHeight;
-
-        // xFramerate comes in Q16 format, in frames per second unit
-        mFramerate = port->format.video.xFramerate;
-
-        if (port->format.video.eColorFormat == OMX_COLOR_FormatYUV420Planar ||
-            port->format.video.eColorFormat == OMX_COLOR_FormatYUV420SemiPlanar ||
-            port->format.video.eColorFormat == OMX_COLOR_FormatAndroidOpaque) {
-            mColorFormat = port->format.video.eColorFormat;
-        } else {
-            return OMX_ErrorUnsupportedSetting;
-        }
-
-        OMX_PARAM_PORTDEFINITIONTYPE *def = &editPortInfo(kInputPortIndex)->mDef;
-        def->format.video.nFrameWidth = mWidth;
-        def->format.video.nFrameHeight = mHeight;
-        def->format.video.nStride = def->format.video.nFrameWidth;
-        def->format.video.nSliceHeight = def->format.video.nFrameHeight;
-        def->format.video.xFramerate = mFramerate;
-        def->format.video.eColorFormat = mColorFormat;
-        def->nBufferSize =
-            (def->format.video.nStride * def->format.video.nSliceHeight * 3) / 2;
-        def = &editPortInfo(kOutputPortIndex)->mDef;
-        def->format.video.nFrameWidth = mWidth;
-        def->format.video.nFrameHeight = mHeight;
-
-        return OMX_ErrorNone;
-    } else if (port->nPortIndex == kOutputPortIndex) {
-        mBitrate = port->format.video.nBitrate;
-        mWidth = port->format.video.nFrameWidth;
-        mHeight = port->format.video.nFrameHeight;
-
-        OMX_PARAM_PORTDEFINITIONTYPE *def = &editPortInfo(kOutputPortIndex)->mDef;
-        def->format.video.nFrameWidth = mWidth;
-        def->format.video.nFrameHeight = mHeight;
-        def->format.video.nBitrate = mBitrate;
-        return OMX_ErrorNone;
-    } else {
-        return OMX_ErrorBadPortIndex;
-    }
-}
-
-
 OMX_ERRORTYPE SoftVPXEncoder::internalSetBitrateParams(
         const OMX_VIDEO_PARAM_BITRATETYPE* bitrate) {
     if (bitrate->nPortIndex != kOutputPortIndex) {
@@ -920,7 +637,7 @@
     return flags;
 }
 
-void SoftVPXEncoder::onQueueFilled(OMX_U32 portIndex) {
+void SoftVPXEncoder::onQueueFilled(OMX_U32 /* portIndex */) {
     // Initialize encoder if not already
     if (mCodecContext == NULL) {
         if (OK != initEncoder()) {
diff --git a/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.h b/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.h
index f4c1564..cd0a0cf 100644
--- a/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.h
+++ b/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.h
@@ -155,18 +155,6 @@
     // that specifies algorithm interface (e.g. vp8)
     vpx_codec_iface_t* mCodecInterface;
 
-    // Width of the input frames
-    int32_t mWidth;
-
-    // Height of the input frames
-    int32_t mHeight;
-
-    // Target bitrate set for the encoder, in bits per second.
-    uint32_t mBitrate;
-
-    // Target framerate set for the encoder.
-    uint32_t mFramerate;
-
      // If a request for a change in bitrate has been received.
     bool mBitrateUpdated;
 
@@ -182,9 +170,6 @@
     // is enabled in encoder
     OMX_BOOL mErrorResilience;
 
-    // Color format for the input port
-    OMX_COLOR_FORMATTYPE mColorFormat;
-
     // Encoder profile corresponding to OMX level parameter
     //
     // The inconsistency in the naming is caused by
@@ -229,14 +214,8 @@
     // indeed YUV420SemiPlanar.
     uint8_t* mConversionBuffer;
 
-    bool mInputDataIsMeta;
-
     bool mKeyFrameRequested;
 
-    // Initializes input and output OMX ports with sensible
-    // default values.
-    void initPorts();
-
     // Initializes vpx encoder with available settings.
     status_t initEncoder();
 
@@ -250,23 +229,10 @@
     // Get current encode flags
     vpx_enc_frame_flags_t getEncodeFlags();
 
-    // Handles port changes with respect to color formats
-    OMX_ERRORTYPE internalSetFormatParams(
-        const OMX_VIDEO_PARAM_PORTFORMATTYPE* format);
-
-    // Verifies the component role tried to be set to this OMX component is
-    // strictly video_encoder.vp8
-    OMX_ERRORTYPE internalSetRoleParams(
-        const OMX_PARAM_COMPONENTROLETYPE* role);
-
     // Updates bitrate to reflect port settings.
     OMX_ERRORTYPE internalSetBitrateParams(
         const OMX_VIDEO_PARAM_BITRATETYPE* bitrate);
 
-    // Handles port definition changes.
-    OMX_ERRORTYPE internalSetPortParams(
-        const OMX_PARAM_PORTDEFINITIONTYPE* port);
-
     // Handles vp8 specific parameters.
     OMX_ERRORTYPE internalSetVp8Params(
         const OMX_VIDEO_PARAM_VP8TYPE* vp8Params);
@@ -275,10 +241,6 @@
     OMX_ERRORTYPE internalSetAndroidVp8Params(
         const OMX_VIDEO_PARAM_ANDROID_VP8ENCODERTYPE* vp8AndroidParams);
 
-    // Updates encoder profile
-    OMX_ERRORTYPE internalSetProfileLevel(
-        const OMX_VIDEO_PARAM_PROFILELEVELTYPE* profileAndLevel);
-
     DISALLOW_EVIL_CONSTRUCTORS(SoftVPXEncoder);
 };
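
Both soft encoders now publish their supported profiles through a static CodecProfileLevel table handed to SoftVideoEncoderOMXComponent, replacing the per-codec OMX_IndexParamVideoProfileLevelQuerySupported switches deleted above. The base class presumably walks that table by nProfileIndex, roughly as in this sketch (an assumption about the shared implementation, not code from this patch; requires OMX_Video.h and SoftOMXComponent.h for the types):

    // Hypothetical table-driven enumeration in the shared base class.
    OMX_ERRORTYPE queryProfileLevel(
            OMX_VIDEO_PARAM_PROFILELEVELTYPE *pl,
            const CodecProfileLevel *table, size_t numEntries) {
        if (pl->nProfileIndex >= numEntries) {
            return OMX_ErrorNoMore;  // caller iterates until ErrorNoMore
        }
        pl->eProfile = table[pl->nProfileIndex].mProfile;
        pl->eLevel = table[pl->nProfileIndex].mLevel;
        return OMX_ErrorNone;
    }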
 
diff --git a/media/libstagefright/codecs/on2/h264dec/SoftAVC.cpp b/media/libstagefright/codecs/on2/h264dec/SoftAVC.cpp
index 168208f..6b8b395 100644
--- a/media/libstagefright/codecs/on2/h264dec/SoftAVC.cpp
+++ b/media/libstagefright/codecs/on2/h264dec/SoftAVC.cpp
@@ -64,9 +64,11 @@
       mHeadersDecoded(false),
       mEOSStatus(INPUT_DATA_AVAILABLE),
       mSignalledError(false) {
+    const size_t kMinCompressionRatio = 2;
+    const size_t kMaxOutputBufferSize = 2048 * 2048 * 3 / 2;
     initPorts(
-            kNumInputBuffers, 8192 /* inputBufferSize */,
-            kNumOutputBuffers, MEDIA_MIMETYPE_VIDEO_AVC);
+            kNumInputBuffers, kMaxOutputBufferSize / kMinCompressionRatio /* minInputBufferSize */,
+            kNumOutputBuffers, MEDIA_MIMETYPE_VIDEO_AVC, kMinCompressionRatio);
 
     CHECK_EQ(initDecoder(), (status_t)OK);
 }
diff --git a/media/libstagefright/data/media_codecs_google_audio.xml b/media/libstagefright/data/media_codecs_google_audio.xml
index 85f6615..a06684b 100644
--- a/media/libstagefright/data/media_codecs_google_audio.xml
+++ b/media/libstagefright/data/media_codecs_google_audio.xml
@@ -65,7 +65,8 @@
     <Encoders>
         <MediaCodec name="OMX.google.aac.encoder" type="audio/mp4a-latm">
             <Limit name="channel-count" max="6" />
-            <Limit name="sample-rate" ranges="11025,12000,16000,22050,24000,32000,44100,48000" />
+            <Limit name="sample-rate" ranges="8000,11025,12000,16000,22050,24000,32000,44100,48000" />
+            <!-- may also support 64000, 88200 and 96000 Hz -->
             <Limit name="bitrate" range="8000-960000" />
         </MediaCodec>
         <MediaCodec name="OMX.google.amrnb.encoder" type="audio/3gpp">
diff --git a/media/libstagefright/data/media_codecs_google_video.xml b/media/libstagefright/data/media_codecs_google_video.xml
index 1cbef39..7e9fa18 100644
--- a/media/libstagefright/data/media_codecs_google_video.xml
+++ b/media/libstagefright/data/media_codecs_google_video.xml
@@ -73,7 +73,7 @@
     <Encoders>
         <MediaCodec name="OMX.google.h263.encoder" type="video/3gpp">
             <!-- profiles and levels:  ProfileBaseline : Level45 -->
-            <Limit name="size" min="16x16" max="176x144" />
+            <Limit name="size" min="176x144" max="176x144" />
             <Limit name="alignment" value="16x16" />
             <Limit name="bitrate" range="1-128000" />
         </MediaCodec>
diff --git a/media/libstagefright/httplive/LiveSession.cpp b/media/libstagefright/httplive/LiveSession.cpp
index 5eb4652..0b18666 100644
--- a/media/libstagefright/httplive/LiveSession.cpp
+++ b/media/libstagefright/httplive/LiveSession.cpp
@@ -1109,11 +1109,11 @@
 }
 
 status_t LiveSession::getDuration(int64_t *durationUs) const {
-    int64_t maxDurationUs = 0ll;
+    int64_t maxDurationUs = -1ll;
     for (size_t i = 0; i < mFetcherInfos.size(); ++i) {
         int64_t fetcherDurationUs = mFetcherInfos.valueAt(i).mDurationUs;
 
-        if (fetcherDurationUs >= 0ll && fetcherDurationUs > maxDurationUs) {
+        if (fetcherDurationUs > maxDurationUs) {
             maxDurationUs = fetcherDurationUs;
         }
     }
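
Starting maxDurationUs at -1 lets getDuration() distinguish "no fetcher has reported a duration yet" (e.g. a live stream) from a genuinely zero-length stream, which the old 0 initializer conflated. A condensed sketch of the new logic, assuming callers treat -1 as unknown:

    #include <algorithm>
    #include <cstdint>
    #include <vector>

    // Largest known fetcher duration, or -1 when none is known yet.
    int64_t maxKnownDurationUs(const std::vector<int64_t> &fetcherDurationsUs) {
        int64_t maxDurationUs = -1;  // sentinel: duration still unknown
        for (int64_t durationUs : fetcherDurationsUs) {
            maxDurationUs = std::max(maxDurationUs, durationUs);
        }
        return maxDurationUs;
    }
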
diff --git a/media/libstagefright/include/SoftVideoDecoderOMXComponent.h b/media/libstagefright/include/SoftVideoDecoderOMXComponent.h
index 9e97ebd..4529007 100644
--- a/media/libstagefright/include/SoftVideoDecoderOMXComponent.h
+++ b/media/libstagefright/include/SoftVideoDecoderOMXComponent.h
@@ -61,9 +61,10 @@
     void initPorts(OMX_U32 numInputBuffers,
             OMX_U32 inputBufferSize,
             OMX_U32 numOutputBuffers,
-            const char *mimeType);
+            const char *mimeType,
+            OMX_U32 minCompressionRatio = 1u);
 
-    virtual void updatePortDefinitions(bool updateCrop = true);
+    virtual void updatePortDefinitions(bool updateCrop = true, bool updateInputSize = false);
 
     uint32_t outputBufferWidth();
     uint32_t outputBufferHeight();
@@ -99,6 +100,9 @@
     } mOutputPortSettingsChange;
 
 private:
+    uint32_t mMinInputBufferSize;
+    uint32_t mMinCompressionRatio;
+
     const char *mComponentRole;
     OMX_VIDEO_CODINGTYPE mCodingType;
     const CodecProfileLevel *mProfileLevels;
diff --git a/media/libstagefright/include/SoftVideoEncoderOMXComponent.h b/media/libstagefright/include/SoftVideoEncoderOMXComponent.h
index b3b810d..b43635d 100644
--- a/media/libstagefright/include/SoftVideoEncoderOMXComponent.h
+++ b/media/libstagefright/include/SoftVideoEncoderOMXComponent.h
@@ -18,6 +18,8 @@
 
 #define SOFT_VIDEO_ENCODER_OMX_COMPONENT_H_
 
+#include <media/IOMX.h>
+
 #include "SimpleSoftOMXComponent.h"
 #include <system/window.h>
 
@@ -28,11 +30,26 @@
 struct SoftVideoEncoderOMXComponent : public SimpleSoftOMXComponent {
     SoftVideoEncoderOMXComponent(
             const char *name,
+            const char *componentRole,
+            OMX_VIDEO_CODINGTYPE codingType,
+            const CodecProfileLevel *profileLevels,
+            size_t numProfileLevels,
+            int32_t width,
+            int32_t height,
             const OMX_CALLBACKTYPE *callbacks,
             OMX_PTR appData,
             OMX_COMPONENTTYPE **component);
 
+    virtual OMX_ERRORTYPE internalSetParameter(OMX_INDEXTYPE index, const OMX_PTR param);
+    virtual OMX_ERRORTYPE internalGetParameter(OMX_INDEXTYPE index, OMX_PTR params);
+
 protected:
+    void initPorts(
+            OMX_U32 numInputBuffers, OMX_U32 numOutputBuffers, OMX_U32 outputBufferSize,
+            const char *mime, OMX_U32 minCompressionRatio = 1);
+
+    static void setRawVideoSize(OMX_PARAM_PORTDEFINITIONTYPE *def);
+
     static void ConvertFlexYUVToPlanar(
             uint8_t *dst, size_t dstStride, size_t dstVStride,
             struct android_ycbcr *ycbcr, int32_t width, int32_t height);
@@ -56,9 +73,30 @@
         kOutputPortIndex = 1,
     };
 
+    bool mInputDataIsMeta;
+    int32_t mWidth;      // width of the input frames
+    int32_t mHeight;     // height of the input frames
+    uint32_t mBitrate;   // target bitrate set for the encoder, in bits per second
+    uint32_t mFramerate; // target framerate set for the encoder, in Q16 format
+    OMX_COLOR_FORMATTYPE mColorFormat;  // Color format for the input port
+
 private:
+    void updatePortParams();
+    OMX_ERRORTYPE internalSetPortParams(const OMX_PARAM_PORTDEFINITIONTYPE* port);
+
+    static const uint32_t kInputBufferAlignment = 1;
+    static const uint32_t kOutputBufferAlignment = 2;
+
     mutable const hw_module_t *mGrallocModule;
 
+    uint32_t mMinOutputBufferSize;
+    uint32_t mMinCompressionRatio;
+
+    const char *mComponentRole;
+    OMX_VIDEO_CODINGTYPE mCodingType;
+    const CodecProfileLevel *mProfileLevels;
+    size_t mNumProfileLevels;
+
     DISALLOW_EVIL_CONSTRUCTORS(SoftVideoEncoderOMXComponent);
 };
 
diff --git a/media/libstagefright/omx/SoftVideoDecoderOMXComponent.cpp b/media/libstagefright/omx/SoftVideoDecoderOMXComponent.cpp
index 2f83610..532cf2f 100644
--- a/media/libstagefright/omx/SoftVideoDecoderOMXComponent.cpp
+++ b/media/libstagefright/omx/SoftVideoDecoderOMXComponent.cpp
@@ -26,6 +26,7 @@
 #include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/foundation/ALooper.h>
 #include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/foundation/AUtils.h>
 #include <media/stagefright/MediaDefs.h>
 
 namespace android {
@@ -61,6 +62,8 @@
         mCropWidth(width),
         mCropHeight(height),
         mOutputPortSettingsChange(NONE),
+        mMinInputBufferSize(384), // arbitrary, using one uncompressed macroblock
+        mMinCompressionRatio(1),  // max input size is normally the output size
         mComponentRole(componentRole),
         mCodingType(codingType),
         mProfileLevels(profileLevels),
@@ -71,7 +74,11 @@
         OMX_U32 numInputBuffers,
         OMX_U32 inputBufferSize,
         OMX_U32 numOutputBuffers,
-        const char *mimeType) {
+        const char *mimeType,
+        OMX_U32 minCompressionRatio) {
+    mMinInputBufferSize = inputBufferSize;
+    mMinCompressionRatio = minCompressionRatio;
+
     OMX_PARAM_PORTDEFINITIONTYPE def;
     InitOMXParams(&def);
 
@@ -120,27 +127,30 @@
 
     addPort(def);
 
-    updatePortDefinitions();
+    updatePortDefinitions(true /* updateCrop */, true /* updateInputSize */);
 }
 
-void SoftVideoDecoderOMXComponent::updatePortDefinitions(bool updateCrop) {
-    OMX_PARAM_PORTDEFINITIONTYPE *def = &editPortInfo(kInputPortIndex)->mDef;
-    def->format.video.nFrameWidth = mWidth;
-    def->format.video.nFrameHeight = mHeight;
-    def->format.video.nStride = def->format.video.nFrameWidth;
-    def->format.video.nSliceHeight = def->format.video.nFrameHeight;
+void SoftVideoDecoderOMXComponent::updatePortDefinitions(bool updateCrop, bool updateInputSize) {
+    OMX_PARAM_PORTDEFINITIONTYPE *outDef = &editPortInfo(kOutputPortIndex)->mDef;
+    outDef->format.video.nFrameWidth = outputBufferWidth();
+    outDef->format.video.nFrameHeight = outputBufferHeight();
+    outDef->format.video.nStride = outDef->format.video.nFrameWidth;
+    outDef->format.video.nSliceHeight = outDef->format.video.nFrameHeight;
 
-    def->nBufferSize = def->format.video.nFrameWidth * def->format.video.nFrameHeight * 3 / 2;
+    outDef->nBufferSize =
+        (outDef->format.video.nStride * outDef->format.video.nSliceHeight * 3) / 2;
 
-    def = &editPortInfo(kOutputPortIndex)->mDef;
-    def->format.video.nFrameWidth = outputBufferWidth();
-    def->format.video.nFrameHeight = outputBufferHeight();
-    def->format.video.nStride = def->format.video.nFrameWidth;
-    def->format.video.nSliceHeight = def->format.video.nFrameHeight;
+    OMX_PARAM_PORTDEFINITIONTYPE *inDef = &editPortInfo(kInputPortIndex)->mDef;
+    inDef->format.video.nFrameWidth = mWidth;
+    inDef->format.video.nFrameHeight = mHeight;
+    // the input port carries compressed data, hence it has no stride
+    inDef->format.video.nStride = 0;
+    inDef->format.video.nSliceHeight = 0;
 
-    def->nBufferSize =
-            (def->format.video.nFrameWidth *
-             def->format.video.nFrameHeight * 3) / 2;
+    // when output format changes, input buffer size does not actually change
+    if (updateInputSize) {
+        inDef->nBufferSize = max(outDef->nBufferSize / mMinCompressionRatio, mMinInputBufferSize);
+    }
 
     if (updateCrop) {
         mCropLeft = 0;
@@ -169,7 +179,8 @@
     bool strideChanged = false;
     if (fakeStride) {
         OMX_PARAM_PORTDEFINITIONTYPE *def = &editPortInfo(kOutputPortIndex)->mDef;
-        if (def->format.video.nStride != width || def->format.video.nSliceHeight != height) {
+        if (def->format.video.nStride != (OMX_S32)width
+                || def->format.video.nSliceHeight != (OMX_U32)height) {
             strideChanged = true;
         }
     }
@@ -252,7 +263,7 @@
                 (OMX_VIDEO_PARAM_PORTFORMATTYPE *)params;
 
             if (formatParams->nPortIndex > kMaxPortIndex) {
-                return OMX_ErrorUndefined;
+                return OMX_ErrorBadPortIndex;
             }
 
             if (formatParams->nIndex != 0) {
@@ -324,13 +335,25 @@
                 (OMX_VIDEO_PARAM_PORTFORMATTYPE *)params;
 
             if (formatParams->nPortIndex > kMaxPortIndex) {
-                return OMX_ErrorUndefined;
+                return OMX_ErrorBadPortIndex;
             }
 
             if (formatParams->nIndex != 0) {
                 return OMX_ErrorNoMore;
             }
 
+            if (formatParams->nPortIndex == kInputPortIndex) {
+                if (formatParams->eCompressionFormat != mCodingType
+                        || formatParams->eColorFormat != OMX_COLOR_FormatUnused) {
+                    return OMX_ErrorUnsupportedSetting;
+                }
+            } else {
+                if (formatParams->eCompressionFormat != OMX_VIDEO_CodingUnused
+                        || formatParams->eColorFormat != OMX_COLOR_FormatYUV420Planar) {
+                    return OMX_ErrorUnsupportedSetting;
+                }
+            }
+
             return OMX_ErrorNone;
         }
 
@@ -348,7 +371,7 @@
                 mAdaptiveMaxWidth = 0;
                 mAdaptiveMaxHeight = 0;
             }
-            updatePortDefinitions();
+            updatePortDefinitions(true /* updateCrop */, true /* updateInputSize */);
             return OMX_ErrorNone;
         }
 
@@ -369,11 +392,18 @@
                     (mIsAdaptive && outputPort) ? mAdaptiveMaxWidth : newWidth;
                 def->format.video.nFrameHeight =
                     (mIsAdaptive && outputPort) ? mAdaptiveMaxHeight : newHeight;
-                def->format.video.nStride = def->format.video.nFrameWidth;
-                def->format.video.nSliceHeight = def->format.video.nFrameHeight;
-                def->nBufferSize =
-                    def->format.video.nFrameWidth * def->format.video.nFrameHeight * 3 / 2;
                 if (outputPort) {
+                    def->format.video.nStride = def->format.video.nFrameWidth;
+                    def->format.video.nSliceHeight = def->format.video.nFrameHeight;
+                    def->nBufferSize =
+                        def->format.video.nStride * def->format.video.nSliceHeight * 3 / 2;
+
+                    OMX_PARAM_PORTDEFINITIONTYPE *inDef = &editPortInfo(kInputPortIndex)->mDef;
+                    // increase input buffer size if required
+                    inDef->nBufferSize =
+                        max(def->nBufferSize / mMinCompressionRatio, inDef->nBufferSize);
+
                     mWidth = newWidth;
                     mHeight = newHeight;
                     mCropLeft = 0;
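
This is the sizing rule the whole patch keeps applying: a decoder's input buffer must hold the worst-case compressed frame, bounded above by the output buffer size divided by the codec's minimum compression ratio and clamped below by a per-codec minimum. A self-contained sketch (function name is illustrative):

    #include <algorithm>
    #include <cstddef>

    // Worst-case compressed-frame size for a decoder input port: a frame
    // cannot compress below outputSize / minCompressionRatio, but never
    // size the port under the codec's own minimum (e.g. 384 bytes, one
    // uncompressed macroblock).
    static size_t minInputBufferSize(
            size_t outputBufferSize, size_t minCompressionRatio,
            size_t codecMinInputSize) {
        return std::max(outputBufferSize / minCompressionRatio,
                        codecMinInputSize);
    }
    // e.g. the HEVC decoder above: 2048 * 2048 * 3 / 2 output bytes at
    // ratio 4 gives 1572864, assuming INPUT_BUF_SIZE is smaller.
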
diff --git a/media/libstagefright/omx/SoftVideoEncoderOMXComponent.cpp b/media/libstagefright/omx/SoftVideoEncoderOMXComponent.cpp
index 8bff142..b2d3623 100644
--- a/media/libstagefright/omx/SoftVideoEncoderOMXComponent.cpp
+++ b/media/libstagefright/omx/SoftVideoEncoderOMXComponent.cpp
@@ -19,6 +19,7 @@
 //#define LOG_NDEBUG 0
 #define LOG_TAG "SoftVideoEncoderOMXComponent"
 #include <utils/Log.h>
+#include <utils/misc.h>
 
 #include "include/SoftVideoEncoderOMXComponent.h"
 
@@ -27,6 +28,7 @@
 #include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/foundation/ALooper.h>
 #include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/foundation/AUtils.h>
 #include <media/stagefright/MediaDefs.h>
 
 #include <ui/GraphicBuffer.h>
@@ -34,13 +36,316 @@
 
 namespace android {
 
+const static OMX_COLOR_FORMATTYPE kSupportedColorFormats[] = {
+    OMX_COLOR_FormatYUV420Planar,
+    OMX_COLOR_FormatYUV420SemiPlanar,
+    OMX_COLOR_FormatAndroidOpaque
+};
+
+template<class T>
+static void InitOMXParams(T *params) {
+    params->nSize = sizeof(T);
+    params->nVersion.s.nVersionMajor = 1;
+    params->nVersion.s.nVersionMinor = 0;
+    params->nVersion.s.nRevision = 0;
+    params->nVersion.s.nStep = 0;
+}
+
 SoftVideoEncoderOMXComponent::SoftVideoEncoderOMXComponent(
         const char *name,
+        const char *componentRole,
+        OMX_VIDEO_CODINGTYPE codingType,
+        const CodecProfileLevel *profileLevels,
+        size_t numProfileLevels,
+        int32_t width,
+        int32_t height,
         const OMX_CALLBACKTYPE *callbacks,
         OMX_PTR appData,
         OMX_COMPONENTTYPE **component)
     : SimpleSoftOMXComponent(name, callbacks, appData, component),
-      mGrallocModule(NULL) {
+      mInputDataIsMeta(false),
+      mWidth(width),
+      mHeight(height),
+      mBitrate(192000),
+      mFramerate(30 << 16), // Q16 format
+      mColorFormat(OMX_COLOR_FormatYUV420Planar),
+      mGrallocModule(NULL),
+      mMinOutputBufferSize(384), // arbitrary, using one uncompressed macroblock
+      mMinCompressionRatio(1),   // max output size is normally the input size
+      mComponentRole(componentRole),
+      mCodingType(codingType),
+      mProfileLevels(profileLevels),
+      mNumProfileLevels(numProfileLevels) {
+}
+
+void SoftVideoEncoderOMXComponent::initPorts(
+        OMX_U32 numInputBuffers, OMX_U32 numOutputBuffers, OMX_U32 outputBufferSize,
+        const char *mime, OMX_U32 minCompressionRatio) {
+    OMX_PARAM_PORTDEFINITIONTYPE def;
+
+    mMinOutputBufferSize = outputBufferSize;
+    mMinCompressionRatio = minCompressionRatio;
+
+    InitOMXParams(&def);
+
+    def.nPortIndex = kInputPortIndex;
+    def.eDir = OMX_DirInput;
+    def.nBufferCountMin = numInputBuffers;
+    def.nBufferCountActual = def.nBufferCountMin;
+    def.bEnabled = OMX_TRUE;
+    def.bPopulated = OMX_FALSE;
+    def.eDomain = OMX_PortDomainVideo;
+    def.bBuffersContiguous = OMX_FALSE;
+    def.format.video.pNativeRender = NULL;
+    def.format.video.nFrameWidth = mWidth;
+    def.format.video.nFrameHeight = mHeight;
+    def.format.video.nStride = def.format.video.nFrameWidth;
+    def.format.video.nSliceHeight = def.format.video.nFrameHeight;
+    def.format.video.nBitrate = 0;
+    // frameRate is in Q16 format.
+    def.format.video.xFramerate = mFramerate;
+    def.format.video.bFlagErrorConcealment = OMX_FALSE;
+    def.nBufferAlignment = kInputBufferAlignment;
+    def.format.video.cMIMEType = const_cast<char *>("video/raw");
+    def.format.video.eCompressionFormat = OMX_VIDEO_CodingUnused;
+    def.format.video.eColorFormat = mColorFormat;
+    def.format.video.pNativeWindow = NULL;
+    // buffersize set in updatePortParams
+
+    addPort(def);
+
+    InitOMXParams(&def);
+
+    def.nPortIndex = kOutputPortIndex;
+    def.eDir = OMX_DirOutput;
+    def.nBufferCountMin = numOutputBuffers;
+    def.nBufferCountActual = def.nBufferCountMin;
+    def.bEnabled = OMX_TRUE;
+    def.bPopulated = OMX_FALSE;
+    def.eDomain = OMX_PortDomainVideo;
+    def.bBuffersContiguous = OMX_FALSE;
+    def.format.video.pNativeRender = NULL;
+    def.format.video.nFrameWidth = mWidth;
+    def.format.video.nFrameHeight = mHeight;
+    def.format.video.nStride = 0;
+    def.format.video.nSliceHeight = 0;
+    def.format.video.nBitrate = mBitrate;
+    def.format.video.xFramerate = 0 << 16;
+    def.format.video.bFlagErrorConcealment = OMX_FALSE;
+    def.nBufferAlignment = kOutputBufferAlignment;
+    def.format.video.cMIMEType = const_cast<char *>(mime);
+    def.format.video.eCompressionFormat = mCodingType;
+    def.format.video.eColorFormat = OMX_COLOR_FormatUnused;
+    def.format.video.pNativeWindow = NULL;
+    // buffersize set in updatePortParams
+
+    addPort(def);
+
+    updatePortParams();
+}
+
+void SoftVideoEncoderOMXComponent::updatePortParams() {
+    OMX_PARAM_PORTDEFINITIONTYPE *inDef = &editPortInfo(kInputPortIndex)->mDef;
+    inDef->format.video.nFrameWidth = mWidth;
+    inDef->format.video.nFrameHeight = mHeight;
+    inDef->format.video.nStride = inDef->format.video.nFrameWidth;
+    inDef->format.video.nSliceHeight = inDef->format.video.nFrameHeight;
+    inDef->format.video.xFramerate = mFramerate;
+    inDef->format.video.eColorFormat = mColorFormat;
+    uint32_t rawBufferSize =
+        inDef->format.video.nStride * inDef->format.video.nSliceHeight * 3 / 2;
+    if (inDef->format.video.eColorFormat == OMX_COLOR_FormatAndroidOpaque) {
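+        // in meta mode the buffer carries a 4-byte metadata-type tag followed
+        // by a buffer_handle_t or a GraphicBuffer pointer rather than pixels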
+        inDef->nBufferSize = 4 + max(sizeof(buffer_handle_t), sizeof(GraphicBuffer *));
+    } else {
+        inDef->nBufferSize = rawBufferSize;
+    }
+
+    OMX_PARAM_PORTDEFINITIONTYPE *outDef = &editPortInfo(kOutputPortIndex)->mDef;
+    outDef->format.video.nFrameWidth = mWidth;
+    outDef->format.video.nFrameHeight = mHeight;
+    outDef->format.video.nBitrate = mBitrate;
+
+    outDef->nBufferSize = max(mMinOutputBufferSize, rawBufferSize / mMinCompressionRatio);
+}
+
+OMX_ERRORTYPE SoftVideoEncoderOMXComponent::internalSetPortParams(
+        const OMX_PARAM_PORTDEFINITIONTYPE *port) {
+    if (port->nPortIndex == kInputPortIndex) {
+        mWidth = port->format.video.nFrameWidth;
+        mHeight = port->format.video.nFrameHeight;
+
+        // xFramerate is in Q16 format, in frames per second
+        mFramerate = port->format.video.xFramerate;
+
+        if (port->format.video.eCompressionFormat != OMX_VIDEO_CodingUnused
+                || (port->format.video.eColorFormat != OMX_COLOR_FormatYUV420Planar
+                        && port->format.video.eColorFormat != OMX_COLOR_FormatYUV420SemiPlanar
+                        && port->format.video.eColorFormat != OMX_COLOR_FormatAndroidOpaque)) {
+            return OMX_ErrorUnsupportedSetting;
+        }
+
+        mColorFormat = port->format.video.eColorFormat;
+    } else if (port->nPortIndex == kOutputPortIndex) {
+        if (port->format.video.eCompressionFormat != mCodingType
+                || port->format.video.eColorFormat != OMX_COLOR_FormatUnused) {
+            return OMX_ErrorUnsupportedSetting;
+        }
+
+        mBitrate = port->format.video.nBitrate;
+    } else {
+        return OMX_ErrorBadPortIndex;
+    }
+
+    updatePortParams();
+    return OMX_ErrorNone;
+}
+
+OMX_ERRORTYPE SoftVideoEncoderOMXComponent::internalSetParameter(
+        OMX_INDEXTYPE index, const OMX_PTR param) {
+    // can include extension index OMX_INDEXEXTTYPE
+    const int32_t indexFull = index;
+
+    switch (indexFull) {
+        case OMX_IndexParamVideoErrorCorrection:
+        {
+            return OMX_ErrorNotImplemented;
+        }
+
+        case OMX_IndexParamStandardComponentRole:
+        {
+            const OMX_PARAM_COMPONENTROLETYPE *roleParams =
+                (const OMX_PARAM_COMPONENTROLETYPE *)param;
+
+            if (strncmp((const char *)roleParams->cRole,
+                        mComponentRole,
+                        OMX_MAX_STRINGNAME_SIZE - 1)) {
+                return OMX_ErrorUnsupportedSetting;
+            }
+
+            return OMX_ErrorNone;
+        }
+
+        case OMX_IndexParamPortDefinition:
+        {
+            OMX_ERRORTYPE err = internalSetPortParams((const OMX_PARAM_PORTDEFINITIONTYPE *)param);
+
+            if (err != OMX_ErrorNone) {
+                return err;
+            }
+
+            return SimpleSoftOMXComponent::internalSetParameter(index, param);
+        }
+
+        case OMX_IndexParamVideoPortFormat:
+        {
+            const OMX_VIDEO_PARAM_PORTFORMATTYPE* format =
+                (const OMX_VIDEO_PARAM_PORTFORMATTYPE *)param;
+
+            if (format->nPortIndex == kInputPortIndex) {
+                if (format->eColorFormat == OMX_COLOR_FormatYUV420Planar ||
+                    format->eColorFormat == OMX_COLOR_FormatYUV420SemiPlanar ||
+                    format->eColorFormat == OMX_COLOR_FormatAndroidOpaque) {
+                    mColorFormat = format->eColorFormat;
+
+                    updatePortParams();
+                    return OMX_ErrorNone;
+                } else {
+                    ALOGE("Unsupported color format %i", format->eColorFormat);
+                    return OMX_ErrorUnsupportedSetting;
+                }
+            } else if (format->nPortIndex == kOutputPortIndex) {
+                if (format->eCompressionFormat == mCodingType) {
+                    return OMX_ErrorNone;
+                } else {
+                    return OMX_ErrorUnsupportedSetting;
+                }
+            } else {
+                return OMX_ErrorBadPortIndex;
+            }
+        }
+
+        case kStoreMetaDataExtensionIndex:
+        {
+            // storeMetaDataInBuffers
+            const StoreMetaDataInBuffersParams *storeParam =
+                (const StoreMetaDataInBuffersParams *)param;
+
+            if (storeParam->nPortIndex == kOutputPortIndex) {
+                return storeParam->bStoreMetaData ? OMX_ErrorUnsupportedSetting : OMX_ErrorNone;
+            } else if (storeParam->nPortIndex != kInputPortIndex) {
+                return OMX_ErrorBadPortIndex;
+            }
+
+            mInputDataIsMeta = (storeParam->bStoreMetaData == OMX_TRUE);
+            if (mInputDataIsMeta) {
+                mColorFormat = OMX_COLOR_FormatAndroidOpaque;
+            } else if (mColorFormat == OMX_COLOR_FormatAndroidOpaque) {
+                mColorFormat = OMX_COLOR_FormatYUV420Planar;
+            }
+            updatePortParams();
+            return OMX_ErrorNone;
+        }
+
+        default:
+            return SimpleSoftOMXComponent::internalSetParameter(index, param);
+    }
+}
+
+OMX_ERRORTYPE SoftVideoEncoderOMXComponent::internalGetParameter(
+        OMX_INDEXTYPE index, OMX_PTR param) {
+    switch (index) {
+        case OMX_IndexParamVideoErrorCorrection:
+        {
+            return OMX_ErrorNotImplemented;
+        }
+
+        case OMX_IndexParamVideoPortFormat:
+        {
+            OMX_VIDEO_PARAM_PORTFORMATTYPE *formatParams =
+                (OMX_VIDEO_PARAM_PORTFORMATTYPE *)param;
+
+            if (formatParams->nPortIndex == kInputPortIndex) {
+                if (formatParams->nIndex >= NELEM(kSupportedColorFormats)) {
+                    return OMX_ErrorNoMore;
+                }
+
+                // Color formats, in order of preference
+                formatParams->eColorFormat = kSupportedColorFormats[formatParams->nIndex];
+                formatParams->eCompressionFormat = OMX_VIDEO_CodingUnused;
+                formatParams->xFramerate = mFramerate;
+                return OMX_ErrorNone;
+            } else if (formatParams->nPortIndex == kOutputPortIndex) {
+                formatParams->eCompressionFormat = mCodingType;
+                formatParams->eColorFormat = OMX_COLOR_FormatUnused;
+                formatParams->xFramerate = 0;
+                return OMX_ErrorNone;
+            } else {
+                return OMX_ErrorBadPortIndex;
+            }
+        }
+
+        case OMX_IndexParamVideoProfileLevelQuerySupported:
+        {
+            OMX_VIDEO_PARAM_PROFILELEVELTYPE *profileLevel =
+                  (OMX_VIDEO_PARAM_PROFILELEVELTYPE *) param;
+
+            if (profileLevel->nPortIndex != kOutputPortIndex) {
+                ALOGE("Invalid port index: %u", profileLevel->nPortIndex);
+                return OMX_ErrorUnsupportedIndex;
+            }
+
+            if (profileLevel->nProfileIndex >= mNumProfileLevels) {
+                return OMX_ErrorNoMore;
+            }
+
+            profileLevel->eProfile = mProfileLevels[profileLevel->nProfileIndex].mProfile;
+            profileLevel->eLevel   = mProfileLevels[profileLevel->nProfileIndex].mLevel;
+            return OMX_ErrorNone;
+        }
+
+        default:
+            return SimpleSoftOMXComponent::internalGetParameter(index, param);
+    }
 }
 
 // static
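
To show how a concrete encoder is expected to plug into this base class, here
is a minimal sketch of a hypothetical subclass constructor (component name,
dimensions, buffer counts and profile levels are invented for illustration):

    static const CodecProfileLevel kProfileLevels[] = {
        { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel2 },
    };

    SoftFooEncoder::SoftFooEncoder(
            const char *name, const OMX_CALLBACKTYPE *callbacks,
            OMX_PTR appData, OMX_COMPONENTTYPE **component)
        : SoftVideoEncoderOMXComponent(
                name, "video_encoder.avc", OMX_VIDEO_CodingAVC,
                kProfileLevels, NELEM(kProfileLevels),
                176 /* width */, 144 /* height */,
                callbacks, appData, component) {
        // 4 buffers per port; output buffers hold at least 8192 bytes and at
        // least the raw frame size divided by the 2:1 minimum compression ratio
        initPorts(4, 4, 8192, MEDIA_MIMETYPE_VIDEO_AVC, 2 /* minCompressionRatio */);
    }
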
diff --git a/media/libstagefright/tests/Utils_test.cpp b/media/libstagefright/tests/Utils_test.cpp
index 43e0269..5c323c1 100644
--- a/media/libstagefright/tests/Utils_test.cpp
+++ b/media/libstagefright/tests/Utils_test.cpp
@@ -172,6 +172,13 @@
     ASSERT_EQ(divUp(12, 4), 3);
     ASSERT_EQ(divUp(13, 4), 4);
 
+    ASSERT_EQ(align(11, 4), 12);
+    ASSERT_EQ(align(12, 4), 12);
+    ASSERT_EQ(align(13, 4), 16);
+    ASSERT_EQ(align(11, 8), 16);
+    ASSERT_EQ(align(11, 2), 12);
+    ASSERT_EQ(align(11, 1), 11);
+
     ASSERT_EQ(abs(5L), 5L);
     ASSERT_EQ(abs(-25), 25);
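
The new assertions pin down align()'s rounding behavior. One implementation
consistent with them (assuming power-of-two alignments, as used in the tests)
would be:

    static inline int align(int value, int alignment) {
        // round value up to the next multiple of alignment
        return (value + alignment - 1) & ~(alignment - 1);
    }
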
 
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index dab6d91..87f636c 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -2180,7 +2180,13 @@
 
 void AudioFlinger::PlaybackThread::threadLoop_exit()
 {
-    // Default implementation has nothing to do
+    {
+        Mutex::Autolock _l(mLock);
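+        // mark every track invalid so client-side AudioTracks get an error
+        // and can recreate themselves on another output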
+        for (size_t i = 0; i < mTracks.size(); i++) {
+            sp<Track> track = mTracks[i];
+            track->invalidate();
+        }
+    }
 }
 
 /*
@@ -4003,9 +4009,14 @@
         bool last = l.get() == track;
 
         // The first time a track is added we wait
-        // for all its buffers to be filled before processing it
+        // for all its buffers to be filled before processing it.
+        // Allow draining the buffer in case the client app does not call
+        // stop() and instead relies on underrun to stop: hence the test on
+        // (track->mRetryCount > 1).
+        // If retryCount <= 1, the track is about to underrun and be removed.
         uint32_t minFrames;
-        if ((track->sharedBuffer() == 0) && !track->isStopping_1() && !track->isPausing()) {
+        if ((track->sharedBuffer() == 0) && !track->isStopping_1() && !track->isPausing()
+            && (track->mRetryCount > 1)) {
             minFrames = mNormalFrameCount;
         } else {
             minFrames = 1;
diff --git a/services/audiopolicy/AudioPolicyInterface.h b/services/audiopolicy/AudioPolicyInterface.h
index 5bcbca8..2826cad 100644
--- a/services/audiopolicy/AudioPolicyInterface.h
+++ b/services/audiopolicy/AudioPolicyInterface.h
@@ -18,6 +18,7 @@
 #define ANDROID_AUDIOPOLICY_INTERFACE_H
 
 #include <media/AudioSystem.h>
+#include <media/AudioPolicy.h>
 #include <utils/String8.h>
 
 #include <hardware/audio_policy.h>
@@ -201,6 +202,9 @@
                                            audio_devices_t *device) = 0;
 
     virtual status_t releaseSoundTriggerSession(audio_session_t session) = 0;
+
+    virtual status_t registerPolicyMixes(Vector<AudioMix> mixes) = 0;
+    virtual status_t unregisterPolicyMixes(Vector<AudioMix> mixes) = 0;
 };
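
A caller-side sketch of the new entry points (values, the address string, and
the policyInterface pointer to an implementation of the interface above are
invented for illustration; AudioMix and AttributeMatchCriterion come from
media/AudioPolicy.h):

    // route every AUDIO_USAGE_MEDIA player to the mix registered as "mix0"
    Vector<AttributeMatchCriterion> criteria;
    criteria.add(AttributeMatchCriterion(AUDIO_USAGE_MEDIA, AUDIO_SOURCE_DEFAULT,
                                         RULE_MATCH_ATTRIBUTE_USAGE));

    audio_config_t format;
    memset(&format, 0, sizeof(format));
    format.sample_rate = 48000;
    format.channel_mask = AUDIO_CHANNEL_OUT_STEREO;
    format.format = AUDIO_FORMAT_PCM_16_BIT;

    Vector<AudioMix> mixes;
    mixes.add(AudioMix(criteria, MIX_TYPE_PLAYERS, format,
                       ROUTE_FLAG_LOOP_BACK, String8("mix0")));
    status_t status = policyInterface->registerPolicyMixes(mixes);
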
 
 
diff --git a/services/audiopolicy/AudioPolicyInterfaceImpl.cpp b/services/audiopolicy/AudioPolicyInterfaceImpl.cpp
index abb1b21..d3c9013 100644
--- a/services/audiopolicy/AudioPolicyInterfaceImpl.cpp
+++ b/services/audiopolicy/AudioPolicyInterfaceImpl.cpp
@@ -614,4 +614,20 @@
     return mAudioPolicyManager->releaseSoundTriggerSession(session);
 }
 
+status_t AudioPolicyService::registerPolicyMixes(Vector<AudioMix> mixes, bool registration)
+{
+    Mutex::Autolock _l(mLock);
+    if (!modifyAudioRoutingAllowed()) {
+        return PERMISSION_DENIED;
+    }
+    if (mAudioPolicyManager == NULL) {
+        return NO_INIT;
+    }
+    if (registration) {
+        return mAudioPolicyManager->registerPolicyMixes(mixes);
+    } else {
+        return mAudioPolicyManager->unregisterPolicyMixes(mixes);
+    }
+}
+
 }; // namespace android
diff --git a/services/audiopolicy/AudioPolicyInterfaceImplLegacy.cpp b/services/audiopolicy/AudioPolicyInterfaceImplLegacy.cpp
index 4e42b25..4bc3c7f 100644
--- a/services/audiopolicy/AudioPolicyInterfaceImplLegacy.cpp
+++ b/services/audiopolicy/AudioPolicyInterfaceImplLegacy.cpp
@@ -555,7 +555,7 @@
 
 status_t AudioPolicyService::getOutputForAttr(const audio_attributes_t *attr,
                                               audio_io_handle_t *output,
-                                              audio_session_t session,
+                                              audio_session_t session __unused,
                                               audio_stream_type_t *stream,
                                               uint32_t samplingRate,
                                               audio_format_t format,
@@ -590,4 +590,10 @@
     return INVALID_OPERATION;
 }
 
+status_t AudioPolicyService::registerPolicyMixes(Vector<AudioMix> mixes __unused,
+                                                 bool registration __unused)
+{
+    return INVALID_OPERATION;
+}
+
 }; // namespace android
diff --git a/services/audiopolicy/AudioPolicyManager.cpp b/services/audiopolicy/AudioPolicyManager.cpp
index b5a3d5b..744556d 100644
--- a/services/audiopolicy/AudioPolicyManager.cpp
+++ b/services/audiopolicy/AudioPolicyManager.cpp
@@ -211,11 +211,17 @@
 // AudioPolicyInterface implementation
 // ----------------------------------------------------------------------------
 
-
 status_t AudioPolicyManager::setDeviceConnectionState(audio_devices_t device,
                                                           audio_policy_dev_state_t state,
                                                   const char *device_address)
 {
+    return setDeviceConnectionStateInt(device, state, device_address);
+}
+
+status_t AudioPolicyManager::setDeviceConnectionStateInt(audio_devices_t device,
+                                                          audio_policy_dev_state_t state,
+                                                  const char *device_address)
+{
     String8 address = (device_address == NULL) ? String8("") : String8(device_address);
     // handle legacy remote submix case where the address was not always specified
     if (deviceDistinguishesOnAddress(device) && (address.length() == 0)) {
@@ -458,7 +464,7 @@
     audio_patch_handle_t afPatchHandle;
     DeviceVector deviceList;
 
-    audio_devices_t txDevice = getDeviceForInputSource(AUDIO_SOURCE_VOICE_COMMUNICATION);
+    audio_devices_t txDevice = getDeviceAndMixForInputSource(AUDIO_SOURCE_VOICE_COMMUNICATION);
     ALOGV("updateCallRouting device rxDevice %08x txDevice %08x", rxDevice, txDevice);
 
     // release existing RX patch if any
@@ -818,7 +824,7 @@
         }
         for (size_t j = 0; j < mHwModules[i]->mOutputProfiles.size(); j++) {
             sp<IOProfile> profile = mHwModules[i]->mOutputProfiles[j];
-            bool found = profile->isCompatibleProfile(device, samplingRate,
+            bool found = profile->isCompatibleProfile(device, String8(""), samplingRate,
                     NULL /*updatedSamplingRate*/, format, channelMask,
                     flags & AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD ?
                         AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD : AUDIO_OUTPUT_FLAG_DIRECT);
@@ -874,10 +880,53 @@
         stream_type_to_audio_attributes(*stream, &attributes);
     }
 
+    for (size_t i = 0; i < mPolicyMixes.size(); i++) {
+        sp<AudioOutputDescriptor> desc;
+        if (mPolicyMixes[i]->mMix.mMixType == MIX_TYPE_PLAYERS) {
+            for (size_t j = 0; j < mPolicyMixes[i]->mMix.mCriteria.size(); j++) {
+                if ((RULE_MATCH_ATTRIBUTE_USAGE == mPolicyMixes[i]->mMix.mCriteria[j].mRule &&
+                        mPolicyMixes[i]->mMix.mCriteria[j].mAttr.mUsage == attributes.usage) ||
+                    (RULE_EXCLUDE_ATTRIBUTE_USAGE == mPolicyMixes[i]->mMix.mCriteria[j].mRule &&
+                        mPolicyMixes[i]->mMix.mCriteria[j].mAttr.mUsage != attributes.usage)) {
+                    desc = mPolicyMixes[i]->mOutput;
+                    break;
+                }
+                if (strncmp(attributes.tags, "addr=", strlen("addr=")) == 0 &&
+                        strncmp(attributes.tags + strlen("addr="),
+                                mPolicyMixes[i]->mMix.mRegistrationId.string(),
+                                AUDIO_ATTRIBUTES_TAGS_MAX_SIZE - strlen("addr=") - 1) == 0) {
+                    desc = mPolicyMixes[i]->mOutput;
+                    break;
+                }
+            }
+        } else if (mPolicyMixes[i]->mMix.mMixType == MIX_TYPE_RECORDERS) {
+            if (attributes.usage == AUDIO_USAGE_VIRTUAL_SOURCE &&
+                    strncmp(attributes.tags, "addr=", strlen("addr=")) == 0 &&
+                    strncmp(attributes.tags + strlen("addr="),
+                            mPolicyMixes[i]->mMix.mRegistrationId.string(),
+                            AUDIO_ATTRIBUTES_TAGS_MAX_SIZE - strlen("addr=") - 1) == 0) {
+                desc = mPolicyMixes[i]->mOutput;
+            }
+        }
+        if (desc != 0) {
+            if (!audio_is_linear_pcm(format)) {
+                return BAD_VALUE;
+            }
+            desc->mPolicyMix = &mPolicyMixes[i]->mMix;
+            *stream = streamTypefromAttributesInt(&attributes);
+            *output = desc->mIoHandle;
+            ALOGV("getOutputForAttr() returns output %d", *output);
+            return NO_ERROR;
+        }
+    }
+    if (attributes.usage == AUDIO_USAGE_VIRTUAL_SOURCE) {
+        ALOGW("getOutputForAttr() no policy mix found for usage AUDIO_USAGE_VIRTUAL_SOURCE");
+        return BAD_VALUE;
+    }
+
     ALOGV("getOutputForAttr() usage=%d, content=%d, tag=%s flags=%08x",
             attributes.usage, attributes.content_type, attributes.tags, attributes.flags);
 
-    // TODO this is where filtering for custom policies (rerouting, dynamic sources) will go
     routing_strategy strategy = (routing_strategy) getStrategyForAttr(&attributes);
     audio_devices_t device = getDeviceForStrategy(strategy, false /*fromCache*/);
 
@@ -1182,7 +1231,13 @@
     outputDesc->changeRefCount(stream, 1);
 
     if (outputDesc->mRefCount[stream] == 1) {
-        audio_devices_t newDevice = getNewOutputDevice(output, false /*fromCache*/);
+        // starting an output being rerouted?
+        audio_devices_t newDevice;
+        if (outputDesc->mPolicyMix != NULL) {
+            newDevice = AUDIO_DEVICE_OUT_REMOTE_SUBMIX;
+        } else {
+            newDevice = getNewOutputDevice(output, false /*fromCache*/);
+        }
         routing_strategy strategy = getStrategy(stream);
         bool shouldWait = (strategy == STRATEGY_SONIFICATION) ||
                             (strategy == STRATEGY_SONIFICATION_RESPECTFUL) ||
@@ -1225,6 +1280,16 @@
         // update the outputs if starting an output with a stream that can affect notification
         // routing
         handleNotificationRoutingForStream(stream);
+
+        // Automatically enable the remote submix input when output is started
+        // on a rerouting mix of type MIX_TYPE_RECORDERS
+        if (audio_is_remote_submix_device(newDevice) && outputDesc->mPolicyMix != NULL &&
+                outputDesc->mPolicyMix->mMixType == MIX_TYPE_RECORDERS) {
+                setDeviceConnectionStateInt(AUDIO_DEVICE_IN_REMOTE_SUBMIX,
+                        AUDIO_POLICY_DEVICE_STATE_AVAILABLE,
+                        outputDesc->mPolicyMix->mRegistrationId);
+        }
+
         if (waitMs > muteWaitMs) {
             usleep((waitMs - muteWaitMs) * 2 * 1000);
         }
@@ -1259,6 +1324,16 @@
         outputDesc->changeRefCount(stream, -1);
         // store time at which the stream was stopped - see isStreamActive()
         if (outputDesc->mRefCount[stream] == 0) {
+            // Automatically disable the remote submix input when output is stopped on a
+            // rerouting mix of type MIX_TYPE_RECORDERS
+            if (audio_is_remote_submix_device(outputDesc->mDevice) &&
+                    outputDesc->mPolicyMix != NULL &&
+                    outputDesc->mPolicyMix->mMixType == MIX_TYPE_RECORDERS) {
+                setDeviceConnectionStateInt(AUDIO_DEVICE_IN_REMOTE_SUBMIX,
+                        AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE,
+                        outputDesc->mPolicyMix->mRegistrationId);
+            }
+
             outputDesc->mStopTime[stream] = systemTime();
             audio_devices_t newDevice = getNewOutputDevice(output, false /*fromCache*/);
             // delay the device switch by twice the latency because stopOutput() is executed when
@@ -1350,57 +1425,76 @@
             "session %d, flags %#x",
           attr->source, samplingRate, format, channelMask, session, flags);
 
-    audio_devices_t device = getDeviceForInputSource(attr->source);
-
-    if (device == AUDIO_DEVICE_NONE) {
-        ALOGW("getInputForAttr() could not find device for source %d", attr->source);
-        return BAD_VALUE;
-    }
-
-    // adapt channel selection to input source
-    switch (attr->source) {
-    case AUDIO_SOURCE_VOICE_UPLINK:
-        channelMask = AUDIO_CHANNEL_IN_VOICE_UPLINK;
-        break;
-    case AUDIO_SOURCE_VOICE_DOWNLINK:
-        channelMask = AUDIO_CHANNEL_IN_VOICE_DNLINK;
-        break;
-    case AUDIO_SOURCE_VOICE_CALL:
-        channelMask = AUDIO_CHANNEL_IN_VOICE_UPLINK | AUDIO_CHANNEL_IN_VOICE_DNLINK;
-        break;
-    default:
-        break;
-    }
-
     *input = AUDIO_IO_HANDLE_NONE;
+    audio_devices_t device;
+    // handle legacy remote submix case where the address was not always specified
+    String8 address = String8("");
     bool isSoundTrigger = false;
     audio_source_t halInputSource = attr->source;
-    if (attr->source == AUDIO_SOURCE_HOTWORD) {
-        ssize_t index = mSoundTriggerSessions.indexOfKey(session);
-        if (index >= 0) {
-            *input = mSoundTriggerSessions.valueFor(session);
-            isSoundTrigger = true;
-            flags = (audio_input_flags_t)(flags | AUDIO_INPUT_FLAG_HW_HOTWORD);
-            ALOGV("SoundTrigger capture on session %d input %d", session, *input);
-        } else {
-            halInputSource = AUDIO_SOURCE_VOICE_RECOGNITION;
+    AudioMix *policyMix = NULL;
+
+    if (attr->source == AUDIO_SOURCE_REMOTE_SUBMIX &&
+            strncmp(attr->tags, "addr=", strlen("addr=")) == 0) {
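+        // e.g. tags = "addr=mix0" requests capture from the mix registered
+        // under the address "mix0"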
+        device = AUDIO_DEVICE_IN_REMOTE_SUBMIX;
+        address = String8(attr->tags + strlen("addr="));
+        ssize_t index = mPolicyMixes.indexOfKey(address);
+        if (index < 0) {
+            ALOGW("getInputForAttr() no policy for address %s", address.string());
+            return BAD_VALUE;
+        }
+        if (mPolicyMixes[index]->mMix.mMixType != MIX_TYPE_PLAYERS) {
+            ALOGW("getInputForAttr() bad policy mix type for address %s", address.string());
+            return BAD_VALUE;
+        }
+        policyMix = &mPolicyMixes[index]->mMix;
+    } else {
+        device = getDeviceAndMixForInputSource(attr->source, &policyMix);
+        if (device == AUDIO_DEVICE_NONE) {
+            ALOGW("getInputForAttr() could not find device for source %d", attr->source);
+            return BAD_VALUE;
+        }
+        if (policyMix != NULL) {
+            address = policyMix->mRegistrationId;
+        } else if (audio_is_remote_submix_device(device)) {
+            address = String8("0");
+        }
+        // adapt channel selection to input source
+        switch (attr->source) {
+        case AUDIO_SOURCE_VOICE_UPLINK:
+            channelMask = AUDIO_CHANNEL_IN_VOICE_UPLINK;
+            break;
+        case AUDIO_SOURCE_VOICE_DOWNLINK:
+            channelMask = AUDIO_CHANNEL_IN_VOICE_DNLINK;
+            break;
+        case AUDIO_SOURCE_VOICE_CALL:
+            channelMask = AUDIO_CHANNEL_IN_VOICE_UPLINK | AUDIO_CHANNEL_IN_VOICE_DNLINK;
+            break;
+        default:
+            break;
+        }
+        if (attr->source == AUDIO_SOURCE_HOTWORD) {
+            ssize_t index = mSoundTriggerSessions.indexOfKey(session);
+            if (index >= 0) {
+                *input = mSoundTriggerSessions.valueFor(session);
+                isSoundTrigger = true;
+                flags = (audio_input_flags_t)(flags | AUDIO_INPUT_FLAG_HW_HOTWORD);
+                ALOGV("SoundTrigger capture on session %d input %d", session, *input);
+            } else {
+                halInputSource = AUDIO_SOURCE_VOICE_RECOGNITION;
+            }
         }
     }
 
-    sp<IOProfile> profile = getInputProfile(device,
-                                         samplingRate,
-                                         format,
-                                         channelMask,
-                                         flags);
+    sp<IOProfile> profile = getInputProfile(device, address,
+                                            samplingRate, format, channelMask,
+                                            flags);
     if (profile == 0) {
         //retry without flags
         audio_input_flags_t log_flags = flags;
         flags = AUDIO_INPUT_FLAG_NONE;
-        profile = getInputProfile(device,
-                                 samplingRate,
-                                 format,
-                                 channelMask,
-                                 flags);
+        profile = getInputProfile(device, address,
+                                  samplingRate, format, channelMask,
+                                  flags);
         if (profile == 0) {
             ALOGW("getInputForAttr() could not find profile for device 0x%X, samplingRate %u,"
                     "format %#x, channelMask 0x%X, flags %#x",
@@ -1419,9 +1513,6 @@
     config.channel_mask = channelMask;
     config.format = format;
 
-    // handle legacy remote submix case where the address was not always specified
-    String8 address = deviceDistinguishesOnAddress(device) ? String8("0") : String8("");
-
     status_t status = mpClientInterface->openInput(profile->mModule->mHandle,
                                                    input,
                                                    &config,
@@ -1453,6 +1544,7 @@
     inputDesc->mDevice = device;
     inputDesc->mSessions.add(session);
     inputDesc->mIsSoundTrigger = isSoundTrigger;
+    inputDesc->mPolicyMix = policyMix;
 
     addInput(*input, inputDesc);
     mpClientInterface->onAudioPortListUpdate();
@@ -1503,11 +1595,21 @@
         }
         setInputDevice(input, getNewInputDevice(input), true /* force */);
 
-        // Automatically enable the remote submix output when input is started.
+        // automatically enable the remote submix output when input is started if not
+        // used by a policy mix of type MIX_TYPE_RECORDERS
         // For remote submix (a virtual device), we open only one input per capture request.
         if (audio_is_remote_submix_device(inputDesc->mDevice)) {
-            setDeviceConnectionState(AUDIO_DEVICE_OUT_REMOTE_SUBMIX,
-                    AUDIO_POLICY_DEVICE_STATE_AVAILABLE, AUDIO_REMOTE_SUBMIX_DEVICE_ADDRESS);
+            String8 address = String8("");
+            if (inputDesc->mPolicyMix == NULL) {
+                address = String8("0");
+            } else if (inputDesc->mPolicyMix->mMixType == MIX_TYPE_PLAYERS) {
+                address = inputDesc->mPolicyMix->mRegistrationId;
+            }
+            if (address != "") {
+                setDeviceConnectionStateInt(AUDIO_DEVICE_OUT_REMOTE_SUBMIX,
+                        AUDIO_POLICY_DEVICE_STATE_AVAILABLE,
+                        address);
+            }
         }
     }
 
@@ -1542,10 +1644,20 @@
     inputDesc->mRefCount--;
     if (inputDesc->mRefCount == 0) {
 
-        // automatically disable the remote submix output when input is stopped
+        // automatically disable the remote submix output when input is stopped if not
+        // used by a policy mix of type MIX_TYPE_RECORDERS
         if (audio_is_remote_submix_device(inputDesc->mDevice)) {
-            setDeviceConnectionState(AUDIO_DEVICE_OUT_REMOTE_SUBMIX,
-                    AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE, AUDIO_REMOTE_SUBMIX_DEVICE_ADDRESS);
+            String8 address = String8("");
+            if (inputDesc->mPolicyMix == NULL) {
+                address = String8("0");
+            } else if (inputDesc->mPolicyMix->mMixType == MIX_TYPE_PLAYERS) {
+                address = inputDesc->mPolicyMix->mRegistrationId;
+            }
+            if (address != "") {
+                setDeviceConnectionStateInt(AUDIO_DEVICE_OUT_REMOTE_SUBMIX,
+                                         AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE,
+                                         address);
+            }
         }
 
         resetInputDevice(input);
@@ -1903,7 +2015,11 @@
         const sp<AudioOutputDescriptor> outputDesc = mOutputs.valueAt(i);
         if (((outputDesc->device() & APM_AUDIO_OUT_DEVICE_REMOTE_ALL) != 0) &&
                 outputDesc->isStreamActive(stream, inPastMs, sysTime)) {
-            return true;
+            // do not consider rerouting (when the output is going to a dynamic policy)
+            // as "remote playback"
+            if (outputDesc->mPolicyMix == NULL) {
+                return true;
+            }
         }
     }
     return false;
@@ -1923,6 +2039,127 @@
     return false;
 }
 
+// Register a list of custom mixes with their attributes and format.
+// When a mix is registered, corresponding input and output profiles are
+// added to the remote submix hw module. The profile contains only the
+// parameters (sampling rate, format...) specified by the mix.
+// The corresponding input remote submix device is also connected.
+//
+// When a remote submix device is connected, the address is checked to select the
+// appropriate profile and the corresponding input or output stream is opened.
+//
+// When capture starts, getInputForAttr() will:
+//  - 1 look for a mix matching the address passed in attributes tags if any
+//  - 2 if none found, getDeviceForInputSource() will:
+//     - 2.1 look for a mix matching the attributes source
+//     - 2.2 if none found, default to device selection by policy rules
+// At this time, the corresponding output remote submix device is also connected
+// and active playback use cases can be transferred to this mix if needed when reconnecting
+// after AudioTracks are invalidated
+//
+// When playback starts, getOutputForAttr() will:
+//  - 1 look for a mix matching the address passed in attributes tags if any
+//  - 2 if none found, look for a mix matching the attributes usage
+//  - 3 if none found, default to device and output selection by policy rules.
+
+status_t AudioPolicyManager::registerPolicyMixes(Vector<AudioMix> mixes)
+{
+    sp<HwModule> module;
+    for (size_t i = 0; i < mHwModules.size(); i++) {
+        if (strcmp(AUDIO_HARDWARE_MODULE_ID_REMOTE_SUBMIX, mHwModules[i]->mName) == 0 &&
+                mHwModules[i]->mHandle != 0) {
+            module = mHwModules[i];
+            break;
+        }
+    }
+
+    if (module == 0) {
+        return INVALID_OPERATION;
+    }
+
+    ALOGV("registerPolicyMixes() num mixes %d", mixes.size());
+
+    for (size_t i = 0; i < mixes.size(); i++) {
+        String8 address = mixes[i].mRegistrationId;
+        ssize_t index = mPolicyMixes.indexOfKey(address);
+        if (index >= 0) {
+            ALOGE("registerPolicyMixes(): mix for address %s already registered", address.string());
+            continue;
+        }
+        audio_config_t outputConfig = mixes[i].mFormat;
+        audio_config_t inputConfig = mixes[i].mFormat;
+        // NOTE: audio flinger mixer does not support mono output: configure remote submix HAL in
+        // stereo and let audio flinger do the channel conversion if needed.
+        outputConfig.channel_mask = AUDIO_CHANNEL_OUT_STEREO;
+        inputConfig.channel_mask = AUDIO_CHANNEL_IN_STEREO;
+        module->addOutputProfile(address, &outputConfig,
+                                 AUDIO_DEVICE_OUT_REMOTE_SUBMIX, address);
+        module->addInputProfile(address, &inputConfig,
+                                 AUDIO_DEVICE_IN_REMOTE_SUBMIX, address);
+        sp<AudioPolicyMix> policyMix = new AudioPolicyMix();
+        policyMix->mMix = mixes[i];
+        mPolicyMixes.add(address, policyMix);
+        if (mixes[i].mMixType == MIX_TYPE_PLAYERS) {
+            setDeviceConnectionStateInt(AUDIO_DEVICE_IN_REMOTE_SUBMIX,
+                                     AUDIO_POLICY_DEVICE_STATE_AVAILABLE,
+                                     address.string());
+        } else {
+            setDeviceConnectionStateInt(AUDIO_DEVICE_OUT_REMOTE_SUBMIX,
+                                     AUDIO_POLICY_DEVICE_STATE_AVAILABLE,
+                                     address.string());
+        }
+    }
+    return NO_ERROR;
+}
+
+status_t AudioPolicyManager::unregisterPolicyMixes(Vector<AudioMix> mixes)
+{
+    sp<HwModule> module;
+    for (size_t i = 0; i < mHwModules.size(); i++) {
+        if (strcmp(AUDIO_HARDWARE_MODULE_ID_REMOTE_SUBMIX, mHwModules[i]->mName) == 0 &&
+                mHwModules[i]->mHandle != 0) {
+            module = mHwModules[i];
+            break;
+        }
+    }
+
+    if (module == 0) {
+        return INVALID_OPERATION;
+    }
+
+    ALOGV("unregisterPolicyMixes() num mixes %d", mixes.size());
+
+    for (size_t i = 0; i < mixes.size(); i++) {
+        String8 address = mixes[i].mRegistrationId;
+        ssize_t index = mPolicyMixes.indexOfKey(address);
+        if (index < 0) {
+            ALOGE("unregisterPolicyMixes(): mix for address %s not registered", address.string());
+            continue;
+        }
+
+        mPolicyMixes.removeItemsAt(index);
+
+        if (getDeviceConnectionState(AUDIO_DEVICE_IN_REMOTE_SUBMIX, address.string()) ==
+                                             AUDIO_POLICY_DEVICE_STATE_AVAILABLE)
+        {
+            setDeviceConnectionStateInt(AUDIO_DEVICE_IN_REMOTE_SUBMIX,
+                                     AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE,
+                                     address.string());
+        }
+
+        if (getDeviceConnectionState(AUDIO_DEVICE_OUT_REMOTE_SUBMIX, address.string()) ==
+                                             AUDIO_POLICY_DEVICE_STATE_AVAILABLE)
+        {
+            setDeviceConnectionStateInt(AUDIO_DEVICE_OUT_REMOTE_SUBMIX,
+                                     AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE,
+                                     address.string());
+        }
+        module->removeOutputProfile(address);
+        module->removeInputProfile(address);
+    }
+    return NO_ERROR;
+}
+
 
 status_t AudioPolicyManager::dump(int fd)
 {
@@ -2323,6 +2560,7 @@
             }
 
             if (!outputDesc->mProfile->isCompatibleProfile(devDesc->mDeviceType,
+                                                           devDesc->mAddress,
                                                            patch->sources[0].sample_rate,
                                                          NULL,  // updatedSamplingRate
                                                          patch->sources[0].format,
@@ -2377,13 +2615,14 @@
             }
 
             if (!inputDesc->mProfile->isCompatibleProfile(devDesc->mDeviceType,
-                                                         patch->sinks[0].sample_rate,
-                                                         NULL, /*updatedSampleRate*/
-                                                         patch->sinks[0].format,
-                                                         patch->sinks[0].channel_mask,
-                                                         // FIXME for the parameter type,
-                                                         // and the NONE
-                                                         (audio_output_flags_t)
+                                                          devDesc->mAddress,
+                                                          patch->sinks[0].sample_rate,
+                                                          NULL, /*updatedSampleRate*/
+                                                          patch->sinks[0].format,
+                                                          patch->sinks[0].channel_mask,
+                                                          // FIXME for the parameter type,
+                                                          // and the NONE
+                                                          (audio_output_flags_t)
                                                             AUDIO_INPUT_FLAG_NONE)) {
                 return INVALID_OPERATION;
             }
@@ -2650,13 +2889,10 @@
 
 void AudioPolicyManager::clearAudioPatches(uid_t uid)
 {
-    for (ssize_t i = 0; i < (ssize_t)mAudioPatches.size(); i++)  {
+    for (ssize_t i = (ssize_t)mAudioPatches.size() - 1; i >= 0; i--)  {
         sp<AudioPatch> patchDesc = mAudioPatches.valueAt(i);
         if (patchDesc->mUid == uid) {
-            // releaseAudioPatch() removes the patch from mAudioPatches
-            if (releaseAudioPatch(mAudioPatches.keyAt(i), uid) == NO_ERROR) {
-                i--;
-            }
+            releaseAudioPatch(mAudioPatches.keyAt(i), uid);
         }
     }
 }
@@ -2667,7 +2903,7 @@
 {
     *session = (audio_session_t)mpClientInterface->newAudioUniqueId();
     *ioHandle = (audio_io_handle_t)mpClientInterface->newAudioUniqueId();
-    *device = getDeviceForInputSource(AUDIO_SOURCE_HOTWORD);
+    *device = getDeviceAndMixForInputSource(AUDIO_SOURCE_HOTWORD);
 
     mSoundTriggerSessions.add(*session, *ioHandle);
 
@@ -3228,9 +3464,13 @@
             }
             for (size_t j = 0; j < mHwModules[i]->mOutputProfiles.size(); j++)
             {
-                if (mHwModules[i]->mOutputProfiles[j]->mSupportedDevices.types() & device) {
-                    ALOGV("checkOutputsForDevice(): adding profile %zu from module %zu", j, i);
-                    profiles.add(mHwModules[i]->mOutputProfiles[j]);
+                sp<IOProfile> profile = mHwModules[i]->mOutputProfiles[j];
+                if (profile->mSupportedDevices.types() & device) {
+                    if (!deviceDistinguishesOnAddress(device) ||
+                            address == profile->mSupportedDevices[0]->mAddress) {
+                        profiles.add(profile);
+                        ALOGV("checkOutputsForDevice(): adding profile %zu from module %zu", j, i);
+                    }
                 }
             }
         }
@@ -3362,7 +3602,18 @@
 
                 if (output != AUDIO_IO_HANDLE_NONE) {
                     addOutput(output, desc);
-                    if ((desc->mFlags & AUDIO_OUTPUT_FLAG_DIRECT) == 0) {
+                    if (deviceDistinguishesOnAddress(device) && address != "0") {
+                        ssize_t index = mPolicyMixes.indexOfKey(address);
+                        if (index >= 0) {
+                            mPolicyMixes[index]->mOutput = desc;
+                            desc->mPolicyMix = &mPolicyMixes[index]->mMix;
+                        } else {
+                            ALOGE("checkOutputsForDevice() cannot find policy for address %s",
+                                  address.string());
+                        }
+                    } else if ((desc->mFlags & AUDIO_OUTPUT_FLAG_DIRECT) == 0) {
+                        // no duplicated output for direct outputs and
+                        // outputs used by dynamic policy mixes
                         audio_io_handle_t duplicatedOutput = AUDIO_IO_HANDLE_NONE;
 
                         // set initial stream volume for device
@@ -3425,15 +3676,15 @@
         for (size_t i = 0; i < mOutputs.size(); i++) {
             desc = mOutputs.valueAt(i);
             if (!desc->isDuplicated()) {
-                if  (!(desc->mProfile->mSupportedDevices.types()
+                // exact match on device
+                if (deviceDistinguishesOnAddress(device) &&
+                        (desc->mProfile->mSupportedDevices.types() == device)) {
+                    findIoHandlesByAddress(desc, address, outputs);
+                } else if (!(desc->mProfile->mSupportedDevices.types()
                         & mAvailableOutputDevices.types())) {
                     ALOGV("checkOutputsForDevice(): disconnecting adding output %d",
                             mOutputs.keyAt(i));
                     outputs.add(mOutputs.keyAt(i));
-                } else if (deviceDistinguishesOnAddress(device) &&
-                        // exact match on device
-                        (desc->mProfile->mSupportedDevices.types() == device)) {
-                    findIoHandlesByAddress(desc, address, outputs);
                 }
             }
         }
@@ -3495,11 +3746,15 @@
                  profile_index < mHwModules[module_idx]->mInputProfiles.size();
                  profile_index++)
             {
-                if (mHwModules[module_idx]->mInputProfiles[profile_index]->mSupportedDevices.types()
-                        & (device & ~AUDIO_DEVICE_BIT_IN)) {
-                    ALOGV("checkInputsForDevice(): adding profile %zu from module %zu",
-                          profile_index, module_idx);
-                    profiles.add(mHwModules[module_idx]->mInputProfiles[profile_index]);
+                sp<IOProfile> profile = mHwModules[module_idx]->mInputProfiles[profile_index];
+
+                if (profile->mSupportedDevices.types() & (device & ~AUDIO_DEVICE_BIT_IN)) {
+                    if (!deviceDistinguishesOnAddress(device) ||
+                            address == profile->mSupportedDevices[0]->mAddress) {
+                        profiles.add(profile);
+                        ALOGV("checkInputsForDevice(): adding profile %zu from module %zu",
+                              profile_index, module_idx);
+                    }
                 }
             }
         }
@@ -3667,6 +3922,12 @@
         return;
     }
 
+    for (size_t i = 0; i < mPolicyMixes.size(); i++) {
+        if (mPolicyMixes[i]->mOutput == outputDesc) {
+            mPolicyMixes[i]->mOutput.clear();
+        }
+    }
+
     // look for duplicated outputs connected to the output being removed.
     for (size_t i = 0; i < mOutputs.size(); i++) {
         sp<AudioOutputDescriptor> dupOutputDesc = mOutputs.valueAt(i);
@@ -3970,7 +4231,7 @@
         }
     }
 
-    audio_devices_t device = getDeviceForInputSource(inputDesc->mInputSource);
+    audio_devices_t device = getDeviceAndMixForInputSource(inputDesc->mInputSource);
 
     ALOGV("getNewInputDevice() selected device %x", device);
     return device;
@@ -4236,7 +4497,8 @@
         //   - cannot route from voice call RX OR
         //   - audio HAL version is < 3.0 and TX device is on the primary HW module
         if (mPhoneState == AUDIO_MODE_IN_CALL) {
-            audio_devices_t txDevice = getDeviceForInputSource(AUDIO_SOURCE_VOICE_COMMUNICATION);
+            audio_devices_t txDevice =
+                    getDeviceAndMixForInputSource(AUDIO_SOURCE_VOICE_COMMUNICATION);
             sp<AudioOutputDescriptor> hwOutputDesc = mOutputs.valueFor(mPrimaryOutput);
             if (((mAvailableInputDevices.types() &
                     AUDIO_DEVICE_IN_TELEPHONY_RX & ~AUDIO_DEVICE_BIT_IN) == 0) ||
@@ -4376,7 +4638,9 @@
         uint32_t device2 = AUDIO_DEVICE_NONE;
         if (strategy != STRATEGY_SONIFICATION) {
             // no sonification on remote submix (e.g. WFD)
-            device2 = availableOutputDeviceTypes & AUDIO_DEVICE_OUT_REMOTE_SUBMIX;
+            if (mAvailableOutputDevices.getDevice(AUDIO_DEVICE_OUT_REMOTE_SUBMIX,
+                                                  String8("0")) != 0) {
+                device2 = availableOutputDeviceTypes & AUDIO_DEVICE_OUT_REMOTE_SUBMIX;
+            }
         }
         if ((device2 == AUDIO_DEVICE_NONE) &&
                 (mForceUse[AUDIO_POLICY_FORCE_FOR_MEDIA] != AUDIO_POLICY_FORCE_NO_BT_A2DP) &&
@@ -4786,6 +5050,7 @@
 }
 
 sp<AudioPolicyManager::IOProfile> AudioPolicyManager::getInputProfile(audio_devices_t device,
+                                                   String8 address,
                                                    uint32_t& samplingRate,
                                                    audio_format_t format,
                                                    audio_channel_mask_t channelMask,
@@ -4803,9 +5068,10 @@
         {
             sp<IOProfile> profile = mHwModules[i]->mInputProfiles[j];
             // profile->log();
-            if (profile->isCompatibleProfile(device, samplingRate,
+            if (profile->isCompatibleProfile(device, address, samplingRate,
                                              &samplingRate /*updatedSamplingRate*/,
                                              format, channelMask, (audio_output_flags_t) flags)) {
                 return profile;
             }
         }
@@ -4813,11 +5079,42 @@
     return NULL;
 }
 
+
+audio_devices_t AudioPolicyManager::getDeviceAndMixForInputSource(audio_source_t inputSource,
+                                                            AudioMix **policyMix)
+{
+    audio_devices_t availableDeviceTypes = mAvailableInputDevices.types() &
+                                            ~AUDIO_DEVICE_BIT_IN;
+
+    for (size_t i = 0; i < mPolicyMixes.size(); i++) {
+        if (mPolicyMixes[i]->mMix.mMixType != MIX_TYPE_RECORDERS) {
+            continue;
+        }
+        for (size_t j = 0; j < mPolicyMixes[i]->mMix.mCriteria.size(); j++) {
+            if ((RULE_MATCH_ATTRIBUTE_CAPTURE_PRESET == mPolicyMixes[i]->mMix.mCriteria[j].mRule &&
+                    mPolicyMixes[i]->mMix.mCriteria[j].mAttr.mSource == inputSource) ||
+               (RULE_EXCLUDE_ATTRIBUTE_CAPTURE_PRESET == mPolicyMixes[i]->mMix.mCriteria[j].mRule &&
+                    mPolicyMixes[i]->mMix.mCriteria[j].mAttr.mSource != inputSource)) {
+                if (availableDeviceTypes & AUDIO_DEVICE_IN_REMOTE_SUBMIX) {
+                    if (policyMix != NULL) {
+                        *policyMix = &mPolicyMixes[i]->mMix;
+                    }
+                    return AUDIO_DEVICE_IN_REMOTE_SUBMIX;
+                }
+                break;
+            }
+        }
+    }
+
+    return getDeviceForInputSource(inputSource);
+}
+
 audio_devices_t AudioPolicyManager::getDeviceForInputSource(audio_source_t inputSource)
 {
     uint32_t device = AUDIO_DEVICE_NONE;
     audio_devices_t availableDeviceTypes = mAvailableInputDevices.types() &
                                             ~AUDIO_DEVICE_BIT_IN;
+
     switch (inputSource) {
     case AUDIO_SOURCE_VOICE_UPLINK:
       if (availableDeviceTypes & AUDIO_DEVICE_IN_VOICE_CALL) {
@@ -4931,7 +5228,7 @@
 }
 
 bool AudioPolicyManager::deviceDistinguishesOnAddress(audio_devices_t device) {
-    return ((device & APM_AUDIO_DEVICE_MATCH_ADDRESS_ALL) != 0);
+    return ((device & APM_AUDIO_DEVICE_MATCH_ADDRESS_ALL & ~AUDIO_DEVICE_BIT_IN) != 0);
 }
 
 audio_io_handle_t AudioPolicyManager::getActiveInput(bool ignoreVirtualInputs)
@@ -5341,6 +5638,18 @@
     }
 
     float volume = computeVolume(stream, index, output, device);
+    // unit gain if rerouting to external policy
+    if (device == AUDIO_DEVICE_OUT_REMOTE_SUBMIX) {
+        ssize_t index = mOutputs.indexOfKey(output);
+        if (index >= 0) {
+            sp<AudioOutputDescriptor> outputDesc = mOutputs.valueAt(index);
+            if (outputDesc->mPolicyMix != NULL) {
+                ALOGV("max gain when rerouting for output=%d", output);
+                volume = 1.0f;
+            }
+        }
+    }
     // We actually change the volume if:
     // - the float value returned by computeVolume() changed
     // - the force flag is set
@@ -5523,7 +5832,8 @@
 AudioPolicyManager::AudioOutputDescriptor::AudioOutputDescriptor(
         const sp<IOProfile>& profile)
     : mId(0), mIoHandle(0), mLatency(0),
-    mFlags((audio_output_flags_t)0), mDevice(AUDIO_DEVICE_NONE), mPatchHandle(0),
+    mFlags((audio_output_flags_t)0), mDevice(AUDIO_DEVICE_NONE), mPolicyMix(NULL),
+    mPatchHandle(0),
     mOutput1(0), mOutput2(0), mProfile(profile), mDirectOpenCount(0)
 {
     // clear usage count for all stream types
@@ -5718,7 +6028,7 @@
 
 AudioPolicyManager::AudioInputDescriptor::AudioInputDescriptor(const sp<IOProfile>& profile)
     : mId(0), mIoHandle(0),
-      mDevice(AUDIO_DEVICE_NONE), mPatchHandle(0), mRefCount(0),
+      mDevice(AUDIO_DEVICE_NONE), mPolicyMix(NULL), mPatchHandle(0), mRefCount(0),
       mInputSource(AUDIO_SOURCE_DEFAULT), mProfile(profile), mIsSoundTrigger(false)
 {
     if (profile != NULL) {
@@ -6008,6 +6318,69 @@
     return NO_ERROR;
 }
 
+status_t AudioPolicyManager::HwModule::addOutputProfile(String8 name, const audio_config_t *config,
+                                                  audio_devices_t device, String8 address)
+{
+    sp<IOProfile> profile = new IOProfile(name, AUDIO_PORT_ROLE_SOURCE, this);
+
+    profile->mSamplingRates.add(config->sample_rate);
+    profile->mChannelMasks.add(config->channel_mask);
+    profile->mFormats.add(config->format);
+
+    sp<DeviceDescriptor> devDesc = new DeviceDescriptor(String8(""), device);
+    devDesc->mAddress = address;
+    profile->mSupportedDevices.add(devDesc);
+
+    mOutputProfiles.add(profile);
+
+    return NO_ERROR;
+}
+
+status_t AudioPolicyManager::HwModule::removeOutputProfile(String8 name)
+{
+    for (size_t i = 0; i < mOutputProfiles.size(); i++) {
+        if (mOutputProfiles[i]->mName == name) {
+            mOutputProfiles.removeAt(i);
+            break;
+        }
+    }
+
+    return NO_ERROR;
+}
+
+status_t AudioPolicyManager::HwModule::addInputProfile(String8 name, const audio_config_t *config,
+                                                  audio_devices_t device, String8 address)
+{
+    sp<IOProfile> profile = new IOProfile(name, AUDIO_PORT_ROLE_SINK, this);
+
+    profile->mSamplingRates.add(config->sample_rate);
+    profile->mChannelMasks.add(config->channel_mask);
+    profile->mFormats.add(config->format);
+
+    sp<DeviceDescriptor> devDesc = new DeviceDescriptor(String8(""), device);
+    devDesc->mAddress = address;
+    profile->mSupportedDevices.add(devDesc);
+
+    ALOGV("addInputProfile() name %s rate %d mask 0x08", name.string(), config->sample_rate, config->channel_mask);
+
+    mInputProfiles.add(profile);
+
+    return NO_ERROR;
+}
+
+status_t AudioPolicyManager::HwModule::removeInputProfile(String8 name)
+{
+    for (size_t i = 0; i < mInputProfiles.size(); i++) {
+        if (mInputProfiles[i]->mName == name) {
+            mInputProfiles.removeAt(i);
+            break;
+        }
+    }
+
+    return NO_ERROR;
+}
+
+
 void AudioPolicyManager::HwModule::dump(int fd)
 {
     const size_t SIZE = 256;
@@ -6325,6 +6698,10 @@
 
 status_t AudioPolicyManager::AudioPort::checkExactSamplingRate(uint32_t samplingRate) const
 {
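+    // an empty list is treated as a wildcard: any sampling rate is accepted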
+    if (mSamplingRates.isEmpty()) {
+        return NO_ERROR;
+    }
+
     for (size_t i = 0; i < mSamplingRates.size(); i ++) {
         if (mSamplingRates[i] == samplingRate) {
             return NO_ERROR;
@@ -6336,6 +6713,10 @@
 status_t AudioPolicyManager::AudioPort::checkCompatibleSamplingRate(uint32_t samplingRate,
         uint32_t *updatedSamplingRate) const
 {
+    if (mSamplingRates.isEmpty()) {
+        return NO_ERROR;
+    }
+
     // Search for the closest supported sampling rate that is above (preferred)
     // or below (acceptable) the desired sampling rate, within a permitted ratio.
     // The sampling rates do not need to be sorted in ascending order.
@@ -6394,6 +6775,10 @@
 
 status_t AudioPolicyManager::AudioPort::checkExactChannelMask(audio_channel_mask_t channelMask) const
 {
+    if (mChannelMasks.isEmpty()) {
+        return NO_ERROR;
+    }
+
     for (size_t i = 0; i < mChannelMasks.size(); i++) {
         if (mChannelMasks[i] == channelMask) {
             return NO_ERROR;
@@ -6405,6 +6790,10 @@
 status_t AudioPolicyManager::AudioPort::checkCompatibleChannelMask(audio_channel_mask_t channelMask)
         const
 {
+    if (mChannelMasks.isEmpty()) {
+        return NO_ERROR;
+    }
+
     const bool isRecordThread = mType == AUDIO_PORT_TYPE_MIX && mRole == AUDIO_PORT_ROLE_SINK;
     for (size_t i = 0; i < mChannelMasks.size(); i ++) {
         // FIXME Does not handle multi-channel automatic conversions yet
@@ -6428,6 +6817,10 @@
 
 status_t AudioPolicyManager::AudioPort::checkFormat(audio_format_t format) const
 {
+    if (mFormats.isEmpty()) {
+        return NO_ERROR;
+    }
+
     for (size_t i = 0; i < mFormats.size(); i ++) {
         if (mFormats[i] == format) {
             return NO_ERROR;
@@ -6894,17 +7287,18 @@
 // Sampling rate, format and channel mask must be specified in order to
 // get a valid match
 bool AudioPolicyManager::IOProfile::isCompatibleProfile(audio_devices_t device,
-                                                            uint32_t samplingRate,
-                                                            uint32_t *updatedSamplingRate,
-                                                            audio_format_t format,
-                                                            audio_channel_mask_t channelMask,
-                                                            uint32_t flags) const
+                                                        String8 address,
+                                                        uint32_t samplingRate,
+                                                        uint32_t *updatedSamplingRate,
+                                                        audio_format_t format,
+                                                        audio_channel_mask_t channelMask,
+                                                        uint32_t flags) const
 {
     const bool isPlaybackThread = mType == AUDIO_PORT_TYPE_MIX && mRole == AUDIO_PORT_ROLE_SOURCE;
     const bool isRecordThread = mType == AUDIO_PORT_TYPE_MIX && mRole == AUDIO_PORT_ROLE_SINK;
     ALOG_ASSERT(isPlaybackThread != isRecordThread);
 
-    if ((mSupportedDevices.types() & device) != device) {
+    if (device != AUDIO_DEVICE_NONE && mSupportedDevices.getDevice(device, address) == 0) {
         return false;
     }
 
@@ -7099,7 +7493,8 @@
                                  devName);
             if (type != AUDIO_DEVICE_NONE) {
                 sp<DeviceDescriptor> dev = new DeviceDescriptor(String8(""), type);
-                if (type == AUDIO_DEVICE_IN_REMOTE_SUBMIX) {
+                if (type == AUDIO_DEVICE_IN_REMOTE_SUBMIX ||
+                        type == AUDIO_DEVICE_OUT_REMOTE_SUBMIX ) {
                     dev->mAddress = String8("0");
                 }
                 add(dev);
@@ -7111,7 +7506,7 @@
                 }
             }
          }
-        devName = strtok(NULL, "|");
+         devName = strtok(NULL, "|");
      }
 }
 
@@ -7121,13 +7516,15 @@
     sp<DeviceDescriptor> device;
     for (size_t i = 0; i < size(); i++) {
         if (itemAt(i)->mDeviceType == type) {
-            device = itemAt(i);
-            if (itemAt(i)->mAddress = address) {
-                break;
+            if (address == "" || itemAt(i)->mAddress == address) {
+                device = itemAt(i);
+                if (itemAt(i)->mAddress == address) {
+                    break;
+                }
             }
         }
     }
-    ALOGV("DeviceVector::getDevice() for type %d address %s found %p",
+    ALOGV("DeviceVector::getDevice() for type %08x address %s found %p",
           type, address.string(), device.get());
     return device;
 }
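
Note: besides removing the assignment-instead-of-comparison bug ("=" vs "==")
in the address check, the new lookup prefers an exact address match and falls
back to the first device of the matching type when the caller passes an empty
address. A self-contained sketch of the same logic (std types; the Device
struct is a hypothetical stand-in for DeviceDescriptor):

    #include <memory>
    #include <string>
    #include <vector>

    struct Device {
        int type;
        std::string address;
    };

    std::shared_ptr<Device> getDevice(
            const std::vector<std::shared_ptr<Device>>& devices,
            int type, const std::string& address) {
        std::shared_ptr<Device> found;
        for (const auto& d : devices) {
            if (d->type != type) {
                continue;
            }
            if (address.empty() || d->address == address) {
                found = d;
                if (d->address == address) {
                    break;  // exact address match wins immediately
                }
            }
        }
        return found;
    }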
@@ -7165,13 +7562,9 @@
         audio_devices_t type, String8 address) const
 {
     DeviceVector devices;
-    //ALOGV("   looking for device=%x, addr=%s", type, address.string());
     for (size_t i = 0; i < size(); i++) {
-        //ALOGV("     at i=%d: device=%x, addr=%s",
-        //        i, itemAt(i)->mDeviceType, itemAt(i)->mAddress.string());
         if (itemAt(i)->mDeviceType == type) {
             if (itemAt(i)->mAddress == address) {
-                //ALOGV("      found matching address %s", address.string());
                 devices.add(itemAt(i));
             }
         }
@@ -7599,6 +7992,7 @@
     case AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE:
     case AUDIO_USAGE_ASSISTANCE_SONIFICATION:
     case AUDIO_USAGE_GAME:
+    case AUDIO_USAGE_VIRTUAL_SOURCE:
         break;
     default:
         return false;
diff --git a/services/audiopolicy/AudioPolicyManager.h b/services/audiopolicy/AudioPolicyManager.h
index 17348e9..ff3afab 100644
--- a/services/audiopolicy/AudioPolicyManager.h
+++ b/services/audiopolicy/AudioPolicyManager.h
@@ -23,6 +23,7 @@
 #include <utils/Errors.h>
 #include <utils/KeyedVector.h>
 #include <utils/SortedVector.h>
+#include <media/AudioPolicy.h>
 #include "AudioPolicyInterface.h"
 
 
@@ -154,6 +155,8 @@
         // return whether a stream is playing remotely, override to change the definition of
         //   local/remote playback, used for instance by notification manager to not make
         //   media players lose audio focus when not playing locally
+        //   For the base implementation, "remotely" means playing during screen mirroring,
+        //   which uses an output with a non-empty, non-"0" address for playback.
         virtual bool isStreamActiveRemotely(audio_stream_type_t stream, uint32_t inPastMs = 0) const;
         virtual bool isSourceActive(audio_source_t source) const;
 
@@ -184,6 +187,9 @@
 
         virtual status_t releaseSoundTriggerSession(audio_session_t session);
 
+        virtual status_t registerPolicyMixes(Vector<AudioMix> mixes);
+        virtual status_t unregisterPolicyMixes(Vector<AudioMix> mixes);
+
 protected:
 
         enum routing_strategy {
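
Note: these two entry points are the manager-side surface for dynamic audio
policies. A hedged caller-side sketch, assuming the AOSP build environment and
that the mixes vector was populated elsewhere (applyPolicyMixes is a
hypothetical helper, not part of this patch):

    status_t applyPolicyMixes(AudioPolicyManager& apm, Vector<AudioMix>& mixes) {
        status_t res = apm.registerPolicyMixes(mixes);
        if (res != NO_ERROR) {
            return res;
        }
        // ... audio is routed through the registered mixes here ...
        return apm.unregisterPolicyMixes(mixes);
    }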
@@ -392,6 +398,7 @@
             // For input, flags is interpreted as audio_input_flags_t.
             // TODO: merge audio_output_flags_t and audio_input_flags_t.
             bool isCompatibleProfile(audio_devices_t device,
+                                     String8 address,
                                      uint32_t samplingRate,
                                      uint32_t *updatedSamplingRate,
                                      audio_format_t format,
@@ -415,6 +422,13 @@
             status_t loadInput(cnode *root);
             status_t loadDevice(cnode *root);
 
+            status_t addOutputProfile(String8 name, const audio_config_t *config,
+                                      audio_devices_t device, String8 address);
+            status_t removeOutputProfile(String8 name);
+            status_t addInputProfile(String8 name, const audio_config_t *config,
+                                      audio_devices_t device, String8 address);
+            status_t removeInputProfile(String8 name);
+
             void dump(int fd);
 
             const char *const        mName; // base name of the audio HW module (primary, a2dp ...)
@@ -483,6 +497,7 @@
             uint32_t mLatency;                  //
             audio_output_flags_t mFlags;   //
             audio_devices_t mDevice;                   // current device this output is routed to
+            AudioMix *mPolicyMix;             // non-NULL when used by a dynamic policy
             audio_patch_handle_t mPatchHandle;
             uint32_t mRefCount[AUDIO_STREAM_CNT]; // number of streams of each type using this output
             nsecs_t mStopTime[AUDIO_STREAM_CNT];
@@ -508,6 +523,7 @@
             audio_port_handle_t           mId;
             audio_io_handle_t             mIoHandle;       // input handle
             audio_devices_t               mDevice;         // current device this input is routed to
+            AudioMix                      *mPolicyMix;     // non-NULL when used by a dynamic policy
             audio_patch_handle_t          mPatchHandle;
             uint32_t                      mRefCount;       // number of AudioRecord clients using
                                                            // this input
@@ -731,10 +747,11 @@
                                        audio_format_t format);
         // samplingRate parameter is an in/out and so may be modified
         sp<IOProfile> getInputProfile(audio_devices_t device,
-                                   uint32_t& samplingRate,
-                                   audio_format_t format,
-                                   audio_channel_mask_t channelMask,
-                                   audio_input_flags_t flags);
+                                      String8 address,
+                                      uint32_t& samplingRate,
+                                      audio_format_t format,
+                                      audio_channel_mask_t channelMask,
+                                      audio_input_flags_t flags);
         sp<IOProfile> getProfileForDirectOutput(audio_devices_t device,
                                                        uint32_t samplingRate,
                                                        audio_format_t format,
@@ -832,6 +849,17 @@
         uint32_t mBeaconPlayingRefCount;// ref count for the playing beacon streams
         bool mBeaconMuted;              // has STREAM_TTS been muted
 
+        // Custom mix entry in mPolicyMixes
+        class AudioPolicyMix : public RefBase {
+        public:
+            AudioPolicyMix() {}
+
+            AudioMix    mMix;                   // Audio policy mix descriptor
+            sp<AudioOutputDescriptor> mOutput;  // Corresponding output stream
+        };
+        DefaultKeyedVector<String8, sp<AudioPolicyMix> > mPolicyMixes; // list of registered mixes
+
 #ifdef AUDIO_POLICY_TEST
         Mutex   mLock;
         Condition mWaitWorkCV;
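
Note: registered mixes are keyed by their registration id, which is also what
ties a mix back to the output opened for it. A lookup sketch using the
container declared above (valueFor() returns the default value, a null sp<>,
on a miss; registrationId is assumed to be in scope):

    sp<AudioPolicyMix> policyMix = mPolicyMixes.valueFor(registrationId);
    if (policyMix != 0) {
        sp<AudioOutputDescriptor> output = policyMix->mOutput;  // output serving the mix
        // ... route to 'output' ...
    }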
@@ -885,6 +913,16 @@
         uint32_t handleEventForBeacon(int event);
         uint32_t setBeaconMute(bool mute);
         bool     isValidAttributes(const audio_attributes_t *paa);
+
+        // Select the input device corresponding to the requested audio source and return the
+        // associated policy mix, if any. Calls getDeviceForInputSource().
+        audio_devices_t getDeviceAndMixForInputSource(audio_source_t inputSource,
+                                                      AudioMix **policyMix = NULL);
+
+        // Called by setDeviceConnectionState().
+        status_t setDeviceConnectionStateInt(audio_devices_t device,
+                                             audio_policy_dev_state_t state,
+                                             const char *device_address);
 };
 
 };
diff --git a/services/audiopolicy/AudioPolicyService.h b/services/audiopolicy/AudioPolicyService.h
index da17728..09375cf 100644
--- a/services/audiopolicy/AudioPolicyService.h
+++ b/services/audiopolicy/AudioPolicyService.h
@@ -30,6 +30,7 @@
 #include <media/IAudioPolicyService.h>
 #include <media/ToneGenerator.h>
 #include <media/AudioEffect.h>
+#include <media/AudioPolicy.h>
 #include <hardware_legacy/AudioPolicyInterface.h>
 #include "AudioPolicyEffects.h"
 #include "AudioPolicyManager.h"
@@ -185,6 +186,8 @@
 
     virtual audio_mode_t getPhoneState();
 
+    virtual status_t registerPolicyMixes(Vector<AudioMix> mixes, bool registration);
+
             status_t doStopOutput(audio_io_handle_t output,
                                   audio_stream_type_t stream,
                                   audio_session_t session);
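
Note: unlike the manager, the service folds register and unregister into a
single binder entry point, dispatching on the registration flag. A plausible
implementation sketch (the mAudioPolicyManager member name is an assumption,
mirroring the manager-side pair declared earlier):

    status_t AudioPolicyService::registerPolicyMixes(Vector<AudioMix> mixes,
                                                     bool registration) {
        if (registration) {
            return mAudioPolicyManager->registerPolicyMixes(mixes);
        }
        return mAudioPolicyManager->unregisterPolicyMixes(mixes);
    }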
diff --git a/services/camera/libcameraservice/api1/Camera2Client.cpp b/services/camera/libcameraservice/api1/Camera2Client.cpp
index 6e7824e..dcab4ad 100644
--- a/services/camera/libcameraservice/api1/Camera2Client.cpp
+++ b/services/camera/libcameraservice/api1/Camera2Client.cpp
@@ -917,6 +917,15 @@
                 ALOGE("%s: Camera %d: Can't stop streaming: %s (%d)",
                         __FUNCTION__, mCameraId, strerror(-res), res);
             }
+
+            // Flush all in-progress captures and buffers in order to stop
+            // preview faster.
+            res = mDevice->flush();
+            if (res != OK) {
+                ALOGE("%s: Camera %d: Unable to flush pending requests: %s (%d)",
+                        __FUNCTION__, mCameraId, strerror(-res), res);
+            }
+
             res = mDevice->waitUntilDrained();
             if (res != OK) {
                 ALOGE("%s: Camera %d: Waiting to stop streaming failed: %s (%d)",
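
Note: the ordering here matters: flushing first drops queued captures so that
waitUntilDrained() does not sit through them. The pattern, as a hedged sketch
against a hypothetical device interface:

    // Stop streaming, drop whatever is still queued, then wait for idle.
    status_t stopAndDrain(Device& device) {
        status_t res = device.flush();     // abandon in-flight requests
        if (res != OK) {
            return res;
        }
        return device.waitUntilDrained();  // returns much sooner after a flush
    }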
diff --git a/services/camera/libcameraservice/api1/client2/Parameters.cpp b/services/camera/libcameraservice/api1/client2/Parameters.cpp
index 234247b..4f4cfb0 100644
--- a/services/camera/libcameraservice/api1/client2/Parameters.cpp
+++ b/services/camera/libcameraservice/api1/client2/Parameters.cpp
@@ -596,6 +596,10 @@
                     supportedSceneModes +=
                         CameraParameters::SCENE_MODE_BARCODE;
                     break;
+                case ANDROID_CONTROL_SCENE_MODE_HDR:
+                    supportedSceneModes +=
+                        CameraParameters::SCENE_MODE_HDR;
+                    break;
                 default:
                     ALOGW("%s: Camera %d: Unknown scene mode value: %d",
                         __FUNCTION__, cameraId,
@@ -2386,6 +2390,8 @@
             ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT :
         !strcmp(sceneMode, CameraParameters::SCENE_MODE_BARCODE) ?
             ANDROID_CONTROL_SCENE_MODE_BARCODE:
+        !strcmp(sceneMode, CameraParameters::SCENE_MODE_HDR) ?
+            ANDROID_CONTROL_SCENE_MODE_HDR:
         -1;
 }
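
Note: every scene mode must be added in two places, the metadata-to-string
switch and the string-to-metadata ternary chain above. A table-driven
alternative (illustrative only, with placeholder names and values) would keep
the two directions from drifting:

    #include <cstring>

    enum SceneMode { SCENE_BARCODE, SCENE_HDR, SCENE_UNKNOWN = -1 };

    struct SceneModeEntry { const char* name; SceneMode mode; };
    static const SceneModeEntry kSceneModes[] = {
        { "barcode", SCENE_BARCODE },
        { "hdr",     SCENE_HDR },
    };

    SceneMode sceneModeFromString(const char* s) {
        for (const SceneModeEntry& e : kSceneModes) {
            if (std::strcmp(e.name, s) == 0) return e.mode;
        }
        return SCENE_UNKNOWN;
    }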
 
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
index e3301aa..e6865bb 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
@@ -353,6 +353,14 @@
         useAsync = true;
     }
 
+    int32_t disallowedFlags = GraphicBuffer::USAGE_HW_VIDEO_ENCODER |
+                              GRALLOC_USAGE_RENDERSCRIPT;
+    int32_t allowedFlags = GraphicBuffer::USAGE_SW_READ_MASK |
+                           GraphicBuffer::USAGE_HW_TEXTURE |
+                           GraphicBuffer::USAGE_HW_COMPOSER;
+    bool flexibleConsumer = (consumerUsage & disallowedFlags) == 0 &&
+            (consumerUsage & allowedFlags) != 0;
+
     sp<IBinder> binder;
     sp<ANativeWindow> anw;
     if (bufferProducer != 0) {
@@ -382,14 +390,18 @@
     //       IMPLEMENTATION_DEFINED. b/9487482
     if (format >= HAL_PIXEL_FORMAT_RGBA_8888 &&
         format <= HAL_PIXEL_FORMAT_BGRA_8888) {
-        ALOGW("%s: Camera %d: Overriding format 0x%x to IMPLEMENTATION_DEFINED",
+        ALOGW("%s: Camera %d: Overriding format %#x to IMPLEMENTATION_DEFINED",
               __FUNCTION__, mCameraId, format);
         format = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
     }
 
-    // TODO: add startConfigure/stopConfigure call to CameraDeviceBase
-    // this will make it so Camera3Device doesn't call configure_streams
-    // after each call, but only once we are done with all.
+    // Round dimensions to the nearest supported dimensions for this format
+    if (flexibleConsumer && !CameraDeviceClient::roundBufferDimensionNearest(width, height,
+            format, mDevice->info(), /*out*/&width, /*out*/&height)) {
+        ALOGE("%s: No stream configurations with the format %#x defined, failed to create stream.",
+                __FUNCTION__, format);
+        return BAD_VALUE;
+    }
 
     int streamId = -1;
     res = mDevice->createStream(anw, width, height, format, &streamId);
@@ -425,6 +437,62 @@
     return res;
 }
 
+
+bool CameraDeviceClient::roundBufferDimensionNearest(int32_t width, int32_t height,
+        int32_t format, const CameraMetadata& info,
+        /*out*/int32_t* outWidth, /*out*/int32_t* outHeight) {
+
+    camera_metadata_ro_entry streamConfigs =
+            info.find(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS);
+
+    int32_t bestWidth = -1;
+    int32_t bestHeight = -1;
+
+    // Iterate through the listed stream configurations and find the one with the smallest
+    // Euclidean distance from the given dimensions for the given format.
+    for (size_t i = 0; i < streamConfigs.count; i += 4) {
+        int32_t fmt = streamConfigs.data.i32[i];
+        int32_t w = streamConfigs.data.i32[i + 1];
+        int32_t h = streamConfigs.data.i32[i + 2];
+
+        // Ignore input/output type for now
+        if (fmt == format) {
+            if (w == width && h == height) {
+                bestWidth = width;
+                bestHeight = height;
+                break;
+            } else if (w <= ROUNDING_WIDTH_CAP && (bestWidth == -1 ||
+                    CameraDeviceClient::euclidDistSquare(w, h, width, height) <
+                    CameraDeviceClient::euclidDistSquare(bestWidth, bestHeight, width, height))) {
+                bestWidth = w;
+                bestHeight = h;
+            }
+        }
+    }
+
+    if (bestWidth == -1) {
+        // Return false if no configurations for this format were listed
+        return false;
+    }
+
+    // Set the outputs to the closest width/height
+    if (outWidth != NULL) {
+        *outWidth = bestWidth;
+    }
+    if (outHeight != NULL) {
+        *outHeight = bestHeight;
+    }
+
+    // Return true if at least one configuration for this format was listed
+    return true;
+}
+
+int64_t CameraDeviceClient::euclidDistSquare(int32_t x0, int32_t y0, int32_t x1, int32_t y1) {
+    int64_t d0 = x0 - x1;
+    int64_t d1 = y0 - y1;
+    return d0 * d0 + d1 * d1;
+}
+
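+
+Note: a usage sketch for the rounding helper above (the 1000x700 request is a
+hypothetical value); createStream() applies it only to flexible consumers
+before talking to the HAL:
+
+    int32_t w = 1000, h = 700;
+    if (!CameraDeviceClient::roundBufferDimensionNearest(
+            w, h, format, mDevice->info(), /*out*/&w, /*out*/&h)) {
+        return BAD_VALUE;  // no configurations listed for this format
+    }
+    // w/h now hold the closest supported dimensions (width <= 1080)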
 // Create a request object from a template.
 status_t CameraDeviceClient::createDefaultRequest(int templateId,
                                                   /*out*/
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.h b/services/camera/libcameraservice/api2/CameraDeviceClient.h
index 9981dfe..84e46b7 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.h
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.h
@@ -154,6 +154,15 @@
     /** Utility members */
     bool enforceRequestPermissions(CameraMetadata& metadata);
 
+    // Find the square of the Euclidean distance between two points
+    static int64_t euclidDistSquare(int32_t x0, int32_t y0, int32_t x1, int32_t y1);
+
+    // Find the closest dimensions for a given format in available stream configurations with
+    // a width <= ROUNDING_WIDTH_CAP
+    static const int32_t ROUNDING_WIDTH_CAP = 1080;
+    static bool roundBufferDimensionNearest(int32_t width, int32_t height, int32_t format,
+            const CameraMetadata& info, /*out*/int32_t* outWidth, /*out*/int32_t* outHeight);
+
     // IGraphicsBufferProducer binder -> Stream ID
     KeyedVector<sp<IBinder>, int> mStreamMap;
 
diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp
index 5281ea6..bba3905 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Device.cpp
@@ -427,7 +427,7 @@
             InFlightRequest r = mInFlightMap.valueAt(i);
             lines.appendFormat("      Frame %d |  Timestamp: %" PRId64 ", metadata"
                     " arrived: %s, buffers left: %d\n", mInFlightMap.keyAt(i),
-                    r.captureTimestamp, r.haveResultMetadata ? "true" : "false",
+                    r.shutterTimestamp, r.haveResultMetadata ? "true" : "false",
                     r.numBuffersLeft);
         }
     }
@@ -1880,6 +1880,131 @@
     return true;
 }
 
+
+void Camera3Device::returnOutputBuffers(
+        const camera3_stream_buffer_t *outputBuffers, size_t numBuffers,
+        nsecs_t timestamp) {
+    for (size_t i = 0; i < numBuffers; i++) {
+        Camera3Stream *stream = Camera3Stream::cast(outputBuffers[i].stream);
+        status_t res = stream->returnBuffer(outputBuffers[i], timestamp);
+        // Note: stream may be deallocated at this point, if this buffer was
+        // the last reference to it.
+        if (res != OK) {
+            ALOGE("Can't return buffer to its stream: %s (%d)",
+                strerror(-res), res);
+        }
+    }
+}
+
+
+void Camera3Device::removeInFlightRequestIfReadyLocked(int idx) {
+
+    const InFlightRequest &request = mInFlightMap.valueAt(idx);
+    const uint32_t frameNumber = mInFlightMap.keyAt(idx);
+
+    nsecs_t sensorTimestamp = request.sensorTimestamp;
+    nsecs_t shutterTimestamp = request.shutterTimestamp;
+
+    // Check if it's okay to remove the request from InFlightMap:
+    // In the case of a successful request:
+    //      all input and output buffers, all result metadata, shutter callback
+    //      arrived.
+    // In the case of an unsuccessful request:
+    //      all input and output buffers arrived.
+    if (request.numBuffersLeft == 0 &&
+            (request.requestStatus != OK ||
+            (request.haveResultMetadata && shutterTimestamp != 0))) {
+        ATRACE_ASYNC_END("frame capture", frameNumber);
+
+        // Sanity check: the sensor timestamp should match the shutter timestamp
+        if (request.requestStatus == OK &&
+                sensorTimestamp != shutterTimestamp) {
+            SET_ERR("sensor timestamp (%" PRId64
+                ") for frame %d doesn't match shutter timestamp (%" PRId64 ")",
+                sensorTimestamp, frameNumber, shutterTimestamp);
+        }
+
+        // Only an unsuccessful request may still have pending output buffers
+        // to return at this point.
+        assert(request.requestStatus != OK ||
+               request.pendingOutputBuffers.size() == 0);
+        returnOutputBuffers(request.pendingOutputBuffers.array(),
+            request.pendingOutputBuffers.size(), 0);
+
+        mInFlightMap.removeItemsAt(idx, 1);
+
+        ALOGVV("%s: removed frame %d from InFlightMap", __FUNCTION__, frameNumber);
+    }
+
+    // Sanity check - if we have too many in-flight frames, something has
+    // likely gone wrong
+    if (mInFlightMap.size() > kInFlightWarnLimit) {
+        CLOGE("In-flight list too large: %zu", mInFlightMap.size());
+    }
+}
+
+
+void Camera3Device::sendCaptureResult(CameraMetadata &pendingMetadata,
+        CaptureResultExtras &resultExtras,
+        CameraMetadata &collectedPartialResult,
+        uint32_t frameNumber) {
+    if (pendingMetadata.isEmpty()) {
+        return;
+    }
+
+    Mutex::Autolock l(mOutputLock);
+
+    // TODO: need to track errors for tighter bounds on expected frame number
+    if (frameNumber < mNextResultFrameNumber) {
+        SET_ERR("Out-of-order capture result metadata submitted! "
+                "(got frame number %d, expecting %d)",
+                frameNumber, mNextResultFrameNumber);
+        return;
+    }
+    mNextResultFrameNumber = frameNumber + 1;
+
+    CaptureResult captureResult;
+    captureResult.mResultExtras = resultExtras;
+    captureResult.mMetadata = pendingMetadata;
+
+    if (captureResult.mMetadata.update(ANDROID_REQUEST_FRAME_COUNT,
+            (int32_t*)&frameNumber, 1) != OK) {
+        SET_ERR("Failed to set frame# in metadata (%d)",
+                frameNumber);
+        return;
+    } else {
+        ALOGVV("%s: Camera %d: Set frame# in metadata (%d)",
+                __FUNCTION__, mId, frameNumber);
+    }
+
+    // Append any previous partials to form a complete result
+    if (mUsePartialResult && !collectedPartialResult.isEmpty()) {
+        captureResult.mMetadata.append(collectedPartialResult);
+    }
+
+    captureResult.mMetadata.sort();
+
+    // Check that there's a timestamp in the result metadata
+    camera_metadata_entry entry =
+            captureResult.mMetadata.find(ANDROID_SENSOR_TIMESTAMP);
+    if (entry.count == 0) {
+        SET_ERR("No timestamp provided by HAL for frame %d!",
+                frameNumber);
+        return;
+    }
+
+    // Valid result, insert into queue
+    List<CaptureResult>::iterator queuedResult =
+            mResultQueue.insert(mResultQueue.end(), CaptureResult(captureResult));
+    ALOGVV("%s: result requestId = %" PRId32 ", frameNumber = %" PRId64
+           ", burstId = %" PRId32, __FUNCTION__,
+           queuedResult->mResultExtras.requestId,
+           queuedResult->mResultExtras.frameNumber,
+           queuedResult->mResultExtras.burstId);
+
+    mResultSignal.signal();
+}
+
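+
+Note: removeInFlightRequestIfReadyLocked() encodes the new completion rule.
+Restated as a standalone predicate (sketch; the fields match the
+InFlightRequest changes later in this patch):
+
+    // An in-flight entry may be retired once all buffers are back and, for
+    // successful requests, both the final metadata and the shutter arrived.
+    bool isComplete(const InFlightRequest& r) {
+        if (r.numBuffersLeft != 0) {
+            return false;
+        }
+        if (r.requestStatus != OK) {
+            return true;  // failed request: returned buffers alone suffice
+        }
+        return r.haveResultMetadata && r.shutterTimestamp != 0;
+    }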
 /**
  * Camera HAL device callback methods
  */
@@ -1914,11 +2039,14 @@
     CaptureResultExtras resultExtras;
     bool hasInputBufferInRequest = false;
 
-    // Get capture timestamp and resultExtras from list of in-flight requests,
-    // where it was added by the shutter notification for this frame.
-    // Then update the in-flight status and remove the in-flight entry if
-    // all result data has been received.
-    nsecs_t timestamp = 0;
+    // Get shutter timestamp and resultExtras from list of in-flight requests,
+    // where it was added by the shutter notification for this frame. If the
+    // shutter timestamp isn't received yet, append the output buffers to the
+    // in-flight request and they will be returned when the shutter timestamp
+    // arrives. Update the in-flight status and remove the in-flight entry if
+    // all result data and shutter timestamp have been received.
+    nsecs_t shutterTimestamp = 0;
+
     {
         Mutex::Autolock l(mInFlightLock);
         ssize_t idx = mInFlightMap.indexOfKey(frameNumber);
@@ -1928,13 +2056,17 @@
             return;
         }
         InFlightRequest &request = mInFlightMap.editValueAt(idx);
-        ALOGVV("%s: got InFlightRequest requestId = %" PRId32 ", frameNumber = %" PRId64
-                ", burstId = %" PRId32,
-                __FUNCTION__, request.resultExtras.requestId, request.resultExtras.frameNumber,
-                request.resultExtras.burstId);
-        // Always update the partial count to the latest one. When framework aggregates adjacent
-        // partial results into one, the latest partial count will be used.
-        request.resultExtras.partialResultCount = result->partial_result;
+        ALOGVV("%s: got InFlightRequest requestId = %" PRId32
+                ", frameNumber = %" PRId64 ", burstId = %" PRId32
+                ", partialResultCount = %d",
+                __FUNCTION__, request.resultExtras.requestId,
+                request.resultExtras.frameNumber, request.resultExtras.burstId,
+                result->partial_result);
+        // Always update the partial count to the latest one if it's not 0
+        // (buffers only). When framework aggregates adjacent partial results
+        // into one, the latest partial count will be used.
+        if (result->partial_result != 0)
+            request.resultExtras.partialResultCount = result->partial_result;
 
         // Check if this result carries only partial metadata
         if (mUsePartialResult && result->result != NULL) {
@@ -1978,22 +2110,9 @@
             }
         }
 
-        timestamp = request.captureTimestamp;
-        resultExtras = request.resultExtras;
+        shutterTimestamp = request.shutterTimestamp;
         hasInputBufferInRequest = request.hasInputBuffer;
 
-        /**
-         * One of the following must happen before it's legal to call process_capture_result,
-         * unless partial metadata is being provided:
-         * - CAMERA3_MSG_SHUTTER (expected during normal operation)
-         * - CAMERA3_MSG_ERROR (expected during flush)
-         */
-        if (request.requestStatus == OK && timestamp == 0 && !isPartialResult) {
-            SET_ERR("Called before shutter notify for frame %d",
-                    frameNumber);
-            return;
-        }
-
         // Did we get the (final) result metadata for this capture?
         if (result->result != NULL && !isPartialResult) {
             if (request.haveResultMetadata) {
@@ -2026,103 +2145,38 @@
             return;
         }
 
-        // Check if everything has arrived for this result (buffers and metadata), remove it from
-        // InFlightMap if both arrived or HAL reports error for this request (i.e. during flush).
-        // For per-frame error notifications, camera3.h requirements state that all the
-        // buffer handles for a failed frame capture must be returned via process_capture_result()
-        // call(s). Hence, Camera3Device needs to ensure that the frame entry is not deleted from
-        // mInFlightMap until all buffers for that frame have been returned by HAL.
-        if ((request.numBuffersLeft == 0) &&
-            ((request.requestStatus != OK) || (request.haveResultMetadata))) {
-            ATRACE_ASYNC_END("frame capture", frameNumber);
-            mInFlightMap.removeItemsAt(idx, 1);
+        camera_metadata_ro_entry_t entry;
+        res = find_camera_metadata_ro_entry(result->result,
+                ANDROID_SENSOR_TIMESTAMP, &entry);
+        if (res == OK && entry.count == 1) {
+            request.sensorTimestamp = entry.data.i64[0];
         }
 
-        // Sanity check - if we have too many in-flight frames, something has
-        // likely gone wrong
-        if (mInFlightMap.size() > kInFlightWarnLimit) {
-            CLOGE("In-flight list too large: %zu", mInFlightMap.size());
-        }
-
-    }
-
-    // Process the result metadata, if provided
-    bool gotResult = false;
-    if (result->result != NULL && !isPartialResult) {
-        Mutex::Autolock l(mOutputLock);
-
-        gotResult = true;
-
-        // TODO: need to track errors for tighter bounds on expected frame number
-        if (frameNumber < mNextResultFrameNumber) {
-            SET_ERR("Out-of-order capture result metadata submitted! "
-                    "(got frame number %d, expecting %d)",
-                    frameNumber, mNextResultFrameNumber);
-            return;
-        }
-        mNextResultFrameNumber = frameNumber + 1;
-
-        CaptureResult captureResult;
-        captureResult.mResultExtras = resultExtras;
-        captureResult.mMetadata = result->result;
-
-        if (captureResult.mMetadata.update(ANDROID_REQUEST_FRAME_COUNT,
-                (int32_t*)&frameNumber, 1) != OK) {
-            SET_ERR("Failed to set frame# in metadata (%d)",
-                    frameNumber);
-            gotResult = false;
+        // If the shutter event hasn't been received yet, append the output
+        // buffers to the in-flight request. Otherwise, return the output
+        // buffers to their streams.
+        if (shutterTimestamp == 0) {
+            request.pendingOutputBuffers.appendArray(result->output_buffers,
+                result->num_output_buffers);
         } else {
-            ALOGVV("%s: Camera %d: Set frame# in metadata (%d)",
-                    __FUNCTION__, mId, frameNumber);
+            returnOutputBuffers(result->output_buffers,
+                result->num_output_buffers, shutterTimestamp);
         }
 
-        // Append any previous partials to form a complete result
-        if (mUsePartialResult && !collectedPartialResult.isEmpty()) {
-            captureResult.mMetadata.append(collectedPartialResult);
+        if (result->result != NULL && !isPartialResult) {
+            if (shutterTimestamp == 0) {
+                request.pendingMetadata = result->result;
+                request.partialResult.collectedResult = collectedPartialResult;
+            } else {
+                CameraMetadata metadata;
+                metadata = result->result;
+                sendCaptureResult(metadata, request.resultExtras,
+                    collectedPartialResult, frameNumber);
+            }
         }
 
-        captureResult.mMetadata.sort();
-
-        // Check that there's a timestamp in the result metadata
-
-        camera_metadata_entry entry =
-                captureResult.mMetadata.find(ANDROID_SENSOR_TIMESTAMP);
-        if (entry.count == 0) {
-            SET_ERR("No timestamp provided by HAL for frame %d!",
-                    frameNumber);
-            gotResult = false;
-        } else if (timestamp != entry.data.i64[0]) {
-            SET_ERR("Timestamp mismatch between shutter notify and result"
-                    " metadata for frame %d (%" PRId64 " vs %" PRId64 " respectively)",
-                    frameNumber, timestamp, entry.data.i64[0]);
-            gotResult = false;
-        }
-
-        if (gotResult) {
-            // Valid result, insert into queue
-            List<CaptureResult>::iterator queuedResult =
-                    mResultQueue.insert(mResultQueue.end(), CaptureResult(captureResult));
-            ALOGVV("%s: result requestId = %" PRId32 ", frameNumber = %" PRId64
-                   ", burstId = %" PRId32, __FUNCTION__,
-                   queuedResult->mResultExtras.requestId,
-                   queuedResult->mResultExtras.frameNumber,
-                   queuedResult->mResultExtras.burstId);
-        }
-    } // scope for mOutputLock
-
-    // Return completed buffers to their streams with the timestamp
-
-    for (size_t i = 0; i < result->num_output_buffers; i++) {
-        Camera3Stream *stream =
-                Camera3Stream::cast(result->output_buffers[i].stream);
-        res = stream->returnBuffer(result->output_buffers[i], timestamp);
-        // Note: stream may be deallocated at this point, if this buffer was the
-        // last reference to it.
-        if (res != OK) {
-            ALOGE("Can't return buffer %zu for frame %d to its stream: "
-                    " %s (%d)", i, frameNumber, strerror(-res), res);
-        }
-    }
+        removeInFlightRequestIfReadyLocked(idx);
+    } // scope for mInFlightLock
 
     if (result->input_buffer != NULL) {
         if (hasInputBufferInRequest) {
@@ -2142,13 +2196,6 @@
                     __FUNCTION__);
         }
     }
-
-    // Finally, signal any waiters for new frames
-
-    if (gotResult) {
-        mResultSignal.signal();
-    }
-
 }
 
 void Camera3Device::notify(const camera3_notify_msg *msg) {
@@ -2266,8 +2313,6 @@
         mNextShutterFrameNumber = msg.frame_number + 1;
     }
 
-    CaptureResultExtras resultExtras;
-
     // Set timestamp for the request in the in-flight tracking
     // and get the request ID to send upstream
     {
@@ -2275,21 +2320,30 @@
         idx = mInFlightMap.indexOfKey(msg.frame_number);
         if (idx >= 0) {
             InFlightRequest &r = mInFlightMap.editValueAt(idx);
-            r.captureTimestamp = msg.timestamp;
-            resultExtras = r.resultExtras;
+
+            ALOGVV("Camera %d: %s: Shutter fired for frame %d (id %d) at %" PRId64,
+                    mId, __FUNCTION__,
+                    msg.frame_number, r.resultExtras.requestId, msg.timestamp);
+            // Call listener, if any
+            if (listener != NULL) {
+                listener->notifyShutter(r.resultExtras, msg.timestamp);
+            }
+
+            r.shutterTimestamp = msg.timestamp;
+
+            // send pending result and buffers
+            sendCaptureResult(r.pendingMetadata, r.resultExtras,
+                r.partialResult.collectedResult, msg.frame_number);
+            returnOutputBuffers(r.pendingOutputBuffers.array(),
+                r.pendingOutputBuffers.size(), r.shutterTimestamp);
+            r.pendingOutputBuffers.clear();
+
+            removeInFlightRequestIfReadyLocked(idx);
         }
     }
     if (idx < 0) {
         SET_ERR("Shutter notification for non-existent frame number %d",
                 msg.frame_number);
-        return;
-    }
-    ALOGVV("Camera %d: %s: Shutter fired for frame %d (id %d) at %" PRId64,
-            mId, __FUNCTION__,
-            msg.frame_number, resultExtras.requestId, msg.timestamp);
-    // Call listener, if any
-    if (listener != NULL) {
-        listener->notifyShutter(resultExtras, msg.timestamp);
     }
 }
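
Note: with this change both arrival orders are handled without raising an
error:

    // shutter first:  notifyShutter() records the timestamp; later results
    //                 and buffers are delivered immediately with it.
    // result first:   process_capture_result() parks metadata and buffers on
    //                 the InFlightRequest; notifyShutter() flushes them.
    // Either way, buffers reach their streams carrying the shutter timestamp,
    // and the entry is retired by removeInFlightRequestIfReadyLocked().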
 
diff --git a/services/camera/libcameraservice/device3/Camera3Device.h b/services/camera/libcameraservice/device3/Camera3Device.h
index ec6bba1..ec8dc10 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.h
+++ b/services/camera/libcameraservice/device3/Camera3Device.h
@@ -521,7 +521,9 @@
 
     struct InFlightRequest {
         // Set by notify() SHUTTER call.
-        nsecs_t captureTimestamp;
+        nsecs_t shutterTimestamp;
+        // Set by process_capture_result().
+        nsecs_t sensorTimestamp;
         int     requestStatus;
         // Set by process_capture_result call with valid metadata
         bool    haveResultMetadata;
@@ -532,6 +534,21 @@
         // If this request has any input buffer
         bool hasInputBuffer;
 
+
+        // The last metadata that the framework received from the HAL and
+        // has not yet sent out because the shutter event hasn't arrived.
+        // It's added by process_capture_result and sent when the framework
+        // receives the shutter event.
+        CameraMetadata pendingMetadata;
+
+        // Buffers are added by process_capture_result when output buffers
+        // return from the HAL but the framework has not yet received the
+        // shutter event. They will be returned to the streams when the
+        // framework receives the shutter event.
+        Vector<camera3_stream_buffer_t> pendingOutputBuffers;
+
         // Fields used by the partial result only
         struct PartialResultInFlight {
             // Set by process_capture_result once 3A has been sent to clients
@@ -546,7 +563,8 @@
 
         // Default constructor needed by KeyedVector
         InFlightRequest() :
-                captureTimestamp(0),
+                shutterTimestamp(0),
+                sensorTimestamp(0),
                 requestStatus(OK),
                 haveResultMetadata(false),
                 numBuffersLeft(0),
@@ -554,7 +572,8 @@
         }
 
         InFlightRequest(int numBuffers) :
-                captureTimestamp(0),
+                shutterTimestamp(0),
+                sensorTimestamp(0),
                 requestStatus(OK),
                 haveResultMetadata(false),
                 numBuffersLeft(numBuffers),
@@ -562,7 +581,8 @@
         }
 
         InFlightRequest(int numBuffers, CaptureResultExtras extras) :
-                captureTimestamp(0),
+                shutterTimestamp(0),
+                sensorTimestamp(0),
                 requestStatus(OK),
                 haveResultMetadata(false),
                 numBuffersLeft(numBuffers),
@@ -571,7 +591,8 @@
         }
 
         InFlightRequest(int numBuffers, CaptureResultExtras extras, bool hasInput) :
-                captureTimestamp(0),
+                shutterTimestamp(0),
+                sensorTimestamp(0),
                 requestStatus(OK),
                 haveResultMetadata(false),
                 numBuffersLeft(numBuffers),
@@ -639,6 +660,24 @@
     void notifyShutter(const camera3_shutter_msg_t &msg,
             NotificationListener *listener);
 
+    // Helper function to return the output buffers to the streams.
+    void returnOutputBuffers(const camera3_stream_buffer_t *outputBuffers,
+            size_t numBuffers, nsecs_t timestamp);
+
+    // Insert the capture result given the pending metadata, result extras,
+    // partial results, and the frame number to the result queue.
+    void sendCaptureResult(CameraMetadata &pendingMetadata,
+            CaptureResultExtras &resultExtras,
+            CameraMetadata &collectedPartialResult, uint32_t frameNumber);
+
+    /**** Scope for mInFlightLock ****/
+
+    // Remove the in-flight request at the given index from mInFlightMap
+    // if it's no longer needed. It must only be called with mInFlightLock held.
+    void removeInFlightRequestIfReadyLocked(int idx);
+
+    /**** End scope for mInFlightLock ****/
+
     /**
      * Static callback forwarding methods from HAL to instance
      */
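
Note: putting the header changes together, the reworked in-flight lifecycle
is, in sketch form:

    // 1. Request submitted -> InFlightRequest added, numBuffersLeft = N.
    // 2. SHUTTER notify    -> shutterTimestamp set; pendingMetadata and
    //                         pendingOutputBuffers are flushed downstream.
    // 3. Capture result(s) -> metadata and buffers delivered, or parked if
    //                         the shutter hasn't fired; sensorTimestamp saved
    //                         and later checked against shutterTimestamp.
    // 4. Retirement        -> removeInFlightRequestIfReadyLocked() drops the
    //                         entry once buffers, metadata, and shutter agree.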