Merge "Fix missing check of hidl return status"
diff --git a/camera/ndk/include/camera/NdkCameraMetadataTags.h b/camera/ndk/include/camera/NdkCameraMetadataTags.h
index d35a52b..a81fe8c 100644
--- a/camera/ndk/include/camera/NdkCameraMetadataTags.h
+++ b/camera/ndk/include/camera/NdkCameraMetadataTags.h
@@ -6998,19 +6998,24 @@
      * <li>ACAMERA_LENS_RADIAL_DISTORTION</li>
      * </ul>
      * </li>
+     * <li>The SENSOR_INFO_TIMESTAMP_SOURCE of the logical device and physical devices must be
+     *   the same.</li>
      * <li>The logical camera device must be LIMITED or higher device.</li>
      * </ul>
      * <p>Both the logical camera device and its underlying physical devices support the
      * mandatory stream combinations required for their device levels.</p>
      * <p>Additionally, for each guaranteed stream combination, the logical camera supports:</p>
      * <ul>
-     * <li>Replacing one logical {@link AIMAGE_FORMAT_YUV_420_888 YUV_420_888}
+     * <li>Replacing one logical
+     *   {@link AIMAGE_FORMAT_YUV_420_888 YUV_420_888}
      *   or raw stream with two physical streams of the same size and format, each from a
      *   separate physical camera, given that the size and format are supported by both
      *   physical cameras.</li>
-     * <li>Adding two raw streams, each from one physical camera, if the logical camera doesn't
-     *   advertise RAW capability, but the underlying physical cameras do. This is usually
-     *   the case when the physical cameras have different sensor sizes.</li>
+     * <li>If the logical camera doesn't advertise RAW capability, but the underlying physical
+     *   cameras do, the logical camera will support guaranteed stream combinations for RAW
+     *   capability, except that the RAW streams will be physical streams, each from a separate
+     *   physical camera. This is usually the case when the physical cameras have different
+     *   sensor sizes.</li>
      * </ul>
      * <p>Using physical streams in place of a logical stream of the same size and format will
      * not slow down the frame rate of the capture, as long as the minimum frame duration
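
The guarantee spelled out above (matching SENSOR_INFO_TIMESTAMP_SOURCE across the logical
device and its physical devices) can be checked from client code. A minimal NDK sketch,
assuming "0" is the logical camera and "2" one of its physical devices (both ids are
placeholders), with error handling reduced to early returns:

    #include <camera/NdkCameraManager.h>
    #include <camera/NdkCameraMetadata.h>
    #include <camera/NdkCameraMetadataTags.h>
    #include <stdio.h>

    // Returns the SENSOR_INFO_TIMESTAMP_SOURCE value for one camera id, or -1 on failure.
    static int getTimestampSource(ACameraManager *mgr, const char *id) {
        ACameraMetadata *chars = nullptr;
        if (ACameraManager_getCameraCharacteristics(mgr, id, &chars) != ACAMERA_OK) {
            return -1;
        }
        ACameraMetadata_const_entry entry;
        int source = -1;
        if (ACameraMetadata_getConstEntry(
                chars, ACAMERA_SENSOR_INFO_TIMESTAMP_SOURCE, &entry) == ACAMERA_OK
                && entry.count > 0) {
            source = entry.data.u8[0];
        }
        ACameraMetadata_free(chars);
        return source;
    }

    int main() {
        ACameraManager *mgr = ACameraManager_create();
        int logical = getTimestampSource(mgr, "0");
        int physical = getTimestampSource(mgr, "2");
        printf("timestamp sources match: %s\n",
               (logical >= 0 && logical == physical) ? "yes" : "no");
        ACameraManager_delete(mgr);
        return 0;
    }
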
diff --git a/cmds/stagefright/SineSource.cpp b/cmds/stagefright/SineSource.cpp
index cad8caf..9e40a0f 100644
--- a/cmds/stagefright/SineSource.cpp
+++ b/cmds/stagefright/SineSource.cpp
@@ -4,6 +4,7 @@
 
 #include <media/stagefright/MediaBufferGroup.h>
 #include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/MediaBuffer.h>
 #include <media/stagefright/MediaDefs.h>
 #include <media/stagefright/MetaData.h>
 
@@ -59,10 +60,10 @@
 }
 
 status_t SineSource::read(
-        MediaBuffer **out, const ReadOptions * /* options */) {
+        MediaBufferBase **out, const ReadOptions * /* options */) {
     *out = NULL;
 
-    MediaBuffer *buffer;
+    MediaBufferBase *buffer;
     status_t err = mGroup->acquire_buffer(&buffer);
 
     if (err != OK) {
diff --git a/cmds/stagefright/SineSource.h b/cmds/stagefright/SineSource.h
index f1fb96d..1817291 100644
--- a/cmds/stagefright/SineSource.h
+++ b/cmds/stagefright/SineSource.h
@@ -18,7 +18,7 @@
     virtual sp<MetaData> getFormat();
 
     virtual status_t read(
-            MediaBuffer **out, const ReadOptions *options = NULL);
+            MediaBufferBase **out, const ReadOptions *options = NULL);
 
 protected:
     virtual ~SineSource();
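
The read() signature change above is the template for the rest of this change: callers now
receive a MediaBufferBase* instead of a MediaBuffer*. A minimal consumer sketch, assuming
`source` is an already-started source such as SineSource:

    MediaBufferBase *buffer = nullptr;
    while (source->read(&buffer) == OK) {
        // Payload is at buffer->data() + buffer->range_offset(),
        // buffer->range_length() bytes long.
        buffer->release();
        buffer = nullptr;
    }

The audioloop, record and stagefright call sites below follow the same pattern with only the
pointer type updated.
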
diff --git a/cmds/stagefright/audioloop.cpp b/cmds/stagefright/audioloop.cpp
index fc24646..d4f2e8d 100644
--- a/cmds/stagefright/audioloop.cpp
+++ b/cmds/stagefright/audioloop.cpp
@@ -169,7 +169,7 @@
 ALOGI("Line: %d", __LINE__);
         } else {
             CHECK_EQ(decoder->start(), (status_t)OK);
-            MediaBuffer* buffer;
+            MediaBufferBase* buffer;
             while (decoder->read(&buffer) == OK) {
                 // do something with buffer (save it eventually?)
                 // need to stop after some count though...
diff --git a/cmds/stagefright/record.cpp b/cmds/stagefright/record.cpp
index 073ee6b..44b0015 100644
--- a/cmds/stagefright/record.cpp
+++ b/cmds/stagefright/record.cpp
@@ -328,7 +328,7 @@
 
     int32_t n = 0;
     status_t err;
-    MediaBuffer *buffer;
+    MediaBufferBase *buffer;
     while ((err = encoder->read(&buffer)) == OK) {
         printf(".");
         fflush(stdout);
diff --git a/cmds/stagefright/recordvideo.cpp b/cmds/stagefright/recordvideo.cpp
index af39d46..b7a5066 100644
--- a/cmds/stagefright/recordvideo.cpp
+++ b/cmds/stagefright/recordvideo.cpp
@@ -90,7 +90,7 @@
     }
 
     virtual status_t read(
-            MediaBuffer **buffer, const MediaSource::ReadOptions *options __unused) {
+            MediaBufferBase **buffer, const MediaSource::ReadOptions *options __unused) {
 
         if (mNumFramesOutput % 10 == 0) {
             fprintf(stderr, ".");
diff --git a/cmds/stagefright/stagefright.cpp b/cmds/stagefright/stagefright.cpp
index bb517aa..5fa8304 100644
--- a/cmds/stagefright/stagefright.cpp
+++ b/cmds/stagefright/stagefright.cpp
@@ -150,7 +150,7 @@
 
     status_t err;
     for (;;) {
-        MediaBuffer *mbuf;
+        MediaBufferBase *mbuf;
         err = source->read(&mbuf);
 
         if (err == INFO_FORMAT_CHANGED) {
@@ -234,7 +234,7 @@
         CHECK(meta->findInt64(kKeyDuration, &durationUs));
 
         status_t err;
-        MediaBuffer *buffer;
+        MediaBufferBase *buffer;
         MediaSource::ReadOptions options;
         int64_t seekTimeUs = -1;
         for (;;) {
@@ -321,7 +321,7 @@
     while (numIterationsLeft-- > 0) {
         long numFrames = 0;
 
-        MediaBuffer *buffer;
+        MediaBufferBase *buffer;
 
         for (;;) {
             int64_t startDecodeUs = getNowUs();
@@ -416,7 +416,7 @@
     virtual sp<MetaData> getFormat();
 
     virtual status_t read(
-            MediaBuffer **buffer, const ReadOptions *options);
+            MediaBufferBase **buffer, const ReadOptions *options);
 
 private:
     enum StreamType {
@@ -465,7 +465,7 @@
     return mSource->getFormat();
 }
 
-static bool isIDRFrame(MediaBuffer *buffer) {
+static bool isIDRFrame(MediaBufferBase *buffer) {
     const uint8_t *data =
         (const uint8_t *)buffer->data() + buffer->range_offset();
     size_t size = buffer->range_length();
@@ -482,7 +482,7 @@
 }
 
 status_t DetectSyncSource::read(
-        MediaBuffer **buffer, const ReadOptions *options) {
+        MediaBufferBase **buffer, const ReadOptions *options) {
     for (;;) {
         status_t err = mSource->read(buffer, options);
 
@@ -562,7 +562,7 @@
         options.setSeekTo(
                 seekTimeUs, MediaSource::ReadOptions::SEEK_PREVIOUS_SYNC);
 
-        MediaBuffer *buffer;
+        MediaBufferBase *buffer;
         status_t err;
         for (;;) {
             err = source->read(&buffer, &options);
diff --git a/drm/libmediadrm/DrmHal.cpp b/drm/libmediadrm/DrmHal.cpp
index 36f40cd..068a52b 100644
--- a/drm/libmediadrm/DrmHal.cpp
+++ b/drm/libmediadrm/DrmHal.cpp
@@ -100,7 +100,7 @@
     return hidl_string(string.string());
 }
 
-std::string ToHexString(const std::string& str) {
+std::string toHexString(const std::string& str) {
   std::ostringstream out;
   out << std::hex << std::setfill('0');
   for (size_t i = 0; i < str.size(); i++) {
@@ -1292,13 +1292,13 @@
     String8 description;
     status_t result = getPropertyStringInternal(String8("vendor"), vendor);
     if (result != OK) {
-        ALOGE("Failed to get vendor from drm plugin. %d", result);
+        ALOGE("Failed to get vendor from drm plugin: %d", result);
     } else {
         item.setCString("vendor", vendor.c_str());
     }
     result = getPropertyStringInternal(String8("description"), description);
     if (result != OK) {
-        ALOGE("Failed to get description from drm plugin. %d", result);
+        ALOGE("Failed to get description from drm plugin: %d", result);
     } else {
         item.setCString("description", description.c_str());
     }
@@ -1306,14 +1306,14 @@
     std::string serializedMetrics;
     result = mMetrics.GetSerializedMetrics(&serializedMetrics);
     if (result != OK) {
-        ALOGE("Failed to serialize Framework metrics: %d", result);
+        ALOGE("Failed to serialize framework metrics: %d", result);
     }
-    serializedMetrics = ToHexString(serializedMetrics);
+    serializedMetrics = toHexString(serializedMetrics);
     if (!serializedMetrics.empty()) {
         item.setCString("serialized_metrics", serializedMetrics.c_str());
     }
     if (!item.selfrecord()) {
-        ALOGE("Failed to self record framework metrics.");
+        ALOGE("Failed to self record framework metrics");
     }
 }
 
@@ -1328,7 +1328,7 @@
         status_t res = android::reportDrmPluginMetrics(
                 metrics, vendor, description);
         if (res != OK) {
-            ALOGE("Metrics were retrieved but could not be reported: %i", res);
+            ALOGE("Metrics were retrieved but could not be reported: %d", res);
         }
     }
 }
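
The hunk above renames ToHexString to toHexString but cuts the body off at the hunk
boundary. For reference, a sketch of the complete helper, assuming it emits two hex digits
per byte:

    #include <iomanip>
    #include <sstream>
    #include <string>

    std::string toHexString(const std::string& str) {
        std::ostringstream out;
        out << std::hex << std::setfill('0');
        for (size_t i = 0; i < str.size(); i++) {
            // setw() resets after each insertion, so it is reapplied per byte.
            out << std::setw(2) << (int)(unsigned char)str[i];
        }
        return out.str();
    }
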
diff --git a/media/extractors/aac/AACExtractor.cpp b/media/extractors/aac/AACExtractor.cpp
index 9df0aaa..f6c8664 100644
--- a/media/extractors/aac/AACExtractor.cpp
+++ b/media/extractors/aac/AACExtractor.cpp
@@ -46,7 +46,7 @@
     virtual sp<MetaData> getFormat();
 
     virtual status_t read(
-            MediaBuffer **buffer, const ReadOptions *options = NULL);
+            MediaBufferBase **buffer, const ReadOptions *options = NULL);
 
 protected:
     virtual ~AACSource();
@@ -259,7 +259,7 @@
 
     mCurrentTimeUs = 0;
     mGroup = new MediaBufferGroup;
-    mGroup->add_buffer(new MediaBuffer(kMaxFrameSize));
+    mGroup->add_buffer(MediaBufferBase::Create(kMaxFrameSize));
     mStarted = true;
 
     return OK;
@@ -280,7 +280,7 @@
 }
 
 status_t AACSource::read(
-        MediaBuffer **out, const ReadOptions *options) {
+        MediaBufferBase **out, const ReadOptions *options) {
     *out = NULL;
 
     int64_t seekTimeUs;
@@ -303,7 +303,7 @@
         return ERROR_END_OF_STREAM;
     }
 
-    MediaBuffer *buffer;
+    MediaBufferBase *buffer;
     status_t err = mGroup->acquire_buffer(&buffer);
     if (err != OK) {
         return err;
diff --git a/media/extractors/amr/AMRExtractor.cpp b/media/extractors/amr/AMRExtractor.cpp
index eec18b5..59d9ef1 100644
--- a/media/extractors/amr/AMRExtractor.cpp
+++ b/media/extractors/amr/AMRExtractor.cpp
@@ -45,7 +45,7 @@
     virtual sp<MetaData> getFormat();
 
     virtual status_t read(
-            MediaBuffer **buffer, const ReadOptions *options = NULL);
+            MediaBufferBase **buffer, const ReadOptions *options = NULL);
 
 protected:
     virtual ~AMRSource();
@@ -233,7 +233,7 @@
     mOffset = mIsWide ? 9 : 6;
     mCurrentTimeUs = 0;
     mGroup = new MediaBufferGroup;
-    mGroup->add_buffer(new MediaBuffer(128));
+    mGroup->add_buffer(MediaBufferBase::Create(128));
     mStarted = true;
 
     return OK;
@@ -254,7 +254,7 @@
 }
 
 status_t AMRSource::read(
-        MediaBuffer **out, const ReadOptions *options) {
+        MediaBufferBase **out, const ReadOptions *options) {
     *out = NULL;
 
     int64_t seekTimeUs;
@@ -303,7 +303,7 @@
         return ERROR_MALFORMED;
     }
 
-    MediaBuffer *buffer;
+    MediaBufferBase *buffer;
     status_t err = mGroup->acquire_buffer(&buffer);
     if (err != OK) {
         return err;
diff --git a/media/extractors/flac/FLACExtractor.cpp b/media/extractors/flac/FLACExtractor.cpp
index 2ce20db..2c5e43e 100644
--- a/media/extractors/flac/FLACExtractor.cpp
+++ b/media/extractors/flac/FLACExtractor.cpp
@@ -31,7 +31,7 @@
 #include <media/stagefright/MediaBufferGroup.h>
 #include <media/stagefright/MediaDefs.h>
 #include <media/stagefright/MetaData.h>
-#include <media/stagefright/MediaBuffer.h>
+#include <media/stagefright/MediaBufferBase.h>
 
 namespace android {
 
@@ -173,7 +173,7 @@
     virtual sp<MetaData> getFormat();
 
     virtual status_t read(
-            MediaBuffer **buffer, const ReadOptions *options = NULL);
+            MediaBufferBase **buffer, const ReadOptions *options = NULL);
 
 protected:
     virtual ~FLACSource();
@@ -232,10 +232,10 @@
     // media buffers
     void allocateBuffers();
     void releaseBuffers();
-    MediaBuffer *readBuffer() {
+    MediaBufferBase *readBuffer() {
         return readBuffer(false, 0LL);
     }
-    MediaBuffer *readBuffer(FLAC__uint64 sample) {
+    MediaBufferBase *readBuffer(FLAC__uint64 sample) {
         return readBuffer(true, sample);
     }
 
@@ -274,7 +274,7 @@
     FLAC__StreamDecoderErrorStatus mErrorStatus;
 
     status_t init();
-    MediaBuffer *readBuffer(bool doSeek, FLAC__uint64 sample);
+    MediaBufferBase *readBuffer(bool doSeek, FLAC__uint64 sample);
 
     // no copy constructor or assignment
     FLACParser(const FLACParser &);
@@ -763,7 +763,7 @@
     CHECK(mGroup == NULL);
     mGroup = new MediaBufferGroup;
     mMaxBufferSize = getMaxBlockSize() * getChannels() * sizeof(short);
-    mGroup->add_buffer(new MediaBuffer(mMaxBufferSize));
+    mGroup->add_buffer(MediaBufferBase::Create(mMaxBufferSize));
 }
 
 void FLACParser::releaseBuffers()
@@ -773,7 +773,7 @@
     mGroup = NULL;
 }
 
-MediaBuffer *FLACParser::readBuffer(bool doSeek, FLAC__uint64 sample)
+MediaBufferBase *FLACParser::readBuffer(bool doSeek, FLAC__uint64 sample)
 {
     mWriteRequested = true;
     mWriteCompleted = false;
@@ -810,7 +810,7 @@
     }
     // acquire a media buffer
     CHECK(mGroup != NULL);
-    MediaBuffer *buffer;
+    MediaBufferBase *buffer;
     status_t err = mGroup->acquire_buffer(&buffer);
     if (err != OK) {
         return NULL;
@@ -881,9 +881,9 @@
 }
 
 status_t FLACSource::read(
-        MediaBuffer **outBuffer, const ReadOptions *options)
+        MediaBufferBase **outBuffer, const ReadOptions *options)
 {
-    MediaBuffer *buffer;
+    MediaBufferBase *buffer;
     // process an optional seek request
     int64_t seekTimeUs;
     ReadOptions::SeekMode mode;
diff --git a/media/extractors/midi/MidiExtractor.cpp b/media/extractors/midi/MidiExtractor.cpp
index 1e38194..cf446db 100644
--- a/media/extractors/midi/MidiExtractor.cpp
+++ b/media/extractors/midi/MidiExtractor.cpp
@@ -30,7 +30,7 @@
 
 namespace android {
 
-// how many Sonivox output buffers to aggregate into one MediaBuffer
+// how many Sonivox output buffers to aggregate into one MediaBufferBase
 static const int NUM_COMBINE_BUFFERS = 4;
 
 class MidiSource : public MediaSourceBase {
@@ -45,7 +45,7 @@
     virtual sp<MetaData> getFormat();
 
     virtual status_t read(
-            MediaBuffer **buffer, const ReadOptions *options = NULL);
+            MediaBufferBase **buffer, const ReadOptions *options = NULL);
 
 protected:
     virtual ~MidiSource();
@@ -114,10 +114,10 @@
 }
 
 status_t MidiSource::read(
-        MediaBuffer **outBuffer, const ReadOptions *options)
+        MediaBufferBase **outBuffer, const ReadOptions *options)
 {
     ALOGV("MidiSource::read");
-    MediaBuffer *buffer;
+    MediaBufferBase *buffer;
     // process an optional seek request
     int64_t seekTimeUs;
     ReadOptions::SeekMode mode;
@@ -207,7 +207,7 @@
     int bufsize = sizeof(EAS_PCM)
             * mEasConfig->mixBufferSize * mEasConfig->numChannels * NUM_COMBINE_BUFFERS;
     ALOGV("using %d byte buffer", bufsize);
-    mGroup->add_buffer(new MediaBuffer(bufsize));
+    mGroup->add_buffer(MediaBufferBase::Create(bufsize));
     return OK;
 }
 
@@ -223,13 +223,13 @@
     return result == EAS_SUCCESS ? OK : UNKNOWN_ERROR;
 }
 
-MediaBuffer* MidiEngine::readBuffer() {
+MediaBufferBase* MidiEngine::readBuffer() {
     EAS_STATE state;
     EAS_State(mEasData, mEasHandle, &state);
     if ((state == EAS_STATE_STOPPED) || (state == EAS_STATE_ERROR)) {
         return NULL;
     }
-    MediaBuffer *buffer;
+    MediaBufferBase *buffer;
     status_t err = mGroup->acquire_buffer(&buffer);
     if (err != OK) {
         ALOGE("readBuffer: no buffer");
diff --git a/media/extractors/midi/MidiExtractor.h b/media/extractors/midi/MidiExtractor.h
index 173a814..4274513 100644
--- a/media/extractors/midi/MidiExtractor.h
+++ b/media/extractors/midi/MidiExtractor.h
@@ -19,7 +19,7 @@
 
 #include <media/DataSourceBase.h>
 #include <media/MediaExtractor.h>
-#include <media/stagefright/MediaBuffer.h>
+#include <media/stagefright/MediaBufferBase.h>
 #include <media/stagefright/MediaBufferGroup.h>
 #include <media/MidiIoWrapper.h>
 #include <utils/String8.h>
@@ -39,7 +39,7 @@
     status_t allocateBuffers();
     status_t releaseBuffers();
     status_t seekTo(int64_t positionUs);
-    MediaBuffer* readBuffer();
+    MediaBufferBase* readBuffer();
 private:
     sp<MidiIoWrapper> mIoWrapper;
     MediaBufferGroup *mGroup;
diff --git a/media/extractors/mkv/MatroskaExtractor.cpp b/media/extractors/mkv/MatroskaExtractor.cpp
index 7396113..65988d3 100644
--- a/media/extractors/mkv/MatroskaExtractor.cpp
+++ b/media/extractors/mkv/MatroskaExtractor.cpp
@@ -29,7 +29,7 @@
 #include <media/stagefright/foundation/ByteUtils.h>
 #include <media/stagefright/foundation/ColorUtils.h>
 #include <media/stagefright/foundation/hexdump.h>
-#include <media/stagefright/MediaBuffer.h>
+#include <media/stagefright/MediaBufferBase.h>
 #include <media/stagefright/MediaDefs.h>
 #include <media/stagefright/MediaErrors.h>
 #include <media/stagefright/MetaData.h>
@@ -130,7 +130,7 @@
     virtual sp<MetaData> getFormat();
 
     virtual status_t read(
-            MediaBuffer **buffer, const ReadOptions *options);
+            MediaBufferBase **buffer, const ReadOptions *options);
 
 protected:
     virtual ~MatroskaSource();
@@ -150,11 +150,11 @@
     BlockIterator mBlockIter;
     ssize_t mNALSizeLen;  // for type AVC or HEVC
 
-    List<MediaBuffer *> mPendingFrames;
+    List<MediaBufferBase *> mPendingFrames;
 
     status_t advance();
 
-    status_t setWebmBlockCryptoInfo(MediaBuffer *mbuf);
+    status_t setWebmBlockCryptoInfo(MediaBufferBase *mbuf);
     status_t readBlock();
     void clearPendingFrames();
 
@@ -568,7 +568,7 @@
 
 void MatroskaSource::clearPendingFrames() {
     while (!mPendingFrames.empty()) {
-        MediaBuffer *frame = *mPendingFrames.begin();
+        MediaBufferBase *frame = *mPendingFrames.begin();
         mPendingFrames.erase(mPendingFrames.begin());
 
         frame->release();
@@ -576,7 +576,7 @@
     }
 }
 
-status_t MatroskaSource::setWebmBlockCryptoInfo(MediaBuffer *mbuf) {
+status_t MatroskaSource::setWebmBlockCryptoInfo(MediaBufferBase *mbuf) {
     if (mbuf->range_length() < 1 || mbuf->range_length() - 1 > INT32_MAX) {
         // 1-byte signal
         return ERROR_MALFORMED;
@@ -662,7 +662,7 @@
         }
 
         len += trackInfo->mHeaderLen;
-        MediaBuffer *mbuf = new MediaBuffer(len);
+        MediaBufferBase *mbuf = MediaBufferBase::Create(len);
         uint8_t *data = static_cast<uint8_t *>(mbuf->data());
         if (trackInfo->mHeader) {
             memcpy(data, trackInfo->mHeader, trackInfo->mHeaderLen);
@@ -695,7 +695,7 @@
 }
 
 status_t MatroskaSource::read(
-        MediaBuffer **out, const ReadOptions *options) {
+        MediaBufferBase **out, const ReadOptions *options) {
     *out = NULL;
 
     int64_t targetSampleTimeUs = -1ll;
@@ -731,7 +731,7 @@
         }
     }
 
-    MediaBuffer *frame = *mPendingFrames.begin();
+    MediaBufferBase *frame = *mPendingFrames.begin();
     mPendingFrames.erase(mPendingFrames.begin());
 
     if ((mType != AVC && mType != HEVC) || mNALSizeLen == 0) {
@@ -760,7 +760,7 @@
     size_t srcSize = frame->range_length();
 
     size_t dstSize = 0;
-    MediaBuffer *buffer = NULL;
+    MediaBufferBase *buffer = NULL;
     uint8_t *dstPtr = NULL;
 
     for (int32_t pass = 0; pass < 2; ++pass) {
@@ -820,7 +820,7 @@
                 // each 4-byte nal size with a 4-byte start code
                 buffer = frame;
             } else {
-                buffer = new MediaBuffer(dstSize);
+                buffer = MediaBufferBase::Create(dstSize);
             }
 
             int64_t timeUs;
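
For context on the pass-1/pass-2 loop above: the source sizes, then rewrites, length-prefixed
NAL units into Annex-B form. A self-contained sketch of that rewrite (not the extractor's
exact code), where each big-endian length prefix of nalSizeLen bytes is replaced by a 4-byte
0x00000001 start code:

    #include <stddef.h>
    #include <stdint.h>
    #include <string.h>

    static void lengthPrefixedToStartCodes(
            const uint8_t *src, size_t srcSize, uint8_t *dst, size_t nalSizeLen) {
        size_t offset = 0;
        while (offset + nalSizeLen <= srcSize) {
            uint32_t nalSize = 0;
            for (size_t i = 0; i < nalSizeLen; ++i) {
                nalSize = (nalSize << 8) | src[offset + i];  // big-endian length prefix
            }
            offset += nalSizeLen;
            if (nalSize > srcSize - offset) {
                break;  // malformed input; a real parser would report an error
            }
            memcpy(dst, "\x00\x00\x00\x01", 4);              // Annex-B start code
            memcpy(dst + 4, src + offset, nalSize);
            dst += 4 + nalSize;
            offset += nalSize;
        }
    }
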
diff --git a/media/extractors/mp3/MP3Extractor.cpp b/media/extractors/mp3/MP3Extractor.cpp
index e55084e..90ee653 100644
--- a/media/extractors/mp3/MP3Extractor.cpp
+++ b/media/extractors/mp3/MP3Extractor.cpp
@@ -30,7 +30,7 @@
 #include <media/stagefright/foundation/AMessage.h>
 #include <media/stagefright/foundation/avc_utils.h>
 #include <media/stagefright/foundation/ByteUtils.h>
-#include <media/stagefright/MediaBuffer.h>
+#include <media/stagefright/MediaBufferBase.h>
 #include <media/stagefright/MediaBufferGroup.h>
 #include <media/stagefright/MediaDefs.h>
 #include <media/stagefright/MediaErrors.h>
@@ -222,7 +222,7 @@
     virtual sp<MetaData> getFormat();
 
     virtual status_t read(
-            MediaBuffer **buffer, const ReadOptions *options = NULL);
+            MediaBufferBase **buffer, const ReadOptions *options = NULL);
 
 protected:
     virtual ~MP3Source();
@@ -463,7 +463,7 @@
 
     mGroup = new MediaBufferGroup;
 
-    mGroup->add_buffer(new MediaBuffer(kMaxFrameSize));
+    mGroup->add_buffer(MediaBufferBase::Create(kMaxFrameSize));
 
     mCurrentPos = mFirstFramePos;
     mCurrentTimeUs = 0;
@@ -492,7 +492,7 @@
 }
 
 status_t MP3Source::read(
-        MediaBuffer **out, const ReadOptions *options) {
+        MediaBufferBase **out, const ReadOptions *options) {
     *out = NULL;
 
     int64_t seekTimeUs;
@@ -522,7 +522,7 @@
         mSamplesRead = 0;
     }
 
-    MediaBuffer *buffer;
+    MediaBufferBase *buffer;
     status_t err = mGroup->acquire_buffer(&buffer);
     if (err != OK) {
         return err;
diff --git a/media/extractors/mp4/MPEG4Extractor.cpp b/media/extractors/mp4/MPEG4Extractor.cpp
index 3d7e45c..40c84a5 100644
--- a/media/extractors/mp4/MPEG4Extractor.cpp
+++ b/media/extractors/mp4/MPEG4Extractor.cpp
@@ -41,7 +41,7 @@
 #include <media/stagefright/foundation/ColorUtils.h>
 #include <media/stagefright/foundation/avc_utils.h>
 #include <media/stagefright/foundation/hexdump.h>
-#include <media/stagefright/MediaBuffer.h>
+#include <media/stagefright/MediaBufferBase.h>
 #include <media/stagefright/MediaBufferGroup.h>
 #include <media/stagefright/MediaDefs.h>
 #include <media/stagefright/MetaData.h>
@@ -83,9 +83,9 @@
 
     virtual sp<MetaData> getFormat();
 
-    virtual status_t read(MediaBuffer **buffer, const ReadOptions *options = NULL);
+    virtual status_t read(MediaBufferBase **buffer, const ReadOptions *options = NULL);
     virtual bool supportNonblockingRead() { return true; }
-    virtual status_t fragmentedRead(MediaBuffer **buffer, const ReadOptions *options = NULL);
+    virtual status_t fragmentedRead(MediaBufferBase **buffer, const ReadOptions *options = NULL);
 
     virtual ~MPEG4Source();
 
@@ -128,7 +128,7 @@
 
     MediaBufferGroup *mGroup;
 
-    MediaBuffer *mBuffer;
+    MediaBufferBase *mBuffer;
 
     bool mWantsNALFragments;
 
@@ -354,9 +354,7 @@
       mPreferHeif(mime != NULL && !strcasecmp(mime, MEDIA_MIMETYPE_CONTAINER_HEIF)),
       mFirstTrack(NULL),
       mLastTrack(NULL),
-      mFileMetaData(new MetaData),
-      mFirstSINF(NULL),
-      mIsDrm(false) {
+      mFileMetaData(new MetaData) {
     ALOGV("mime=%s, mPreferHeif=%d", mime, mPreferHeif);
 }
 
@@ -370,15 +368,6 @@
     }
     mFirstTrack = mLastTrack = NULL;
 
-    SINF *sinf = mFirstSINF;
-    while (sinf) {
-        SINF *next = sinf->next;
-        delete[] sinf->IPMPData;
-        delete sinf;
-        sinf = next;
-    }
-    mFirstSINF = NULL;
-
     for (size_t i = 0; i < mPssh.size(); i++) {
         delete [] mPssh[i].data;
     }
@@ -672,177 +661,6 @@
     return mInitCheck;
 }
 
-char* MPEG4Extractor::getDrmTrackInfo(size_t trackID, int *len) {
-    if (mFirstSINF == NULL) {
-        return NULL;
-    }
-
-    SINF *sinf = mFirstSINF;
-    while (sinf && (trackID != sinf->trackID)) {
-        sinf = sinf->next;
-    }
-
-    if (sinf == NULL) {
-        return NULL;
-    }
-
-    *len = sinf->len;
-    return sinf->IPMPData;
-}
-
-// Reads an encoded integer 7 bits at a time until it encounters the high bit clear.
-static int32_t readSize(off64_t offset,
-        DataSourceBase *DataSourceBase, uint8_t *numOfBytes) {
-    uint32_t size = 0;
-    uint8_t data;
-    bool moreData = true;
-    *numOfBytes = 0;
-
-    while (moreData) {
-        if (DataSourceBase->readAt(offset, &data, 1) < 1) {
-            return -1;
-        }
-        offset ++;
-        moreData = (data >= 128) ? true : false;
-        size = (size << 7) | (data & 0x7f); // Take last 7 bits
-        (*numOfBytes) ++;
-    }
-
-    return size;
-}
-
-status_t MPEG4Extractor::parseDrmSINF(
-        off64_t * /* offset */, off64_t data_offset) {
-    uint8_t updateIdTag;
-    if (mDataSource->readAt(data_offset, &updateIdTag, 1) < 1) {
-        return ERROR_IO;
-    }
-    data_offset ++;
-
-    if (0x01/*OBJECT_DESCRIPTOR_UPDATE_ID_TAG*/ != updateIdTag) {
-        return ERROR_MALFORMED;
-    }
-
-    uint8_t numOfBytes;
-    int32_t size = readSize(data_offset, mDataSource, &numOfBytes);
-    if (size < 0) {
-        return ERROR_IO;
-    }
-    data_offset += numOfBytes;
-
-    while(size >= 11 ) {
-        uint8_t descriptorTag;
-        if (mDataSource->readAt(data_offset, &descriptorTag, 1) < 1) {
-            return ERROR_IO;
-        }
-        data_offset ++;
-
-        if (0x11/*OBJECT_DESCRIPTOR_ID_TAG*/ != descriptorTag) {
-            return ERROR_MALFORMED;
-        }
-
-        uint8_t buffer[8];
-        //ObjectDescriptorID and ObjectDescriptor url flag
-        if (mDataSource->readAt(data_offset, buffer, 2) < 2) {
-            return ERROR_IO;
-        }
-        data_offset += 2;
-
-        if ((buffer[1] >> 5) & 0x0001) { //url flag is set
-            return ERROR_MALFORMED;
-        }
-
-        if (mDataSource->readAt(data_offset, buffer, 8) < 8) {
-            return ERROR_IO;
-        }
-        data_offset += 8;
-
-        if ((0x0F/*ES_ID_REF_TAG*/ != buffer[1])
-                || ( 0x0A/*IPMP_DESCRIPTOR_POINTER_ID_TAG*/ != buffer[5])) {
-            return ERROR_MALFORMED;
-        }
-
-        SINF *sinf = new SINF;
-        sinf->trackID = U16_AT(&buffer[3]);
-        sinf->IPMPDescriptorID = buffer[7];
-        sinf->next = mFirstSINF;
-        mFirstSINF = sinf;
-
-        size -= (8 + 2 + 1);
-    }
-
-    if (size != 0) {
-        return ERROR_MALFORMED;
-    }
-
-    if (mDataSource->readAt(data_offset, &updateIdTag, 1) < 1) {
-        return ERROR_IO;
-    }
-    data_offset ++;
-
-    if(0x05/*IPMP_DESCRIPTOR_UPDATE_ID_TAG*/ != updateIdTag) {
-        return ERROR_MALFORMED;
-    }
-
-    size = readSize(data_offset, mDataSource, &numOfBytes);
-    if (size < 0) {
-        return ERROR_IO;
-    }
-    data_offset += numOfBytes;
-
-    while (size > 0) {
-        uint8_t tag;
-        int32_t dataLen;
-        if (mDataSource->readAt(data_offset, &tag, 1) < 1) {
-            return ERROR_IO;
-        }
-        data_offset ++;
-
-        if (0x0B/*IPMP_DESCRIPTOR_ID_TAG*/ == tag) {
-            uint8_t id;
-            dataLen = readSize(data_offset, mDataSource, &numOfBytes);
-            if (dataLen < 0) {
-                return ERROR_IO;
-            } else if (dataLen < 4) {
-                return ERROR_MALFORMED;
-            }
-            data_offset += numOfBytes;
-
-            if (mDataSource->readAt(data_offset, &id, 1) < 1) {
-                return ERROR_IO;
-            }
-            data_offset ++;
-
-            SINF *sinf = mFirstSINF;
-            while (sinf && (sinf->IPMPDescriptorID != id)) {
-                sinf = sinf->next;
-            }
-            if (sinf == NULL) {
-                return ERROR_MALFORMED;
-            }
-            sinf->len = dataLen - 3;
-            sinf->IPMPData = new (std::nothrow) char[sinf->len];
-            if (sinf->IPMPData == NULL) {
-                return ERROR_MALFORMED;
-            }
-            data_offset += 2;
-
-            if (mDataSource->readAt(data_offset, sinf->IPMPData, sinf->len) < sinf->len) {
-                return ERROR_IO;
-            }
-            data_offset += sinf->len;
-
-            size -= (dataLen + numOfBytes + 1);
-        }
-    }
-
-    if (size != 0) {
-        return ERROR_MALFORMED;
-    }
-
-    return UNKNOWN_ERROR;  // Return a dummy error.
-}
-
 struct PathAdder {
     PathAdder(Vector<uint32_t> *path, uint32_t chunkType)
         : mPath(path) {
@@ -1144,11 +962,7 @@
             } else if (chunk_type == FOURCC('m', 'o', 'o', 'v')) {
                 mInitCheck = OK;
 
-                if (!mIsDrm) {
-                    return UNKNOWN_ERROR;  // Return a dummy error.
-                } else {
-                    return OK;
-                }
+                return UNKNOWN_ERROR;  // Return a dummy error.
             }
             break;
         }
@@ -1596,7 +1410,7 @@
             }
 
             if (chunk_type != FOURCC('e', 'n', 'c', 'a')) {
-                // if the chunk type is enca, we'll get the type from the sinf/frma box later
+                // if the chunk type is enca, we'll get the type from the frma box later
                 mLastTrack->meta->setCString(kKeyMIMEType, FourCC2MIME(chunk_type));
                 AdjustChannelsAndRate(chunk_type, &num_channels, &sample_rate);
             }
@@ -1656,7 +1470,7 @@
                 return ERROR_MALFORMED;
 
             if (chunk_type != FOURCC('e', 'n', 'c', 'v')) {
-                // if the chunk type is encv, we'll get the type from the sinf/frma box later
+                // if the chunk type is encv, we'll get the type from the frma box later
                 mLastTrack->meta->setCString(kKeyMIMEType, FourCC2MIME(chunk_type));
             }
             mLastTrack->meta->setInt32(kKeyWidth, width);
@@ -2278,20 +2092,10 @@
 
         case FOURCC('m', 'd', 'a', 't'):
         {
-            ALOGV("mdat chunk, drm: %d", mIsDrm);
-
             mMdatFound = true;
 
-            if (!mIsDrm) {
-                *offset += chunk_size;
-                break;
-            }
-
-            if (chunk_size < 8) {
-                return ERROR_MALFORMED;
-            }
-
-            return parseDrmSINF(offset, data_offset);
+            *offset += chunk_size;
+            break;
         }
 
         case FOURCC('h', 'd', 'l', 'r'):
@@ -4681,7 +4485,7 @@
 }
 
 status_t MPEG4Source::read(
-        MediaBuffer **out, const ReadOptions *options) {
+        MediaBufferBase **out, const ReadOptions *options) {
     Mutex::Autolock autoLock(mLock);
 
     CHECK(mStarted);
@@ -4906,7 +4710,7 @@
             return ERROR_MALFORMED;
         }
 
-        MediaBuffer *clone = mBuffer->clone();
+        MediaBufferBase *clone = mBuffer->clone();
         CHECK(clone != NULL);
         clone->set_range(mBuffer->range_offset() + mNALLengthSize, nal_size);
 
@@ -5026,7 +4830,7 @@
 }
 
 status_t MPEG4Source::fragmentedRead(
-        MediaBuffer **out, const ReadOptions *options) {
+        MediaBufferBase **out, const ReadOptions *options) {
 
     ALOGV("MPEG4Source::fragmentedRead");
 
@@ -5230,7 +5034,7 @@
             return ERROR_MALFORMED;
         }
 
-        MediaBuffer *clone = mBuffer->clone();
+        MediaBufferBase *clone = mBuffer->clone();
         CHECK(clone != NULL);
         clone->set_range(mBuffer->range_offset() + mNALLengthSize, nal_size);
 
diff --git a/media/extractors/mp4/MPEG4Extractor.h b/media/extractors/mp4/MPEG4Extractor.h
index 644c430..5c86345 100644
--- a/media/extractors/mp4/MPEG4Extractor.h
+++ b/media/extractors/mp4/MPEG4Extractor.h
@@ -63,9 +63,6 @@
     virtual uint32_t flags() const;
     virtual const char * name() { return "MPEG4Extractor"; }
 
-    // for DRM
-    virtual char* getDrmTrackInfo(size_t trackID, int *len);
-
 protected:
     virtual ~MPEG4Extractor();
 
@@ -131,21 +128,8 @@
 
     static status_t verifyTrack(Track *track);
 
-    struct SINF {
-        SINF *next;
-        uint16_t trackID;
-        uint8_t IPMPDescriptorID;
-        ssize_t len;
-        char *IPMPData;
-    };
-
-    SINF *mFirstSINF;
-
-    bool mIsDrm;
     sp<ItemTable> mItemTable;
 
-    status_t parseDrmSINF(off64_t *offset, off64_t data_offset);
-
     status_t parseTrackHeader(off64_t data_offset, off64_t data_size);
 
     status_t parseSegmentIndex(off64_t data_offset, size_t data_size);
diff --git a/media/extractors/mpeg2/MPEG2PSExtractor.cpp b/media/extractors/mpeg2/MPEG2PSExtractor.cpp
index b4d0ee5..c2de6e7 100644
--- a/media/extractors/mpeg2/MPEG2PSExtractor.cpp
+++ b/media/extractors/mpeg2/MPEG2PSExtractor.cpp
@@ -49,7 +49,7 @@
     virtual sp<MetaData> getFormat();
 
     virtual status_t read(
-            MediaBuffer **buffer, const ReadOptions *options);
+            MediaBufferBase **buffer, const ReadOptions *options);
 
 protected:
     virtual ~Track();
@@ -80,7 +80,7 @@
     virtual sp<MetaData> getFormat();
 
     virtual status_t read(
-            MediaBuffer **buffer, const ReadOptions *options);
+            MediaBufferBase **buffer, const ReadOptions *options);
 
 protected:
     virtual ~WrappedTrack();
@@ -659,7 +659,7 @@
 }
 
 status_t MPEG2PSExtractor::Track::read(
-        MediaBuffer **buffer, const ReadOptions *options) {
+        MediaBufferBase **buffer, const ReadOptions *options) {
     if (mSource == NULL) {
         return NO_INIT;
     }
@@ -744,7 +744,7 @@
 }
 
 status_t MPEG2PSExtractor::WrappedTrack::read(
-        MediaBuffer **buffer, const ReadOptions *options) {
+        MediaBufferBase **buffer, const ReadOptions *options) {
     return mTrack->read(buffer, options);
 }
 
diff --git a/media/extractors/mpeg2/MPEG2TSExtractor.cpp b/media/extractors/mpeg2/MPEG2TSExtractor.cpp
index 3183064..7887a7c 100644
--- a/media/extractors/mpeg2/MPEG2TSExtractor.cpp
+++ b/media/extractors/mpeg2/MPEG2TSExtractor.cpp
@@ -61,7 +61,7 @@
     virtual sp<MetaData> getFormat();
 
     virtual status_t read(
-            MediaBuffer **buffer, const ReadOptions *options = NULL);
+            MediaBufferBase **buffer, const ReadOptions *options = NULL);
 
 private:
     MPEG2TSExtractor *mExtractor;
@@ -99,7 +99,7 @@
 }
 
 status_t MPEG2TSSource::read(
-        MediaBuffer **out, const ReadOptions *options) {
+        MediaBufferBase **out, const ReadOptions *options) {
     *out = NULL;
 
     int64_t seekTimeUs;
diff --git a/media/extractors/ogg/OggExtractor.cpp b/media/extractors/ogg/OggExtractor.cpp
index ab51e5e..6d7576f 100644
--- a/media/extractors/ogg/OggExtractor.cpp
+++ b/media/extractors/ogg/OggExtractor.cpp
@@ -27,7 +27,7 @@
 #include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/foundation/base64.h>
 #include <media/stagefright/foundation/ByteUtils.h>
-#include <media/stagefright/MediaBuffer.h>
+#include <media/stagefright/MediaBufferBase.h>
 #include <media/stagefright/MediaBufferGroup.h>
 #include <media/stagefright/MediaDefs.h>
 #include <media/stagefright/MediaErrors.h>
@@ -54,7 +54,7 @@
     virtual status_t stop();
 
     virtual status_t read(
-            MediaBuffer **buffer, const ReadOptions *options = NULL);
+            MediaBufferBase **buffer, const ReadOptions *options = NULL);
 
 protected:
     virtual ~OggSource();
@@ -82,7 +82,7 @@
 
     status_t seekToTime(int64_t timeUs);
     status_t seekToOffset(off64_t offset);
-    virtual status_t readNextPacket(MediaBuffer **buffer) = 0;
+    virtual status_t readNextPacket(MediaBufferBase **buffer) = 0;
 
     status_t init();
 
@@ -141,7 +141,7 @@
     // 1 - bitstream identification header
     // 3 - comment header
     // 5 - codec setup header (Vorbis only)
-    virtual status_t verifyHeader(MediaBuffer *buffer, uint8_t type) = 0;
+    virtual status_t verifyHeader(MediaBufferBase *buffer, uint8_t type) = 0;
 
     // Read the next ogg packet from the underlying data source; optionally
     // calculate the timestamp for the output packet whilst pretending
@@ -149,9 +149,9 @@
     //
     // *buffer is NULL'ed out immediately upon entry, and if successful a new buffer is allocated;
     // clients are responsible for releasing the original buffer.
-    status_t _readNextPacket(MediaBuffer **buffer, bool calcVorbisTimestamp);
+    status_t _readNextPacket(MediaBufferBase **buffer, bool calcVorbisTimestamp);
 
-    int32_t getPacketBlockSize(MediaBuffer *buffer);
+    int32_t getPacketBlockSize(MediaBufferBase *buffer);
 
     void parseFileMetaData();
 
@@ -173,7 +173,7 @@
 
     virtual uint64_t approxBitrate() const;
 
-    virtual status_t readNextPacket(MediaBuffer **buffer) {
+    virtual status_t readNextPacket(MediaBufferBase **buffer) {
         return _readNextPacket(buffer, /* calcVorbisTimestamp = */ true);
     }
 
@@ -185,7 +185,7 @@
         return granulePos * 1000000ll / mVi.rate;
     }
 
-    virtual status_t verifyHeader(MediaBuffer *buffer, uint8_t type);
+    virtual status_t verifyHeader(MediaBufferBase *buffer, uint8_t type);
 };
 
 struct MyOpusExtractor : public MyOggExtractor {
@@ -203,16 +203,16 @@
         return 0;
     }
 
-    virtual status_t readNextPacket(MediaBuffer **buffer);
+    virtual status_t readNextPacket(MediaBufferBase **buffer);
 
 protected:
     virtual int64_t getTimeUsOfGranule(uint64_t granulePos) const;
-    virtual status_t verifyHeader(MediaBuffer *buffer, uint8_t type);
+    virtual status_t verifyHeader(MediaBufferBase *buffer, uint8_t type);
 
 private:
-    status_t verifyOpusHeader(MediaBuffer *buffer);
-    status_t verifyOpusComments(MediaBuffer *buffer);
-    uint32_t getNumSamplesInPacket(MediaBuffer *buffer) const;
+    status_t verifyOpusHeader(MediaBufferBase *buffer);
+    status_t verifyOpusComments(MediaBufferBase *buffer);
+    uint32_t getNumSamplesInPacket(MediaBufferBase *buffer) const;
 
     uint8_t mChannelCount;
     uint16_t mCodecDelay;
@@ -256,7 +256,7 @@
 }
 
 status_t OggSource::read(
-        MediaBuffer **out, const ReadOptions *options) {
+        MediaBufferBase **out, const ReadOptions *options) {
     *out = NULL;
 
     int64_t seekTimeUs;
@@ -268,7 +268,7 @@
         }
     }
 
-    MediaBuffer *packet;
+    MediaBufferBase *packet;
     status_t err = mExtractor->mImpl->readNextPacket(&packet);
 
     if (err != OK) {
@@ -562,13 +562,13 @@
     return sizeof(header) + page->mNumSegments + totalSize;
 }
 
-status_t MyOpusExtractor::readNextPacket(MediaBuffer **out) {
+status_t MyOpusExtractor::readNextPacket(MediaBufferBase **out) {
     if (mOffset <= mFirstDataOffset && mStartGranulePosition < 0) {
         // The first sample might not start at time 0; find out where by subtracting
         // the number of samples on the first page from the granule position
         // (position of last complete sample) of the first page. This happens
         // the first time before we attempt to read a packet from the first page.
-        MediaBuffer *mBuf;
+        MediaBufferBase *mBuf;
         uint32_t numSamples = 0;
         uint64_t curGranulePosition = 0;
         while (true) {
@@ -623,7 +623,7 @@
     return OK;
 }
 
-uint32_t MyOpusExtractor::getNumSamplesInPacket(MediaBuffer *buffer) const {
+uint32_t MyOpusExtractor::getNumSamplesInPacket(MediaBufferBase *buffer) const {
     if (buffer == NULL || buffer->range_length() < 1) {
         return 0;
     }
@@ -669,10 +669,10 @@
     return numSamples;
 }
 
-status_t MyOggExtractor::_readNextPacket(MediaBuffer **out, bool calcVorbisTimestamp) {
+status_t MyOggExtractor::_readNextPacket(MediaBufferBase **out, bool calcVorbisTimestamp) {
     *out = NULL;
 
-    MediaBuffer *buffer = NULL;
+    MediaBufferBase *buffer = NULL;
     int64_t timeUs = -1;
 
     for (;;) {
@@ -708,7 +708,7 @@
                 ALOGE("b/36592202");
                 return ERROR_MALFORMED;
             }
-            MediaBuffer *tmp = new (std::nothrow) MediaBuffer(fullSize);
+            MediaBufferBase *tmp = MediaBufferBase::Create(fullSize);
             if (tmp == NULL) {
                 if (buffer != NULL) {
                     buffer->release();
@@ -833,7 +833,7 @@
     mMeta->setCString(kKeyMIMEType, mMimeType);
 
     status_t err;
-    MediaBuffer *packet;
+    MediaBufferBase *packet;
     for (size_t i = 0; i < mNumHeaders; ++i) {
         // ignore timestamp for configuration packets
         if ((err = _readNextPacket(&packet, /* calcVorbisTimestamp = */ false)) != OK) {
@@ -910,7 +910,7 @@
     }
 }
 
-int32_t MyOggExtractor::getPacketBlockSize(MediaBuffer *buffer) {
+int32_t MyOggExtractor::getPacketBlockSize(MediaBufferBase *buffer) {
     const uint8_t *data =
         (const uint8_t *)buffer->data() + buffer->range_offset();
 
@@ -950,7 +950,7 @@
     return pcmSamplePosition * 1000000ll / kOpusSampleRate;
 }
 
-status_t MyOpusExtractor::verifyHeader(MediaBuffer *buffer, uint8_t type) {
+status_t MyOpusExtractor::verifyHeader(MediaBufferBase *buffer, uint8_t type) {
     switch (type) {
         // there are actually no header types defined in the Opus spec; we choose 1 and 3 to mean
         // header and comments such that we can share code with MyVorbisExtractor.
@@ -963,7 +963,7 @@
     }
 }
 
-status_t MyOpusExtractor::verifyOpusHeader(MediaBuffer *buffer) {
+status_t MyOpusExtractor::verifyOpusHeader(MediaBufferBase *buffer) {
     const size_t kOpusHeaderSize = 19;
     const uint8_t *data =
         (const uint8_t *)buffer->data() + buffer->range_offset();
@@ -989,7 +989,7 @@
     return OK;
 }
 
-status_t MyOpusExtractor::verifyOpusComments(MediaBuffer *buffer) {
+status_t MyOpusExtractor::verifyOpusComments(MediaBufferBase *buffer) {
     // add artificial framing bit so we can reuse _vorbis_unpack_comment
     int32_t commentSize = buffer->range_length() + 1;
     sp<ABuffer> aBuf = new ABuffer(commentSize);
@@ -1081,7 +1081,7 @@
 }
 
 status_t MyVorbisExtractor::verifyHeader(
-        MediaBuffer *buffer, uint8_t type) {
+        MediaBufferBase *buffer, uint8_t type) {
     const uint8_t *data =
         (const uint8_t *)buffer->data() + buffer->range_offset();
 
diff --git a/media/extractors/wav/WAVExtractor.cpp b/media/extractors/wav/WAVExtractor.cpp
index 2c991a7..a18cee5 100644
--- a/media/extractors/wav/WAVExtractor.cpp
+++ b/media/extractors/wav/WAVExtractor.cpp
@@ -68,7 +68,7 @@
     virtual sp<MetaData> getFormat();
 
     virtual status_t read(
-            MediaBuffer **buffer, const ReadOptions *options = NULL);
+            MediaBufferBase **buffer, const ReadOptions *options = NULL);
 
     virtual bool supportNonblockingRead() { return true; }
 
@@ -385,7 +385,7 @@
 
     if (mBitsPerSample == 8) {
         // As a temporary buffer for 8->16 bit conversion.
-        mGroup->add_buffer(new MediaBuffer(kMaxFrameSize));
+        mGroup->add_buffer(MediaBufferBase::Create(kMaxFrameSize));
     }
 
     mCurrentPos = mOffset;
@@ -415,7 +415,7 @@
 }
 
 status_t WAVSource::read(
-        MediaBuffer **out, const ReadOptions *options) {
+        MediaBufferBase **out, const ReadOptions *options) {
     *out = NULL;
 
     if (options != nullptr && options->getNonBlocking() && !mGroup->has_buffers()) {
@@ -441,7 +441,7 @@
         mCurrentPos = pos + mOffset;
     }
 
-    MediaBuffer *buffer;
+    MediaBufferBase *buffer;
     status_t err = mGroup->acquire_buffer(&buffer);
     if (err != OK) {
         return err;
@@ -492,7 +492,7 @@
             // Convert 8-bit unsigned samples to 16-bit signed.
 
             // Create new buffer with 2 byte wide samples
-            MediaBuffer *tmp;
+            MediaBufferBase *tmp;
             CHECK_EQ(mGroup->acquire_buffer(&tmp), (status_t)OK);
             tmp->set_range(0, 2 * n);
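
The "Convert 8-bit unsigned samples to 16-bit signed" comment above refers to the usual
per-sample bias-and-scale. A sketch of that conversion (not necessarily the exact code in
WAVSource::read):

    #include <stddef.h>
    #include <stdint.h>

    // in[] holds n unsigned 8-bit samples (silence at 128);
    // out[] receives n signed 16-bit samples (silence at 0).
    static void convertU8ToS16(const uint8_t *in, int16_t *out, size_t n) {
        for (size_t i = 0; i < n; ++i) {
            out[i] = ((int16_t)in[i] - 128) * 256;  // 0..255 -> -32768..32512
        }
    }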
 
diff --git a/media/libmedia/IMediaExtractor.cpp b/media/libmedia/IMediaExtractor.cpp
index 51ccb5a..e9a6230 100644
--- a/media/libmedia/IMediaExtractor.cpp
+++ b/media/libmedia/IMediaExtractor.cpp
@@ -35,9 +35,7 @@
     GETTRACKMETADATA,
     GETMETADATA,
     FLAGS,
-    GETDRMTRACKINFO,
     SETMEDIACAS,
-    SETUID,
     NAME,
     GETMETRICS
 };
@@ -112,11 +110,6 @@
         return 0;
     }
 
-    virtual char* getDrmTrackInfo(size_t trackID __unused, int *len __unused) {
-        ALOGV("getDrmTrackInfo NOT IMPLEMENTED");
-        return NULL;
-    }
-
     virtual status_t setMediaCas(const HInterfaceToken &casToken) {
         ALOGV("setMediaCas");
 
@@ -131,10 +124,6 @@
         return reply.readInt32();
     }
 
-    virtual void setUID(uid_t uid __unused) {
-        ALOGV("setUID NOT IMPLEMENTED");
-    }
-
     virtual const char * name() {
         ALOGV("name NOT IMPLEMENTED");
         return NULL;
diff --git a/media/libmedia/IMediaSource.cpp b/media/libmedia/IMediaSource.cpp
index 0d5127c..f6b9255 100644
--- a/media/libmedia/IMediaSource.cpp
+++ b/media/libmedia/IMediaSource.cpp
@@ -113,9 +113,9 @@
         return NULL;
     }
 
-    virtual status_t read(MediaBuffer **buffer,
+    virtual status_t read(MediaBufferBase **buffer,
             const MediaSource::ReadOptions *options) {
-        Vector<MediaBuffer *> buffers;
+        Vector<MediaBufferBase *> buffers;
         status_t ret = readMultiple(&buffers, 1 /* maxNumBuffers */, options);
         *buffer = buffers.size() == 0 ? nullptr : buffers[0];
         ALOGV("read status %d, bufferCount %u, sinceStop %u",
@@ -124,7 +124,7 @@
     }
 
     virtual status_t readMultiple(
-            Vector<MediaBuffer *> *buffers, uint32_t maxNumBuffers,
+            Vector<MediaBufferBase *> *buffers, uint32_t maxNumBuffers,
             const MediaSource::ReadOptions *options) {
         ALOGV("readMultiple");
         if (buffers == NULL || !buffers->isEmpty()) {
@@ -341,7 +341,7 @@
             uint32_t bufferCount = 0;
             for (; bufferCount < maxNumBuffers; ++bufferCount, ++mBuffersSinceStop) {
                 MediaBuffer *buf = nullptr;
-                ret = read(&buf, useOptions ? &opts : nullptr);
+                ret = read((MediaBufferBase **)&buf, useOptions ? &opts : nullptr);
                 opts.clearNonPersistent(); // Remove options that only apply to first buffer.
                 if (ret != NO_ERROR || buf == nullptr) {
                     break;
@@ -364,7 +364,7 @@
                     } else {
                         ALOGD("Large buffer %zu without IMemory!", length);
                         ret = mGroup->acquire_buffer(
-                                &transferBuf, false /* nonBlocking */, length);
+                                (MediaBufferBase **)&transferBuf, false /* nonBlocking */, length);
                         if (ret != OK
                                 || transferBuf == nullptr
                                 || transferBuf->mMemory == nullptr) {
diff --git a/media/libmedia/include/media/IMediaExtractor.h b/media/libmedia/include/media/IMediaExtractor.h
index 9899429..75e4ee2 100644
--- a/media/libmedia/include/media/IMediaExtractor.h
+++ b/media/libmedia/include/media/IMediaExtractor.h
@@ -60,13 +60,8 @@
     // CAN_SEEK_BACKWARD | CAN_SEEK_FORWARD | CAN_SEEK | CAN_PAUSE
     virtual uint32_t flags() const = 0;
 
-    // for DRM
-    virtual char* getDrmTrackInfo(size_t trackID, int *len)  = 0;
-
     virtual status_t setMediaCas(const HInterfaceToken &casToken) = 0;
 
-    virtual void setUID(uid_t uid)  = 0;
-
     virtual const char * name() = 0;
 };
 
diff --git a/media/libmedia/include/media/IMediaSource.h b/media/libmedia/include/media/IMediaSource.h
index dabe231..7a4b1b9 100644
--- a/media/libmedia/include/media/IMediaSource.h
+++ b/media/libmedia/include/media/IMediaSource.h
@@ -64,7 +64,7 @@
     //
     // TODO: consider removing read() in favor of readMultiple().
     virtual status_t read(
-            MediaBuffer **buffer,
+            MediaBufferBase **buffer,
             const MediaSource::ReadOptions *options = NULL) = 0;
 
     // Returns a vector of new buffers of data, where the new buffers are added
@@ -80,7 +80,7 @@
     // ReadOptions may be specified. Persistent options apply to all reads;
     // non-persistent options (e.g. seek) apply only to the first read.
     virtual status_t readMultiple(
-            Vector<MediaBuffer *> *buffers, uint32_t maxNumBuffers = 1,
+            Vector<MediaBufferBase *> *buffers, uint32_t maxNumBuffers = 1,
             const MediaSource::ReadOptions *options = nullptr) = 0;
 
     // Returns true if |readMultiple| is supported, otherwise false.
@@ -110,7 +110,7 @@
 
     // TODO: Implement this for local media sources.
     virtual status_t readMultiple(
-            Vector<MediaBuffer *> * /* buffers */, uint32_t /* maxNumBuffers = 1 */,
+            Vector<MediaBufferBase *> * /* buffers */, uint32_t /* maxNumBuffers = 1 */,
             const MediaSource::ReadOptions * /* options = nullptr */) {
         return ERROR_UNSUPPORTED;
     }
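
A minimal sketch of a client draining a remote source with the updated vector type;
`source` is assumed to be a started sp<IMediaSource>:

    Vector<MediaBufferBase *> buffers;
    status_t err;
    do {
        err = source->readMultiple(&buffers, 16 /* maxNumBuffers */);
        for (size_t i = 0; i < buffers.size(); ++i) {
            // consume buffers[i]->data() / range_offset() / range_length()
            buffers[i]->release();
        }
        buffers.clear();  // readMultiple() requires an empty vector on entry
    } while (err == OK);
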
diff --git a/media/libmedia/include/media/MediaBufferHolder.h b/media/libmedia/include/media/MediaBufferHolder.h
index e8e2c4b..f9dfdf5 100644
--- a/media/libmedia/include/media/MediaBufferHolder.h
+++ b/media/libmedia/include/media/MediaBufferHolder.h
@@ -24,7 +24,7 @@
 namespace android {
 
 struct MediaBufferHolder : public RefBase {
-    MediaBufferHolder(MediaBuffer* buffer)
+    MediaBufferHolder(MediaBufferBase* buffer)
         : mMediaBuffer(buffer) {
         if (mMediaBuffer != nullptr) {
             mMediaBuffer->add_ref();
@@ -37,10 +37,10 @@
         }
     }
 
-    MediaBuffer* mediaBuffer() { return mMediaBuffer; }
+    MediaBufferBase* mediaBuffer() { return mMediaBuffer; }
 
 private:
-    MediaBuffer* const mMediaBuffer;
+    MediaBufferBase* const mMediaBuffer;
 };
 
 }  // android
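
The holder now pins a MediaBufferBase for as long as some sp<>-managed object keeps it.
A sketch of the typical use; the AMessage key name is made up:

    void stashBuffer(const sp<AMessage> &msg, MediaBufferBase *buffer) {
        // MediaBufferHolder add_ref()s in its constructor and release()s in its
        // destructor, so the buffer stays valid while the message holds the holder.
        msg->setObject("media-buffer-holder", new MediaBufferHolder(buffer));
    }
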
diff --git a/media/libmediaextractor/Android.bp b/media/libmediaextractor/Android.bp
index 8f4ba70..79af058 100644
--- a/media/libmediaextractor/Android.bp
+++ b/media/libmediaextractor/Android.bp
@@ -25,6 +25,7 @@
     srcs: [
         "DataSourceBase.cpp",
         "MediaBuffer.cpp",
+        "MediaBufferBase.cpp",
         "MediaBufferGroup.cpp",
         "MediaSourceBase.cpp",
         "MediaSource.cpp",
diff --git a/media/libmediaextractor/MediaBuffer.cpp b/media/libmediaextractor/MediaBuffer.cpp
index 28fc760..dac3d50 100644
--- a/media/libmediaextractor/MediaBuffer.cpp
+++ b/media/libmediaextractor/MediaBuffer.cpp
@@ -177,7 +177,7 @@
     mObserver = observer;
 }
 
-MediaBuffer *MediaBuffer::clone() {
+MediaBufferBase *MediaBuffer::clone() {
     MediaBuffer *buffer = new MediaBuffer(mData, mSize);
     buffer->set_range(mRangeOffset, mRangeLength);
     buffer->mMetaData = new MetaData(*mMetaData.get());
diff --git a/media/libmediaextractor/MediaBufferBase.cpp b/media/libmediaextractor/MediaBufferBase.cpp
new file mode 100644
index 0000000..a553289
--- /dev/null
+++ b/media/libmediaextractor/MediaBufferBase.cpp
@@ -0,0 +1,30 @@
+/*
+ * Copyright 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "MediaBufferBase"
+#include <utils/Log.h>
+
+#include <media/stagefright/MediaBuffer.h>
+#include <media/stagefright/MediaBufferBase.h>
+
+namespace android {
+
+//static
+MediaBufferBase *MediaBufferBase::Create(size_t size) {
+    return new (std::nothrow) MediaBuffer(size);
+}
+
+}  // android
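
With this factory, extractors allocate buffers through the base interface instead of naming
the concrete MediaBuffer class, which is the pattern the add_buffer() call sites elsewhere in
this change switch to. A sketch, with kMaxFrameSize standing in for whatever size the format
needs:

    // start():
    MediaBufferGroup *group = new MediaBufferGroup;
    MediaBufferBase *mbuf = MediaBufferBase::Create(kMaxFrameSize);
    if (mbuf == nullptr) {  // Create() uses std::nothrow, so the result can be checked
        return NO_MEMORY;
    }
    group->add_buffer(mbuf);

    // read():
    MediaBufferBase *buffer = nullptr;
    status_t err = group->acquire_buffer(&buffer);
    if (err != OK) {
        return err;
    }
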
diff --git a/media/libmediaextractor/MediaBufferGroup.cpp b/media/libmediaextractor/MediaBufferGroup.cpp
index 22f01a5..2a8dd41 100644
--- a/media/libmediaextractor/MediaBufferGroup.cpp
+++ b/media/libmediaextractor/MediaBufferGroup.cpp
@@ -40,7 +40,7 @@
     Mutex mLock;
     Condition mCondition;
     size_t mGrowthLimit;  // Do not automatically grow group larger than this.
-    std::list<MediaBuffer *> mBuffers;
+    std::list<MediaBufferBase *> mBuffers;
 };
 
 MediaBufferGroup::MediaBufferGroup(size_t growthLimit)
@@ -94,7 +94,7 @@
 }
 
 MediaBufferGroup::~MediaBufferGroup() {
-    for (MediaBuffer *buffer : mInternal->mBuffers) {
+    for (MediaBufferBase *buffer : mInternal->mBuffers) {
         if (buffer->refcount() != 0) {
             const int localRefcount = buffer->localRefcount();
             const int remoteRefcount = buffer->remoteRefcount();
@@ -119,7 +119,7 @@
     delete mInternal;
 }
 
-void MediaBufferGroup::add_buffer(MediaBuffer *buffer) {
+void MediaBufferGroup::add_buffer(MediaBufferBase *buffer) {
     Mutex::Autolock autoLock(mInternal->mLock);
 
     // if we're above our growth limit, release buffers if we can
@@ -144,7 +144,7 @@
     if (mInternal->mBuffers.size() < mInternal->mGrowthLimit) {
         return true; // We can add more buffers internally.
     }
-    for (MediaBuffer *buffer : mInternal->mBuffers) {
+    for (MediaBufferBase *buffer : mInternal->mBuffers) {
         if (buffer->refcount() == 0) {
             return true;
         }
@@ -153,11 +153,11 @@
 }
 
 status_t MediaBufferGroup::acquire_buffer(
-        MediaBuffer **out, bool nonBlocking, size_t requestedSize) {
+        MediaBufferBase **out, bool nonBlocking, size_t requestedSize) {
     Mutex::Autolock autoLock(mInternal->mLock);
     for (;;) {
         size_t smallest = requestedSize;
-        MediaBuffer *buffer = nullptr;
+        MediaBufferBase *buffer = nullptr;
         auto free = mInternal->mBuffers.end();
         for (auto it = mInternal->mBuffers.begin(); it != mInternal->mBuffers.end(); ++it) {
             if ((*it)->refcount() == 0) {
@@ -217,7 +217,7 @@
     return mInternal->mBuffers.size();
 }
 
-void MediaBufferGroup::signalBufferReturned(MediaBuffer *) {
+void MediaBufferGroup::signalBufferReturned(MediaBufferBase *) {
     Mutex::Autolock autoLock(mInternal->mLock);
     mInternal->mCondition.signal();
 }
diff --git a/media/libmediaextractor/include/media/DataSourceBase.h b/media/libmediaextractor/include/media/DataSourceBase.h
index f964137..8028dd7 100644
--- a/media/libmediaextractor/include/media/DataSourceBase.h
+++ b/media/libmediaextractor/include/media/DataSourceBase.h
@@ -58,20 +58,6 @@
     bool getUInt32Var(off64_t offset, uint32_t *x, size_t size);
     bool getUInt64Var(off64_t offset, uint64_t *x, size_t size);
 
-    // Reads in "count" entries of type T into vector *x.
-    // Returns true if "count" entries can be read.
-    // If fewer than "count" entries can be read, return false. In this case,
-    // the output vector *x will still have those entries that were read. Call
-    // x->size() to obtain the number of entries read.
-    // The optional parameter chunkSize specifies how many entries should be
-    // read from the data source at one time into a temporary buffer. Increasing
-    // chunkSize can improve the performance at the cost of extra memory usage.
-    // The default value for chunkSize is set to read at least 4k bytes at a
-    // time, depending on sizeof(T).
-    template <typename T>
-    bool getVector(off64_t offset, Vector<T>* x, size_t count,
-                   size_t chunkSize = (4095 / sizeof(T)) + 1);
-
     // May return ERROR_UNSUPPORTED.
     virtual status_t getSize(off64_t *size);
 
@@ -110,51 +96,6 @@
     DataSourceBase &operator=(const DataSourceBase &);
 };
 
-template <typename T>
-bool DataSourceBase::getVector(off64_t offset, Vector<T>* x, size_t count,
-                           size_t chunkSize)
-{
-    x->clear();
-    if (chunkSize == 0) {
-        return false;
-    }
-    if (count == 0) {
-        return true;
-    }
-
-    T tmp[chunkSize];
-    ssize_t numBytesRead;
-    size_t numBytesPerChunk = chunkSize * sizeof(T);
-    size_t i;
-
-    for (i = 0; i + chunkSize < count; i += chunkSize) {
-        // This loops is executed when more than chunkSize records need to be
-        // read.
-        numBytesRead = this->readAt(offset, (void*)&tmp, numBytesPerChunk);
-        if (numBytesRead == -1) { // If readAt() returns -1, there is an error.
-            return false;
-        }
-        if (static_cast<size_t>(numBytesRead) < numBytesPerChunk) {
-            // This case is triggered when the stream ends before the whole
-            // chunk is read.
-            x->appendArray(tmp, (size_t)numBytesRead / sizeof(T));
-            return false;
-        }
-        x->appendArray(tmp, chunkSize);
-        offset += numBytesPerChunk;
-    }
-
-    // There are (count - i) more records to read.
-    // Right now, (count - i) <= chunkSize.
-    // We do the same thing as above, but with chunkSize replaced by count - i.
-    numBytesRead = this->readAt(offset, (void*)&tmp, (count - i) * sizeof(T));
-    if (numBytesRead == -1) {
-        return false;
-    }
-    x->appendArray(tmp, (size_t)numBytesRead / sizeof(T));
-    return x->size() == count;
-}
-
 }  // namespace android
 
 #endif  // DATA_SOURCE_BASE_H_
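
With DataSourceBase::getVector() removed, callers that still need to bulk-read fixed-size records can loop over readAt() themselves. A minimal sketch, assuming std::vector in place of the old utils Vector and reading one record per call (so it drops the old chunking optimization); readRecords is an illustrative name, not part of this change:

    #include <vector>

    #include <media/DataSourceBase.h>

    // T must be trivially copyable. On a short read or error, *out keeps the
    // records that were fully read and the function returns false.
    template <typename T>
    bool readRecords(android::DataSourceBase &source, off64_t offset,
                     std::vector<T> *out, size_t count) {
        out->clear();
        out->reserve(count);
        for (size_t i = 0; i < count; ++i) {
            T record;
            ssize_t n = source.readAt(offset, &record, sizeof(T));
            if (n < static_cast<ssize_t>(sizeof(T))) {
                return false;
            }
            out->push_back(record);
            offset += sizeof(T);
        }
        return true;
    }
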
diff --git a/media/libmediaextractor/include/media/MediaExtractor.h b/media/libmediaextractor/include/media/MediaExtractor.h
index 2e65620..c329903 100644
--- a/media/libmediaextractor/include/media/MediaExtractor.h
+++ b/media/libmediaextractor/include/media/MediaExtractor.h
@@ -72,12 +72,6 @@
     // CAN_SEEK_BACKWARD | CAN_SEEK_FORWARD | CAN_SEEK | CAN_PAUSE
     virtual uint32_t flags() const;
 
-    // for DRM
-    virtual char* getDrmTrackInfo(size_t /*trackID*/, int * /*len*/) {
-        return NULL;
-    }
-    virtual void setUID(uid_t /*uid*/) {
-    }
     virtual status_t setMediaCas(const uint8_t* /*casToken*/, size_t /*size*/) {
         return INVALID_OPERATION;
     }
diff --git a/media/libmediaextractor/include/media/MediaSourceBase.h b/media/libmediaextractor/include/media/MediaSourceBase.h
index 9db6099..ab56613 100644
--- a/media/libmediaextractor/include/media/MediaSourceBase.h
+++ b/media/libmediaextractor/include/media/MediaSourceBase.h
@@ -30,7 +30,7 @@
 
 namespace android {
 
-class MediaBuffer;
+class MediaBufferBase;
 
 class SourceBaseAllocTracker {
 public:
@@ -111,7 +111,7 @@
     // MediaSource has changed mid-stream, the client can continue reading
     // but should be prepared for buffers of the new configuration.
     virtual status_t read(
-            MediaBuffer **buffer, const ReadOptions *options = NULL) = 0;
+            MediaBufferBase **buffer, const ReadOptions *options = NULL) = 0;
 
     // Causes this source to suspend pulling data from its upstream source
     // until a subsequent read-with-seek. This is currently not supported
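
The read() contract is unchanged apart from the buffer type: OK with a buffer the caller must release(), INFO_FORMAT_CHANGED when the format changes mid-stream, and an error (typically ERROR_END_OF_STREAM) at the end. A sketch of the caller-side loop, templated on the source type so it applies to any implementor of this interface (drain is an illustrative name):

    #include <media/stagefright/MediaBufferBase.h>
    #include <media/stagefright/MediaErrors.h>

    template <typename Source>
    void drain(Source *source) {
        using namespace android;
        for (;;) {
            MediaBufferBase *buffer = nullptr;
            status_t err = source->read(&buffer);
            if (err == INFO_FORMAT_CHANGED) {
                // New format; re-query source->getFormat() and keep reading.
                continue;
            }
            if (err != OK || buffer == nullptr) {
                break;  // ERROR_END_OF_STREAM or a real error
            }
            // Payload lives in [range_offset(), range_offset() + range_length()).
            buffer->release();
        }
    }
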
diff --git a/media/libmediaextractor/include/media/stagefright/MediaBuffer.h b/media/libmediaextractor/include/media/stagefright/MediaBuffer.h
index a8f8375..85b4521 100644
--- a/media/libmediaextractor/include/media/stagefright/MediaBuffer.h
+++ b/media/libmediaextractor/include/media/stagefright/MediaBuffer.h
@@ -26,6 +26,7 @@
 #include <binder/MemoryDealer.h>
 #include <utils/Errors.h>
 #include <utils/RefBase.h>
+#include <media/stagefright/MediaBufferBase.h>
 
 namespace android {
 
@@ -34,19 +35,7 @@
 class MediaBufferObserver;
 class MetaData;
 
-class MediaBufferObserver {
-public:
-    MediaBufferObserver() {}
-    virtual ~MediaBufferObserver() {}
-
-    virtual void signalBufferReturned(MediaBuffer *buffer) = 0;
-
-private:
-    MediaBufferObserver(const MediaBufferObserver &);
-    MediaBufferObserver &operator=(const MediaBufferObserver &);
-};
-
-class MediaBuffer {
+class MediaBuffer : public MediaBufferBase {
 public:
     // allocations larger than or equal to this will use shared memory.
     static const size_t kSharedMemThreshold = 64 * 1024;
@@ -70,42 +59,42 @@
     //
     // If no MediaBufferGroup is set, the local reference count must be zero
     // when called, whereupon the MediaBuffer is deleted.
-    void release();
+    virtual void release();
 
     // Increments the local reference count.
     // Use only when MediaBufferGroup is set.
-    void add_ref();
+    virtual void add_ref();
 
-    void *data() const;
-    size_t size() const;
+    virtual void *data() const;
+    virtual size_t size() const;
 
-    size_t range_offset() const;
-    size_t range_length() const;
+    virtual size_t range_offset() const;
+    virtual size_t range_length() const;
 
-    void set_range(size_t offset, size_t length);
+    virtual void set_range(size_t offset, size_t length);
 
-    sp<MetaData> meta_data();
+    virtual sp<MetaData> meta_data();
 
     // Clears meta data and resets the range to the full extent.
-    void reset();
+    virtual void reset();
 
-    void setObserver(MediaBufferObserver *group);
+    virtual void setObserver(MediaBufferObserver *group);
 
     // Returns a clone of this MediaBuffer increasing its reference count.
     // The clone references the same data but has its own range and
     // MetaData.
-    MediaBuffer *clone();
+    virtual MediaBufferBase *clone();
 
     // sum of localRefcount() and remoteRefcount()
-    int refcount() const {
+    virtual int refcount() const {
         return localRefcount() + remoteRefcount();
     }
 
-    int localRefcount() const {
+    virtual int localRefcount() const {
         return mRefCount;
     }
 
-    int remoteRefcount() const {
+    virtual int remoteRefcount() const {
         if (mMemory.get() == nullptr || mMemory->pointer() == nullptr) return 0;
         int32_t remoteRefcount =
                 reinterpret_cast<SharedControl *>(mMemory->pointer())->getRemoteRefcount();
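
Because the accessors above are now virtual overrides of MediaBufferBase, code can keep constructing concrete MediaBuffer objects while every downstream consumer sees only the base type. A small sketch (wrapPayload is illustrative, not from this change):

    #include <string.h>

    #include <media/stagefright/MediaBuffer.h>
    #include <media/stagefright/MetaData.h>

    using namespace android;

    MediaBufferBase *wrapPayload(const void *src, size_t len, int64_t timeUs) {
        MediaBufferBase *mbuf = new MediaBuffer(len);  // concrete type chosen only here
        memcpy(mbuf->data(), src, len);                // everything else goes via the base
        mbuf->set_range(0, len);
        mbuf->meta_data()->setInt64(kKeyTime, timeUs);
        return mbuf;                                   // caller release()s it when done
    }
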
diff --git a/media/libmediaextractor/include/media/stagefright/MediaBufferBase.h b/media/libmediaextractor/include/media/stagefright/MediaBufferBase.h
new file mode 100644
index 0000000..81dd7d9
--- /dev/null
+++ b/media/libmediaextractor/include/media/stagefright/MediaBufferBase.h
@@ -0,0 +1,86 @@
+/*
+ * Copyright (C) 2009 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef MEDIA_BUFFER_BASE_H_
+
+#define MEDIA_BUFFER_BASE_H_
+
+#include <utils/RefBase.h>
+
+namespace android {
+
+class MediaBufferBase;
+class MetaData;
+
+class MediaBufferObserver {
+public:
+    MediaBufferObserver() {}
+    virtual ~MediaBufferObserver() {}
+
+    virtual void signalBufferReturned(MediaBufferBase *buffer) = 0;
+
+private:
+    MediaBufferObserver(const MediaBufferObserver &);
+    MediaBufferObserver &operator=(const MediaBufferObserver &);
+};
+
+class MediaBufferBase {
+public:
+    static MediaBufferBase *Create(size_t size);
+
+    // If MediaBufferGroup is set, decrement the local reference count;
+    // if the local reference count drops to 0, return the buffer to the
+    // associated MediaBufferGroup.
+    //
+    // If no MediaBufferGroup is set, the local reference count must be zero
+    // when called, whereupon the MediaBuffer is deleted.
+    virtual void release() = 0;
+
+    // Increments the local reference count.
+    // Use only when MediaBufferGroup is set.
+    virtual void add_ref() = 0;
+
+    virtual void *data() const = 0;
+    virtual size_t size() const = 0;
+
+    virtual size_t range_offset() const = 0;
+    virtual size_t range_length() const = 0;
+
+    virtual void set_range(size_t offset, size_t length) = 0;
+
+    virtual sp<MetaData> meta_data() = 0;
+
+    // Clears meta data and resets the range to the full extent.
+    virtual void reset() = 0;
+
+    virtual void setObserver(MediaBufferObserver *group) = 0;
+
+    // Returns a clone of this MediaBufferBase increasing its reference
+    // count. The clone references the same data but has its own range and
+    // MetaData.
+    virtual MediaBufferBase *clone() = 0;
+
+    virtual int refcount() const = 0;
+
+    virtual int localRefcount() const = 0;
+    virtual int remoteRefcount() const = 0;
+
+    virtual ~MediaBufferBase() {};
+};
+
+}  // namespace android
+
+#endif  // MEDIA_BUFFER_BASE_H_
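
MediaBufferObserver now lives here and is keyed on MediaBufferBase, so any buffer owner (not just MediaBufferGroup) can implement the recycling hook. A minimal owner that simply frees returned buffers, modelled on how MediaCodecSource::signalBufferReturned behaves elsewhere in this change (OneShotOwner is an illustrative name):

    #include <media/stagefright/MediaBufferBase.h>

    struct OneShotOwner : public android::MediaBufferObserver {
        // Invoked when a handed-out buffer's local refcount drops to zero.
        void signalBufferReturned(android::MediaBufferBase *buffer) override {
            buffer->setObserver(nullptr);  // detach so the final release() deletes it
            buffer->release();
        }
    };

    // Handing a buffer out:
    //     buffer->setObserver(&owner);
    //     buffer->add_ref();
    // The consumer's eventual release() calls back into signalBufferReturned().
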
diff --git a/media/libmediaextractor/include/media/stagefright/MediaBufferGroup.h b/media/libmediaextractor/include/media/stagefright/MediaBufferGroup.h
index 63d0a18..75d5df7 100644
--- a/media/libmediaextractor/include/media/stagefright/MediaBufferGroup.h
+++ b/media/libmediaextractor/include/media/stagefright/MediaBufferGroup.h
@@ -18,11 +18,15 @@
 
 #define MEDIA_BUFFER_GROUP_H_
 
-#include <media/stagefright/MediaBuffer.h>
+#include <list>
+
+#include <media/stagefright/MediaBufferBase.h>
+#include <utils/Errors.h>
+#include <utils/threads.h>
 
 namespace android {
 
-class MediaBuffer;
+class MediaBufferBase;
 
 class MediaBufferGroup : public MediaBufferObserver {
 public:
@@ -33,7 +37,7 @@
 
     ~MediaBufferGroup();
 
-    void add_buffer(MediaBuffer *buffer);
+    void add_buffer(MediaBufferBase *buffer);
 
     bool has_buffers();
 
@@ -46,16 +50,14 @@
     // If requestedSize is > 0, the returned MediaBuffer should have buffer
     // size of at least requestedSize.
     status_t acquire_buffer(
-            MediaBuffer **buffer, bool nonBlocking = false, size_t requestedSize = 0);
+            MediaBufferBase **buffer, bool nonBlocking = false, size_t requestedSize = 0);
 
     size_t buffers() const;
 
     // If buffer is nullptr, have acquire_buffer() check for remote release.
-    virtual void signalBufferReturned(MediaBuffer *buffer);
+    virtual void signalBufferReturned(MediaBufferBase *buffer);
 
 private:
-    friend class MediaBuffer;
-
     struct InternalData;
     InternalData *mInternal;
 
diff --git a/media/libmediaextractor/include/media/stagefright/MetaData.h b/media/libmediaextractor/include/media/stagefright/MetaData.h
index c2a2d1e..7562a72 100644
--- a/media/libmediaextractor/include/media/stagefright/MetaData.h
+++ b/media/libmediaextractor/include/media/stagefright/MetaData.h
@@ -149,8 +149,6 @@
     // To store the timed text format data
     kKeyTextFormatData    = 'text',  // raw data
 
-    kKeyRequiresSecureBuffers = 'secu',  // bool (int32_t)
-
     kKeyIsADTS            = 'adts',  // bool (int32_t)
     kKeyAACAOT            = 'aaot',  // int32_t
 
diff --git a/media/libmediaplayer2/MediaPlayer2Manager.cpp b/media/libmediaplayer2/MediaPlayer2Manager.cpp
index 44df2ac..39b102c 100644
--- a/media/libmediaplayer2/MediaPlayer2Manager.cpp
+++ b/media/libmediaplayer2/MediaPlayer2Manager.cpp
@@ -606,6 +606,39 @@
     return status;
 }
 
+status_t MediaPlayer2Manager::Client::prepareNextDataSource(
+        const sp<DataSourceDesc> &dsd) {
+    sp<MediaPlayer2Interface> p = getPlayer();
+    if (p == NULL) {
+        return NO_INIT;
+    }
+
+    if (dsd == NULL) {
+        return BAD_VALUE;
+    }
+
+    status_t status = p->prepareNextDataSource(dsd);
+    if (status != OK) {
+        ALOGE("prepareNextDataSource error: %d", status);
+    }
+
+    return status;
+}
+
+status_t MediaPlayer2Manager::Client::playNextDataSource(int64_t srcId) {
+    sp<MediaPlayer2Interface> p = getPlayer();
+    if (p == NULL) {
+        return NO_INIT;
+    }
+
+    status_t status = p->playNextDataSource(srcId);
+    if (status != OK) {
+        ALOGE("playNextDataSource error: %d", status);
+    }
+
+    return status;
+}
+
 void MediaPlayer2Manager::Client::disconnectNativeWindow_l() {
     if (mConnectedWindow != NULL && mConnectedWindow->getANativeWindow() != NULL) {
         status_t err = native_window_api_disconnect(
diff --git a/media/libmediaplayer2/MediaPlayer2Manager.h b/media/libmediaplayer2/MediaPlayer2Manager.h
index 7e6f0de..52bb9c6 100644
--- a/media/libmediaplayer2/MediaPlayer2Manager.h
+++ b/media/libmediaplayer2/MediaPlayer2Manager.h
@@ -274,7 +274,9 @@
         virtual status_t        getParameter(int key, Parcel *reply);
         virtual status_t        setNextPlayer(const sp<MediaPlayer2Engine>& player);
 
-        virtual status_t        setDataSource(const sp<DataSourceDesc> &dsd);
+        virtual status_t        setDataSource(const sp<DataSourceDesc> &dsd) override;
+        virtual status_t        prepareNextDataSource(const sp<DataSourceDesc> &dsd) override;
+        virtual status_t        playNextDataSource(int64_t srcId) override;
 
         static  void            notify(const wp<MediaPlayer2Engine> &listener, int64_t srcId,
                                        int msg, int ext1, int ext2, const Parcel *obj);
diff --git a/media/libmediaplayer2/include/mediaplayer2/MediaPlayer2Engine.h b/media/libmediaplayer2/include/mediaplayer2/MediaPlayer2Engine.h
index 2d1a24b..749d83c 100644
--- a/media/libmediaplayer2/include/mediaplayer2/MediaPlayer2Engine.h
+++ b/media/libmediaplayer2/include/mediaplayer2/MediaPlayer2Engine.h
@@ -48,6 +48,8 @@
     virtual void            disconnect() = 0;
 
     virtual status_t        setDataSource(const sp<DataSourceDesc>& source) = 0;
+    virtual status_t        prepareNextDataSource(const sp<DataSourceDesc>& source) = 0;
+    virtual status_t        playNextDataSource(int64_t srcId) = 0;
     virtual status_t        setVideoSurfaceTexture(const sp<ANativeWindowWrapper>& nww) = 0;
     virtual status_t        getBufferingSettings(
                                     BufferingSettings* buffering /* nonnull */) = 0;
diff --git a/media/libmediaplayer2/include/mediaplayer2/MediaPlayer2Interface.h b/media/libmediaplayer2/include/mediaplayer2/MediaPlayer2Interface.h
index b1cdf96..c0c490b 100644
--- a/media/libmediaplayer2/include/mediaplayer2/MediaPlayer2Interface.h
+++ b/media/libmediaplayer2/include/mediaplayer2/MediaPlayer2Interface.h
@@ -150,9 +150,11 @@
 
     virtual void        setAudioSink(const sp<AudioSink>& audioSink) { mAudioSink = audioSink; }
 
-    virtual status_t    setDataSource(const sp<DataSourceDesc>& /* dsd */) {
-        return INVALID_OPERATION;
-    }
+    virtual status_t    setDataSource(const sp<DataSourceDesc> &dsd) = 0;
+
+    virtual status_t    prepareNextDataSource(const sp<DataSourceDesc> &dsd) = 0;
+
+    virtual status_t    playNextDataSource(int64_t srcId) = 0;
 
     // pass the buffered native window to the media player service
     virtual status_t    setVideoSurfaceTexture(const sp<ANativeWindowWrapper>& nww) = 0;
@@ -166,7 +168,6 @@
         return OK;
     }
 
-    virtual status_t    prepare() = 0;
     virtual status_t    prepareAsync() = 0;
     virtual status_t    start() = 0;
     virtual status_t    stop() = 0;
diff --git a/media/libmediaplayer2/include/mediaplayer2/mediaplayer2.h b/media/libmediaplayer2/include/mediaplayer2/mediaplayer2.h
index e9d6f84..0afef1e 100644
--- a/media/libmediaplayer2/include/mediaplayer2/mediaplayer2.h
+++ b/media/libmediaplayer2/include/mediaplayer2/mediaplayer2.h
@@ -114,7 +114,24 @@
     // player, which just completed playback
     MEDIA2_INFO_STARTED_AS_NEXT = 2,
     // The player just pushed the very first video frame for rendering
-    MEDIA2_INFO_RENDERING_START = 3,
+    MEDIA2_INFO_VIDEO_RENDERING_START = 3,
+    // The player just pushed the very first audio frame for rendering
+    MEDIA2_INFO_AUDIO_RENDERING_START = 4,
+    // The player just completed the playback of this data source
+    MEDIA2_INFO_PLAYBACK_COMPLETE = 5,
+    // The player just completed the playback of the full play list
+    MEDIA2_INFO_PLAYLIST_END = 6,
+
+    // 1xx
+    // The player just prepared a data source.
+    MEDIA2_INFO_PREPARED = 100,
+    // The player just completed a call to play().
+    MEDIA2_INFO_COMPLETE_CALL_PLAY = 101,
+    // The player just completed a call to pause().
+    MEDIA2_INFO_COMPLETE_CALL_PAUSE = 102,
+    // The player just completed a call to seekTo().
+    MEDIA2_INFO_COMPLETE_CALL_SEEK = 103,
+
     // 7xx
     // The video is too complex for the decoder: it can't decode frames fast
     // enough. Possibly only the audio plays fine at this stage.
@@ -206,11 +223,12 @@
 
             status_t        getSrcId(int64_t *srcId);
             status_t        setDataSource(const sp<DataSourceDesc> &dsd);
+            status_t        prepareNextDataSource(const sp<DataSourceDesc> &dsd);
+            status_t        playNextDataSource(int64_t srcId);
             status_t        setVideoSurfaceTexture(const sp<ANativeWindowWrapper>& nww);
             status_t        setListener(const sp<MediaPlayer2Listener>& listener);
             status_t        getBufferingSettings(BufferingSettings* buffering /* nonnull */);
             status_t        setBufferingSettings(const BufferingSettings& buffering);
-            status_t        prepare();
             status_t        prepareAsync();
             status_t        start();
             status_t        stop();
@@ -271,7 +289,6 @@
     thread_id_t                 mLockThreadId;
     Mutex                       mLock;
     Mutex                       mNotifyLock;
-    Condition                   mSignal;
     sp<MediaPlayer2Listener>    mListener;
     void*                       mCookie;
     media_player2_states        mCurrentState;
@@ -279,8 +296,6 @@
     MediaPlayer2SeekMode        mCurrentSeekMode;
     int                         mSeekPosition;
     MediaPlayer2SeekMode        mSeekMode;
-    bool                        mPrepareSync;
-    status_t                    mPrepareStatus;
     audio_stream_type_t         mStreamType;
     Parcel*                     mAudioAttributesParcel;
     bool                        mLoop;
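
Taken together, the new prepareNextDataSource()/playNextDataSource() entry points and the extra MEDIA2_INFO_* codes replace the old single-source model with an explicit hand-off. A rough sketch of the intended client sequence (listener wiring is omitted; player is a MediaPlayer2 handle, e.g. an sp<MediaPlayer2>, and dsd1/dsd2 are DataSourceDesc objects set up by the caller):

    // 1. Start the first source as before.
    player->setDataSource(dsd1);
    player->prepareAsync();              // wait for MEDIA2_PREPARED via the listener
    player->start();

    // 2. While it plays, queue and prepare the follow-up source.
    player->prepareNextDataSource(dsd2);

    // 3. Switch over, either proactively or once the current source reports
    //    MEDIA2_INFO_PLAYBACK_COMPLETE; the new source then announces itself
    //    with MEDIA2_INFO_STARTED_AS_NEXT for its srcId.
    player->playNextDataSource(dsd2->mId);
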
diff --git a/media/libmediaplayer2/mediaplayer2.cpp b/media/libmediaplayer2/mediaplayer2.cpp
index ab30273..75d06b8 100644
--- a/media/libmediaplayer2/mediaplayer2.cpp
+++ b/media/libmediaplayer2/mediaplayer2.cpp
@@ -63,8 +63,6 @@
     mSeekPosition = -1;
     mSeekMode = MediaPlayer2SeekMode::SEEK_PREVIOUS_SYNC;
     mCurrentState = MEDIA_PLAYER2_IDLE;
-    mPrepareSync = false;
-    mPrepareStatus = NO_ERROR;
     mLoop = false;
     mLeftVolume = mRightVolume = 1.0;
     mVideoWidth = mVideoHeight = 0;
@@ -176,6 +174,32 @@
     return err;
 }
 
+status_t MediaPlayer2::prepareNextDataSource(const sp<DataSourceDesc> &dsd) {
+    if (dsd == NULL) {
+        return BAD_VALUE;
+    }
+    ALOGV("prepareNextDataSource type(%d), srcId(%lld)", dsd->mType, (long long)dsd->mId);
+
+    Mutex::Autolock _l(mLock);
+    if (mPlayer != NULL) {
+        return mPlayer->prepareNextDataSource(dsd);
+    }
+    ALOGE("prepareNextDataSource failed: state %X, mPlayer(%p)", mCurrentState, mPlayer.get());
+    return INVALID_OPERATION;
+}
+
+status_t MediaPlayer2::playNextDataSource(int64_t srcId) {
+    ALOGV("playNextDataSource srcId(%lld)", (long long)srcId);
+
+    Mutex::Autolock _l(mLock);
+    if (mPlayer != NULL) {
+        mSrcId = srcId;
+        return mPlayer->playNextDataSource(srcId);
+    }
+    ALOGE("playNextDataSource failed: state %X, mPlayer(%p)", mCurrentState, mPlayer.get());
+    return INVALID_OPERATION;
+}
+
 status_t MediaPlayer2::invoke(const Parcel& request, Parcel *reply)
 {
     Mutex::Autolock _l(mLock);
@@ -256,35 +280,6 @@
     return INVALID_OPERATION;
 }
 
-// TODO: In case of error, prepareAsync provides the caller with 2 error codes,
-// one defined in the Android framework and one provided by the implementation
-// that generated the error. The sync version of prepare returns only 1 error
-// code.
-status_t MediaPlayer2::prepare()
-{
-    ALOGV("prepare");
-    Mutex::Autolock _l(mLock);
-    mLockThreadId = getThreadId();
-    if (mPrepareSync) {
-        mLockThreadId = 0;
-        return -EALREADY;
-    }
-    mPrepareSync = true;
-    status_t ret = prepareAsync_l();
-    if (ret != NO_ERROR) {
-        mLockThreadId = 0;
-        return ret;
-    }
-
-    if (mPrepareSync) {
-        mSignal.wait(mLock);  // wait for prepare done
-        mPrepareSync = false;
-    }
-    ALOGV("prepare complete - status=%d", mPrepareStatus);
-    mLockThreadId = 0;
-    return mPrepareStatus;
-}
-
 status_t MediaPlayer2::prepareAsync()
 {
     ALOGV("prepareAsync");
@@ -576,7 +571,6 @@
 {
     mLoop = false;
     if (mCurrentState == MEDIA_PLAYER2_IDLE) return NO_ERROR;
-    mPrepareSync = false;
     if (mPlayer != 0) {
         status_t ret = mPlayer->reset();
         if (ret != NO_ERROR) {
@@ -808,12 +802,6 @@
     case MEDIA2_PREPARED:
         ALOGV("MediaPlayer2::notify() prepared");
         mCurrentState = MEDIA_PLAYER2_PREPARED;
-        if (mPrepareSync) {
-            ALOGV("signal application thread");
-            mPrepareSync = false;
-            mPrepareStatus = NO_ERROR;
-            mSignal.signal();
-        }
         break;
     case MEDIA2_DRM_INFO:
         ALOGV("MediaPlayer2::notify() MEDIA2_DRM_INFO(%lld, %d, %d, %d, %p)",
@@ -834,14 +822,6 @@
         // ext2: Implementation dependent error code.
         ALOGE("error (%d, %d)", ext1, ext2);
         mCurrentState = MEDIA_PLAYER2_STATE_ERROR;
-        if (mPrepareSync)
-        {
-            ALOGV("signal application thread");
-            mPrepareSync = false;
-            mPrepareStatus = ext1;
-            mSignal.signal();
-            send = false;
-        }
         break;
     case MEDIA2_INFO:
         // ext1: Media framework error code.
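
With the blocking prepare() path gone, an application that still wants synchronous behaviour has to rebuild it on its own side: call prepareAsync() and block on its own condition variable, which its MediaPlayer2Listener signals when MEDIA2_PREPARED or MEDIA2_ERROR arrives. A sketch under that assumption (SyncPrepare and its methods are illustrative, not part of this change):

    #include <utils/Errors.h>
    #include <utils/threads.h>

    struct SyncPrepare {
        android::Mutex lock;
        android::Condition cond;
        bool done = false;
        android::status_t result = android::NO_ERROR;

        // Call from the client's listener: NO_ERROR on MEDIA2_PREPARED,
        // ext1 on MEDIA2_ERROR.
        void signal(android::status_t res) {
            android::Mutex::Autolock _l(lock);
            result = res;
            done = true;
            cond.signal();
        }

        // Call right after player->prepareAsync() returns OK.
        android::status_t wait() {
            android::Mutex::Autolock _l(lock);
            while (!done) {
                cond.wait(lock);
            }
            return result;
        }
    };
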
diff --git a/media/libmediaplayer2/nuplayer2/GenericSource2.cpp b/media/libmediaplayer2/nuplayer2/GenericSource2.cpp
index 4700660..c34aabb 100644
--- a/media/libmediaplayer2/nuplayer2/GenericSource2.cpp
+++ b/media/libmediaplayer2/nuplayer2/GenericSource2.cpp
@@ -1149,7 +1149,7 @@
 }
 
 sp<ABuffer> NuPlayer2::GenericSource2::mediaBufferToABuffer(
-        MediaBuffer* mb,
+        MediaBufferBase* mb,
         media_track_type trackType) {
     bool audio = trackType == MEDIA_TRACK_TYPE_AUDIO;
     size_t outLength = mb->range_length();
@@ -1333,7 +1333,7 @@
 
     int32_t generation = getDataGeneration(trackType);
     for (size_t numBuffers = 0; numBuffers < maxBuffers; ) {
-        Vector<MediaBuffer *> mediaBuffers;
+        Vector<MediaBufferBase *> mediaBuffers;
         status_t err = NO_ERROR;
 
         sp<IMediaSource> source = track->mSource;
@@ -1342,7 +1342,7 @@
             err = source->readMultiple(
                     &mediaBuffers, maxBuffers - numBuffers, &options);
         } else {
-            MediaBuffer *mbuf = NULL;
+            MediaBufferBase *mbuf = NULL;
             err = source->read(&mbuf, &options);
             if (err == OK && mbuf != NULL) {
                 mediaBuffers.push_back(mbuf);
@@ -1365,7 +1365,7 @@
 
         for (; id < count; ++id) {
             int64_t timeUs;
-            MediaBuffer *mbuf = mediaBuffers[id];
+            MediaBufferBase *mbuf = mediaBuffers[id];
             if (!mbuf->meta_data()->findInt64(kKeyTime, &timeUs)) {
                 mbuf->meta_data()->dumpToLog();
                 track->mPackets->signalEOS(ERROR_MALFORMED);
@@ -1659,7 +1659,7 @@
     return OK;
 }
 
-void NuPlayer2::GenericSource2::signalBufferReturned(MediaBuffer *buffer)
+void NuPlayer2::GenericSource2::signalBufferReturned(MediaBufferBase *buffer)
 {
     //ALOGV("signalBufferReturned %p  refCount: %d", buffer, buffer->localRefcount());
 
diff --git a/media/libmediaplayer2/nuplayer2/GenericSource2.h b/media/libmediaplayer2/nuplayer2/GenericSource2.h
index 1a5409a..896c397 100644
--- a/media/libmediaplayer2/nuplayer2/GenericSource2.h
+++ b/media/libmediaplayer2/nuplayer2/GenericSource2.h
@@ -86,7 +86,7 @@
     virtual bool isStreaming() const;
 
     // Modular DRM
-    virtual void signalBufferReturned(MediaBuffer *buffer);
+    virtual void signalBufferReturned(MediaBufferBase *buffer);
 
     virtual status_t prepareDrm(
             const uint8_t uuid[16],
@@ -202,7 +202,7 @@
             int32_t curGen, const sp<AnotherPacketSource>& packets, const sp<AMessage>& msg);
 
     sp<ABuffer> mediaBufferToABuffer(
-            MediaBuffer *mbuf,
+            MediaBufferBase *mbuf,
             media_track_type trackType);
 
     void postReadBuffer(media_track_type trackType);
diff --git a/media/libmediaplayer2/nuplayer2/NuPlayer2.cpp b/media/libmediaplayer2/nuplayer2/NuPlayer2.cpp
index 462a904..38d107d 100644
--- a/media/libmediaplayer2/nuplayer2/NuPlayer2.cpp
+++ b/media/libmediaplayer2/nuplayer2/NuPlayer2.cpp
@@ -423,6 +423,14 @@
     msg->post();
 }
 
+void NuPlayer2::playNextDataSource(int64_t srcId) {
+    disconnectSource();
+
+    sp<AMessage> msg = new AMessage(kWhatPlayNextDataSource, this);
+    msg->setInt64("srcId", srcId);
+    msg->post();
+}
+
 status_t NuPlayer2::getBufferingSettings(
         BufferingSettings *buffering /* nonnull */) {
     sp<AMessage> msg = new AMessage(kWhatGetBufferingSettings, this);
@@ -538,6 +546,11 @@
 }
 
 void NuPlayer2::resetAsync() {
+    disconnectSource();
+    (new AMessage(kWhatReset, this))->post();
+}
+
+void NuPlayer2::disconnectSource() {
     sp<Source> source;
     {
         Mutex::Autolock autoLock(mSourceLock);
@@ -554,7 +567,6 @@
         source->disconnect();
     }
 
-    (new AMessage(kWhatReset, this))->post();
 }
 
 status_t NuPlayer2::notifyAt(int64_t mediaTimeUs) {
@@ -671,6 +683,32 @@
             break;
         }
 
+        case kWhatPlayNextDataSource:
+        {
+            ALOGV("kWhatPlayNextDataSource");
+            int64_t srcId;
+            CHECK(msg->findInt64("srcId", &srcId));
+            if (srcId != mNextSrcId) {
+                notifyListener(srcId, MEDIA2_ERROR, MEDIA2_ERROR_UNKNOWN, 0);
+                return;
+            }
+
+            mResetting = true;
+            stopPlaybackTimer("kWhatPlayNextDataSource");
+            stopRebufferingTimer(true);
+
+            mDeferredActions.push_back(
+                    new FlushDecoderAction(
+                        FLUSH_CMD_SHUTDOWN /* audio */,
+                        FLUSH_CMD_SHUTDOWN /* video */));
+
+            mDeferredActions.push_back(
+                    new SimpleAction(&NuPlayer2::performPlayNextDataSource));
+
+            processDeferredActions();
+            break;
+        }
+
         case kWhatGetBufferingSettings:
         {
             sp<AReplyToken> replyID;
@@ -1382,7 +1420,7 @@
                 handleFlushComplete(audio, false /* isDecoder */);
                 finishFlushIfPossible();
             } else if (what == Renderer::kWhatVideoRenderingStart) {
-                notifyListener(mSrcId, MEDIA2_INFO, MEDIA2_INFO_RENDERING_START, 0);
+                notifyListener(mSrcId, MEDIA2_INFO, MEDIA2_INFO_VIDEO_RENDERING_START, 0);
             } else if (what == Renderer::kWhatMediaRenderingStart) {
                 ALOGV("media rendering started");
                 notifyListener(mSrcId, MEDIA2_STARTED, 0, 0);
@@ -2403,6 +2441,67 @@
     mIsDrmProtected = false;
 }
 
+void NuPlayer2::performPlayNextDataSource() {
+    ALOGV("performPlayNextDataSource");
+
+    CHECK(mAudioDecoder == NULL);
+    CHECK(mVideoDecoder == NULL);
+
+    stopPlaybackTimer("performPlayNextDataSource");
+    stopRebufferingTimer(true);
+
+    cancelPollDuration();
+
+    ++mScanSourcesGeneration;
+    mScanSourcesPending = false;
+
+    ++mRendererGeneration;
+
+    if (mSource != NULL) {
+        mSource->stop();
+    }
+
+    int64_t previousSrcId;
+    {
+        Mutex::Autolock autoLock(mSourceLock);
+        mSource = mNextSource;
+        mNextSource = NULL;
+        previousSrcId = mSrcId;
+        mSrcId = mNextSrcId;
+        ++mNextSrcId;  // to distinguish the two sources.
+    }
+
+    if (mDriver != NULL) {
+        sp<NuPlayer2Driver> driver = mDriver.promote();
+        if (driver != NULL) {
+            notifyListener(previousSrcId, MEDIA2_INFO, MEDIA2_INFO_PLAYBACK_COMPLETE, 0);
+            notifyListener(mSrcId, MEDIA2_INFO, MEDIA2_INFO_STARTED_AS_NEXT, 0);
+        }
+    }
+
+    mStarted = false;
+    mPrepared = true;  // TODO: what if it's not prepared
+    mResetting = false;
+    mSourceStarted = false;
+
+    // Modular DRM
+    if (mCrypto != NULL) {
+        // decoders will be flushed before this so their mCrypto would go away on their own
+        // TODO change to ALOGV
+        ALOGD("performReset mCrypto: %p", mCrypto.get());
+        mCrypto.clear();
+    }
+    mIsDrmProtected = false;
+
+    if (mRenderer != NULL) {
+        mRenderer->resume();
+    }
+
+    onStart();
+    mPausedByClient = false;
+    notifyListener(mSrcId, MEDIA2_STARTED, 0, 0);
+}
+
 void NuPlayer2::performScanSources() {
     ALOGV("performScanSources");
 
diff --git a/media/libmediaplayer2/nuplayer2/NuPlayer2.h b/media/libmediaplayer2/nuplayer2/NuPlayer2.h
index e7b774c..96f85f9 100644
--- a/media/libmediaplayer2/nuplayer2/NuPlayer2.h
+++ b/media/libmediaplayer2/nuplayer2/NuPlayer2.h
@@ -44,6 +44,7 @@
 
     void setDataSourceAsync(const sp<DataSourceDesc> &dsd);
     void prepareNextDataSourceAsync(const sp<DataSourceDesc> &dsd);
+    void playNextDataSource(int64_t srcId);
 
     status_t getBufferingSettings(BufferingSettings* buffering /* nonnull */);
     status_t setBufferingSettings(const BufferingSettings& buffering);
@@ -121,6 +122,7 @@
         kWhatSetDataSource              = '=DaS',
         kWhatPrepare                    = 'prep',
         kWhatPrepareNextDataSource      = 'pNDS',
+        kWhatPlayNextDataSource         = 'plNS',
         kWhatSetVideoSurface            = '=VSu',
         kWhatSetAudioSink               = '=AuS',
         kWhatMoreDataQueued             = 'more',
@@ -269,6 +271,8 @@
         mFlushComplete[1][1] = false;
     }
 
+    void disconnectSource();
+
     status_t createNuPlayer2Source(const sp<DataSourceDesc> &dsd,
                                    sp<Source> *source,
                                    DATA_SOURCE_TYPE *dataSourceType);
@@ -314,6 +318,7 @@
     void performSeek(int64_t seekTimeUs, MediaPlayer2SeekMode mode);
     void performDecoderFlush(FlushCommand audio, FlushCommand video);
     void performReset();
+    void performPlayNextDataSource();
     void performScanSources();
     void performSetSurface(const sp<ANativeWindowWrapper> &nw);
     void performResumeDecoders(bool needNotify);
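
performPlayNextDataSource() leans on the generation counters NuPlayer2 already uses (mScanSourcesGeneration, mRendererGeneration): bumping a counter when the source changes lets handlers recognise and drop async messages that were posted against the old source. The pattern in isolation looks roughly like this (Example and the 'illu' what-code are illustrative; registering the handler with an ALooper is omitted):

    #include <media/stagefright/foundation/ADebug.h>
    #include <media/stagefright/foundation/AHandler.h>
    #include <media/stagefright/foundation/AMessage.h>

    struct Example : public android::AHandler {
        int32_t mGeneration = 0;

        void postWork() {
            android::sp<android::AMessage> msg =
                    new android::AMessage('illu' /* illustrative 'what' */, this);
            msg->setInt32("generation", mGeneration);
            msg->post();
        }

        void switchSource() {
            ++mGeneration;  // everything already in flight is now stale
        }

    protected:
        void onMessageReceived(const android::sp<android::AMessage> &msg) override {
            int32_t generation;
            CHECK(msg->findInt32("generation", &generation));
            if (generation != mGeneration) {
                return;  // posted before the last switchSource(); ignore it
            }
            // ... act on the message ...
        }
    };
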
diff --git a/media/libmediaplayer2/nuplayer2/NuPlayer2Decoder.cpp b/media/libmediaplayer2/nuplayer2/NuPlayer2Decoder.cpp
index a436592..c49bccb 100644
--- a/media/libmediaplayer2/nuplayer2/NuPlayer2Decoder.cpp
+++ b/media/libmediaplayer2/nuplayer2/NuPlayer2Decoder.cpp
@@ -1066,7 +1066,7 @@
         }
 
         // Modular DRM
-        MediaBuffer *mediaBuf = NULL;
+        MediaBufferBase *mediaBuf = NULL;
         sp<AMediaCodecCryptoInfoWrapper> cryptInfo;
 
         // copy into codec buffer
diff --git a/media/libmediaplayer2/nuplayer2/NuPlayer2Driver.cpp b/media/libmediaplayer2/nuplayer2/NuPlayer2Driver.cpp
index 60a07a3..ca08e79 100644
--- a/media/libmediaplayer2/nuplayer2/NuPlayer2Driver.cpp
+++ b/media/libmediaplayer2/nuplayer2/NuPlayer2Driver.cpp
@@ -107,7 +107,6 @@
 
 NuPlayer2Driver::NuPlayer2Driver(pid_t pid, uid_t uid)
     : mState(STATE_IDLE),
-      mIsAsyncPrepare(false),
       mAsyncResult(UNKNOWN_ERROR),
       mSrcId(0),
       mSetSurfaceInProgress(false),
@@ -174,7 +173,7 @@
 }
 
 status_t NuPlayer2Driver::setDataSource(const sp<DataSourceDesc> &dsd) {
-    ALOGV("setDataSource(%p) callback source", this);
+    ALOGV("setDataSource(%p)", this);
     Mutex::Autolock autoLock(mLock);
 
     if (mState != STATE_IDLE) {
@@ -193,6 +192,25 @@
     return mAsyncResult;
 }
 
+status_t NuPlayer2Driver::prepareNextDataSource(const sp<DataSourceDesc> &dsd) {
+    ALOGV("prepareNextDataSource(%p)", this);
+    Mutex::Autolock autoLock(mLock);
+
+    mPlayer->prepareNextDataSourceAsync(dsd);
+
+    return OK;
+}
+
+status_t NuPlayer2Driver::playNextDataSource(int64_t srcId) {
+    ALOGV("playNextDataSource(%p)", this);
+    Mutex::Autolock autoLock(mLock);
+
+    mSrcId = srcId;
+    mPlayer->playNextDataSource(srcId);
+
+    return OK;
+}
+
 status_t NuPlayer2Driver::setVideoSurfaceTexture(const sp<ANativeWindowWrapper> &nww) {
     ALOGV("setVideoSurfaceTexture(%p)", this);
     Mutex::Autolock autoLock(mLock);
@@ -245,42 +263,6 @@
     return mPlayer->setBufferingSettings(buffering);
 }
 
-status_t NuPlayer2Driver::prepare() {
-    ALOGV("prepare(%p)", this);
-    Mutex::Autolock autoLock(mLock);
-    return prepare_l();
-}
-
-status_t NuPlayer2Driver::prepare_l() {
-    switch (mState) {
-        case STATE_UNPREPARED:
-            mState = STATE_PREPARING;
-
-            // Make sure we're not posting any notifications, success or
-            // failure information is only communicated through our result
-            // code.
-            mIsAsyncPrepare = false;
-            mPlayer->prepareAsync();
-            while (mState == STATE_PREPARING) {
-                mCondition.wait(mLock);
-            }
-            return (mState == STATE_PREPARED) ? OK : UNKNOWN_ERROR;
-        case STATE_STOPPED:
-            // this is really just paused. handle as seek to start
-            mAtEOS = false;
-            mState = STATE_STOPPED_AND_PREPARING;
-            mIsAsyncPrepare = false;
-            mPlayer->seekToAsync(0, MediaPlayer2SeekMode::SEEK_PREVIOUS_SYNC /* mode */,
-                    true /* needNotify */);
-            while (mState == STATE_STOPPED_AND_PREPARING) {
-                mCondition.wait(mLock);
-            }
-            return (mState == STATE_STOPPED_AND_PREPARED) ? OK : UNKNOWN_ERROR;
-        default:
-            return INVALID_OPERATION;
-    };
-}
-
 status_t NuPlayer2Driver::prepareAsync() {
     ALOGV("prepareAsync(%p)", this);
     Mutex::Autolock autoLock(mLock);
@@ -288,14 +270,12 @@
     switch (mState) {
         case STATE_UNPREPARED:
             mState = STATE_PREPARING;
-            mIsAsyncPrepare = true;
             mPlayer->prepareAsync();
             return OK;
         case STATE_STOPPED:
             // this is really just paused. handle as seek to start
             mAtEOS = false;
             mState = STATE_STOPPED_AND_PREPARING;
-            mIsAsyncPrepare = true;
             mPlayer->seekToAsync(0, MediaPlayer2SeekMode::SEEK_PREVIOUS_SYNC /* mode */,
                     true /* needNotify */);
             return OK;
@@ -312,19 +292,6 @@
 
 status_t NuPlayer2Driver::start_l() {
     switch (mState) {
-        case STATE_UNPREPARED:
-        {
-            status_t err = prepare_l();
-
-            if (err != OK) {
-                return err;
-            }
-
-            CHECK_EQ(mState, STATE_PREPARED);
-
-            // fall through
-        }
-
         case STATE_PAUSED:
         case STATE_STOPPED_AND_PREPARED:
         case STATE_PREPARED:
@@ -626,8 +593,6 @@
 
         case STATE_PREPARING:
         {
-            CHECK(mIsAsyncPrepare);
-
             notifyListener_l(mSrcId, MEDIA2_PREPARED);
             break;
         }
@@ -824,10 +789,6 @@
         wasSeeking = false;
         mState = STATE_STOPPED_AND_PREPARED;
         mCondition.broadcast();
-        if (!mIsAsyncPrepare) {
-            // if we are preparing synchronously, no need to notify listener
-            return;
-        }
     } else if (mState == STATE_STOPPED) {
         // no need to notify listener
         return;
@@ -947,58 +908,60 @@
     ALOGD("notifyListener_l(%p), (%lld, %d, %d, %d, %d), loop setting(%d, %d)",
             this, (long long)srcId, msg, ext1, ext2,
             (in == NULL ? -1 : (int)in->dataSize()), mAutoLoop, mLooping);
-    switch (msg) {
-        case MEDIA2_PLAYBACK_COMPLETE:
-        {
-            if (mState != STATE_RESET_IN_PROGRESS) {
-                if (mAutoLoop) {
-                    audio_stream_type_t streamType = AUDIO_STREAM_MUSIC;
-                    if (mAudioSink != NULL) {
-                        streamType = mAudioSink->getAudioStreamType();
+    if (srcId == mSrcId) {
+        switch (msg) {
+            case MEDIA2_PLAYBACK_COMPLETE:
+            {
+                if (mState != STATE_RESET_IN_PROGRESS) {
+                    if (mAutoLoop) {
+                        audio_stream_type_t streamType = AUDIO_STREAM_MUSIC;
+                        if (mAudioSink != NULL) {
+                            streamType = mAudioSink->getAudioStreamType();
+                        }
+                        if (streamType == AUDIO_STREAM_NOTIFICATION) {
+                            ALOGW("disabling auto-loop for notification");
+                            mAutoLoop = false;
+                        }
                     }
-                    if (streamType == AUDIO_STREAM_NOTIFICATION) {
-                        ALOGW("disabling auto-loop for notification");
-                        mAutoLoop = false;
+                    if (mLooping || mAutoLoop) {
+                        mPlayer->seekToAsync(0);
+                        if (mAudioSink != NULL) {
+                            // The renderer has stopped the sink at the end in order to play out
+                            // the last little bit of audio. In looping mode, we need to restart it.
+                            mAudioSink->start();
+                        }
+                        // don't send completion event when looping
+                        return;
                     }
-                }
-                if (mLooping || mAutoLoop) {
-                    mPlayer->seekToAsync(0);
-                    if (mAudioSink != NULL) {
-                        // The renderer has stopped the sink at the end in order to play out
-                        // the last little bit of audio. If we're looping, we need to restart it.
-                        mAudioSink->start();
+                    if (property_get_bool("persist.debug.sf.stats", false)) {
+                        Vector<String16> args;
+                        dump(-1, args);
                     }
-                    // don't send completion event when looping
-                    return;
+                    mPlayer->pause();
+                    mState = STATE_PAUSED;
                 }
-                if (property_get_bool("persist.debug.sf.stats", false)) {
-                    Vector<String16> args;
-                    dump(-1, args);
-                }
-                mPlayer->pause();
-                mState = STATE_PAUSED;
+                // fall through
             }
-            // fall through
-        }
 
-        case MEDIA2_ERROR:
-        {
-            // when we have an error, add it to the analytics for this playback.
-            // ext1 is our primary 'error type' value. Only add ext2 when non-zero.
-            // [test against msg is due to fall through from previous switch value]
-            if (msg == MEDIA2_ERROR) {
-                mAnalyticsItem->setInt32(kPlayerError, ext1);
-                if (ext2 != 0) {
-                    mAnalyticsItem->setInt32(kPlayerErrorCode, ext2);
+            case MEDIA2_ERROR:
+            {
+                // when we have an error, add it to the analytics for this playback.
+                // ext1 is our primary 'error type' value. Only add ext2 when non-zero.
+                // [test against msg is due to fall through from previous switch value]
+                if (msg == MEDIA2_ERROR) {
+                    mAnalyticsItem->setInt32(kPlayerError, ext1);
+                    if (ext2 != 0) {
+                        mAnalyticsItem->setInt32(kPlayerErrorCode, ext2);
+                    }
+                    mAnalyticsItem->setCString(kPlayerErrorState, stateString(mState).c_str());
                 }
-                mAnalyticsItem->setCString(kPlayerErrorState, stateString(mState).c_str());
+                mAtEOS = true;
+                break;
             }
-            mAtEOS = true;
-            break;
-        }
 
-        default:
-            break;
+            default:
+                break;
+        }
     }
 
     sp<AMessage> notify = new AMessage(kWhatNotifyListener, this);
@@ -1025,6 +988,15 @@
 
     Mutex::Autolock autoLock(mLock);
 
+    if (srcId != mSrcId) {
+        if (err == OK) {
+            notifyListener_l(srcId, MEDIA2_PREPARED);
+        } else {
+            notifyListener_l(srcId, MEDIA2_ERROR, MEDIA2_ERROR_UNKNOWN, err);
+        }
+        return;
+    }
+
     if (mState != STATE_PREPARING) {
         // We were preparing asynchronously when the client called
         // reset(), we sent a premature "prepared" notification and
@@ -1041,14 +1013,10 @@
         // update state before notifying client, so that if client calls back into NuPlayer2Driver
         // in response, NuPlayer2Driver has the right state
         mState = STATE_PREPARED;
-        if (mIsAsyncPrepare) {
-            notifyListener_l(srcId, MEDIA2_PREPARED);
-        }
+        notifyListener_l(srcId, MEDIA2_PREPARED);
     } else {
         mState = STATE_UNPREPARED;
-        if (mIsAsyncPrepare) {
-            notifyListener_l(srcId, MEDIA2_ERROR, MEDIA2_ERROR_UNKNOWN, err);
-        }
+        notifyListener_l(srcId, MEDIA2_ERROR, MEDIA2_ERROR_UNKNOWN, err);
     }
 
     sp<MetaData> meta = mPlayer->getFileMeta();
diff --git a/media/libmediaplayer2/nuplayer2/NuPlayer2Driver.h b/media/libmediaplayer2/nuplayer2/NuPlayer2Driver.h
index 4c57cfd..502a2cc 100644
--- a/media/libmediaplayer2/nuplayer2/NuPlayer2Driver.h
+++ b/media/libmediaplayer2/nuplayer2/NuPlayer2Driver.h
@@ -28,17 +28,18 @@
 struct NuPlayer2Driver : public MediaPlayer2Interface {
     explicit NuPlayer2Driver(pid_t pid, uid_t uid);
 
-    virtual status_t initCheck();
+    virtual status_t initCheck() override;
 
     virtual status_t setDataSource(const sp<DataSourceDesc> &dsd) override;
+    virtual status_t prepareNextDataSource(const sp<DataSourceDesc> &dsd) override;
+    virtual status_t playNextDataSource(int64_t srcId) override;
 
-    virtual status_t setVideoSurfaceTexture(const sp<ANativeWindowWrapper> &nww);
+    virtual status_t setVideoSurfaceTexture(const sp<ANativeWindowWrapper> &nww) override;
 
     virtual status_t getBufferingSettings(
             BufferingSettings* buffering /* nonnull */) override;
     virtual status_t setBufferingSettings(const BufferingSettings& buffering) override;
 
-    virtual status_t prepare();
     virtual status_t prepareAsync();
     virtual status_t start();
     virtual status_t stop();
@@ -114,7 +115,6 @@
 
     State mState;
 
-    bool mIsAsyncPrepare;
     status_t mAsyncResult;
 
     // The following are protected through "mLock"
@@ -147,7 +147,6 @@
     void updateMetrics(const char *where);
     void logMetrics(const char *where);
 
-    status_t prepare_l();
     status_t start_l();
     void notifyListener_l(int64_t srcId, int msg, int ext1 = 0, int ext2 = 0,
                           const Parcel *in = NULL);
diff --git a/media/libmediaplayerservice/nuplayer/GenericSource.cpp b/media/libmediaplayerservice/nuplayer/GenericSource.cpp
index 511f46f..b0c82f2 100644
--- a/media/libmediaplayerservice/nuplayer/GenericSource.cpp
+++ b/media/libmediaplayerservice/nuplayer/GenericSource.cpp
@@ -1143,7 +1143,7 @@
 }
 
 sp<ABuffer> NuPlayer::GenericSource::mediaBufferToABuffer(
-        MediaBuffer* mb,
+        MediaBufferBase* mb,
         media_track_type trackType) {
     bool audio = trackType == MEDIA_TRACK_TYPE_AUDIO;
     size_t outLength = mb->range_length();
@@ -1326,7 +1326,7 @@
 
     int32_t generation = getDataGeneration(trackType);
     for (size_t numBuffers = 0; numBuffers < maxBuffers; ) {
-        Vector<MediaBuffer *> mediaBuffers;
+        Vector<MediaBufferBase *> mediaBuffers;
         status_t err = NO_ERROR;
 
         sp<IMediaSource> source = track->mSource;
@@ -1335,7 +1335,7 @@
             err = source->readMultiple(
                     &mediaBuffers, maxBuffers - numBuffers, &options);
         } else {
-            MediaBuffer *mbuf = NULL;
+            MediaBufferBase *mbuf = NULL;
             err = source->read(&mbuf, &options);
             if (err == OK && mbuf != NULL) {
                 mediaBuffers.push_back(mbuf);
@@ -1358,7 +1358,7 @@
 
         for (; id < count; ++id) {
             int64_t timeUs;
-            MediaBuffer *mbuf = mediaBuffers[id];
+            MediaBufferBase *mbuf = mediaBuffers[id];
             if (!mbuf->meta_data()->findInt64(kKeyTime, &timeUs)) {
                 mbuf->meta_data()->dumpToLog();
                 track->mPackets->signalEOS(ERROR_MALFORMED);
@@ -1654,7 +1654,7 @@
     return OK;
 }
 
-void NuPlayer::GenericSource::signalBufferReturned(MediaBuffer *buffer)
+void NuPlayer::GenericSource::signalBufferReturned(MediaBufferBase *buffer)
 {
     //ALOGV("signalBufferReturned %p  refCount: %d", buffer, buffer->localRefcount());
 
diff --git a/media/libmediaplayerservice/nuplayer/GenericSource.h b/media/libmediaplayerservice/nuplayer/GenericSource.h
index 2406665..065cac1 100644
--- a/media/libmediaplayerservice/nuplayer/GenericSource.h
+++ b/media/libmediaplayerservice/nuplayer/GenericSource.h
@@ -86,7 +86,7 @@
     virtual bool isStreaming() const;
 
     // Modular DRM
-    virtual void signalBufferReturned(MediaBuffer *buffer);
+    virtual void signalBufferReturned(MediaBufferBase *buffer);
 
     virtual status_t prepareDrm(
             const uint8_t uuid[16], const Vector<uint8_t> &drmSessionId, sp<ICrypto> *outCrypto);
@@ -201,7 +201,7 @@
             int32_t curGen, const sp<AnotherPacketSource>& packets, const sp<AMessage>& msg);
 
     sp<ABuffer> mediaBufferToABuffer(
-            MediaBuffer *mbuf,
+            MediaBufferBase *mbuf,
             media_track_type trackType);
 
     void postReadBuffer(media_track_type trackType);
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp
index 1aca96c..88594d2 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp
@@ -1046,7 +1046,7 @@
         }
 
         // Modular DRM
-        MediaBuffer *mediaBuf = NULL;
+        MediaBufferBase *mediaBuf = NULL;
         NuPlayerDrm::CryptoInfo *cryptInfo = NULL;
 
         // copy into codec buffer
diff --git a/media/libstagefright/AACWriter.cpp b/media/libstagefright/AACWriter.cpp
index 281af47..d64138e 100644
--- a/media/libstagefright/AACWriter.cpp
+++ b/media/libstagefright/AACWriter.cpp
@@ -294,7 +294,7 @@
     prctl(PR_SET_NAME, (unsigned long)"AACWriterThread", 0, 0, 0);
 
     while (!mDone && err == OK) {
-        MediaBuffer *buffer;
+        MediaBufferBase *buffer;
         err = mSource->read(&buffer);
 
         if (err != OK) {
diff --git a/media/libstagefright/AMRWriter.cpp b/media/libstagefright/AMRWriter.cpp
index 910abc6..e33d3da 100644
--- a/media/libstagefright/AMRWriter.cpp
+++ b/media/libstagefright/AMRWriter.cpp
@@ -193,7 +193,7 @@
 
     prctl(PR_SET_NAME, (unsigned long)"AMRWriter", 0, 0, 0);
     while (!mDone) {
-        MediaBuffer *buffer;
+        MediaBufferBase *buffer;
         err = mSource->read(&buffer);
 
         if (err != OK) {
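
AMRWriter, AACWriter and MPEG4Writer all follow the same thread shape over the new buffer type: pull a MediaBufferBase, write its active range, release it. Reduced to its core (writeLoop, fd and done are illustrative; the real writers add timestamp handling, duration tracking and fuller error paths):

    #include <stdint.h>
    #include <unistd.h>

    #include <media/stagefright/MediaBufferBase.h>
    #include <utils/Errors.h>

    template <typename Source>
    void writeLoop(Source *source, int fd, volatile bool &done) {
        using namespace android;
        status_t err = OK;
        while (!done && err == OK) {
            MediaBufferBase *buffer = nullptr;
            err = source->read(&buffer);
            if (err != OK || buffer == nullptr) {
                break;
            }
            // Only the [range_offset, range_offset + range_length) window is payload.
            const uint8_t *data =
                    static_cast<const uint8_t *>(buffer->data()) + buffer->range_offset();
            write(fd, data, buffer->range_length());
            buffer->release();
        }
    }
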
diff --git a/media/libstagefright/Android.bp b/media/libstagefright/Android.bp
index 905817f..8a15a50 100644
--- a/media/libstagefright/Android.bp
+++ b/media/libstagefright/Android.bp
@@ -100,7 +100,6 @@
         "SkipCutBuffer.cpp",
         "StagefrightMediaScanner.cpp",
         "StagefrightMetadataRetriever.cpp",
-        "SurfaceMediaSource.cpp",
         "SurfaceUtils.cpp",
         "Utils.cpp",
         "ThrottledSource.cpp",
diff --git a/media/libstagefright/AudioSource.cpp b/media/libstagefright/AudioSource.cpp
index 70ce38c..d854582 100644
--- a/media/libstagefright/AudioSource.cpp
+++ b/media/libstagefright/AudioSource.cpp
@@ -240,7 +240,7 @@
 }
 
 status_t AudioSource::read(
-        MediaBuffer **out, const ReadOptions * /* options */) {
+        MediaBufferBase **out, const ReadOptions * /* options */) {
     Mutex::Autolock autoLock(mLock);
     *out = NULL;
 
@@ -311,7 +311,7 @@
     return OK;
 }
 
-void AudioSource::signalBufferReturned(MediaBuffer *buffer) {
+void AudioSource::signalBufferReturned(MediaBufferBase *buffer) {
     ALOGV("signalBufferReturned: %p", buffer->data());
     Mutex::Autolock autoLock(mLock);
     --mNumClientOwnedBuffers;
diff --git a/media/libstagefright/CallbackMediaSource.cpp b/media/libstagefright/CallbackMediaSource.cpp
index 6811882..ea7392e 100644
--- a/media/libstagefright/CallbackMediaSource.cpp
+++ b/media/libstagefright/CallbackMediaSource.cpp
@@ -36,7 +36,7 @@
     return mSource->getFormat();
 }
 
-status_t CallbackMediaSource::read(MediaBuffer **buffer, const ReadOptions *options) {
+status_t CallbackMediaSource::read(MediaBufferBase **buffer, const ReadOptions *options) {
     return mSource->read(buffer, reinterpret_cast<const ReadOptions*>(options));
 }
 
diff --git a/media/libstagefright/CameraSource.cpp b/media/libstagefright/CameraSource.cpp
index 6ed0d0e..4960418 100644
--- a/media/libstagefright/CameraSource.cpp
+++ b/media/libstagefright/CameraSource.cpp
@@ -1040,7 +1040,7 @@
     releaseRecordingFrame(frame);
 }
 
-void CameraSource::signalBufferReturned(MediaBuffer *buffer) {
+void CameraSource::signalBufferReturned(MediaBufferBase *buffer) {
     ALOGV("signalBufferReturned: %p", buffer->data());
     Mutex::Autolock autoLock(mLock);
     for (List<sp<IMemory> >::iterator it = mFramesBeingEncoded.begin();
@@ -1059,7 +1059,7 @@
 }
 
 status_t CameraSource::read(
-        MediaBuffer **buffer, const ReadOptions *options) {
+        MediaBufferBase **buffer, const ReadOptions *options) {
     ALOGV("read");
 
     *buffer = NULL;
diff --git a/media/libstagefright/CameraSourceTimeLapse.cpp b/media/libstagefright/CameraSourceTimeLapse.cpp
index 970526a..f3f06d8 100644
--- a/media/libstagefright/CameraSourceTimeLapse.cpp
+++ b/media/libstagefright/CameraSourceTimeLapse.cpp
@@ -168,7 +168,7 @@
     return isSuccessful;
 }
 
-void CameraSourceTimeLapse::signalBufferReturned(MediaBuffer* buffer) {
+void CameraSourceTimeLapse::signalBufferReturned(MediaBufferBase* buffer) {
     ALOGV("signalBufferReturned");
     Mutex::Autolock autoLock(mQuickStopLock);
     if (mQuickStop && (buffer == mLastReadBufferCopy)) {
@@ -180,9 +180,9 @@
 }
 
 void createMediaBufferCopy(
-        const MediaBuffer& sourceBuffer,
+        const MediaBufferBase& sourceBuffer,
         int64_t frameTime,
-        MediaBuffer **newBuffer) {
+        MediaBufferBase **newBuffer) {
 
     ALOGV("createMediaBufferCopy");
     size_t sourceSize = sourceBuffer.size();
@@ -194,7 +194,7 @@
     (*newBuffer)->meta_data()->setInt64(kKeyTime, frameTime);
 }
 
-void CameraSourceTimeLapse::fillLastReadBufferCopy(MediaBuffer& sourceBuffer) {
+void CameraSourceTimeLapse::fillLastReadBufferCopy(MediaBufferBase& sourceBuffer) {
     ALOGV("fillLastReadBufferCopy");
     int64_t frameTime;
     CHECK(sourceBuffer.meta_data()->findInt64(kKeyTime, &frameTime));
@@ -204,7 +204,7 @@
 }
 
 status_t CameraSourceTimeLapse::read(
-        MediaBuffer **buffer, const ReadOptions *options) {
+        MediaBufferBase **buffer, const ReadOptions *options) {
     ALOGV("read");
     if (mLastReadBufferCopy == NULL) {
         mLastReadStatus = CameraSource::read(buffer, options);
diff --git a/media/libstagefright/FrameDecoder.cpp b/media/libstagefright/FrameDecoder.cpp
index b529940..6f88c0e 100644
--- a/media/libstagefright/FrameDecoder.cpp
+++ b/media/libstagefright/FrameDecoder.cpp
@@ -230,7 +230,7 @@
             }
             codecBuffer = inputBuffers[inputIndex];
 
-            MediaBuffer *mediaBuffer = NULL;
+            MediaBufferBase *mediaBuffer = NULL;
 
             err = mSource->read(&mediaBuffer, &options);
             options.clearSeekTo();
diff --git a/media/libstagefright/JPEGSource.cpp b/media/libstagefright/JPEGSource.cpp
index ee3aedb..10eb2d2 100644
--- a/media/libstagefright/JPEGSource.cpp
+++ b/media/libstagefright/JPEGSource.cpp
@@ -21,6 +21,7 @@
 #include <media/DataSource.h>
 #include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/JPEGSource.h>
+#include <media/stagefright/MediaBuffer.h>
 #include <media/stagefright/MediaBufferGroup.h>
 #include <media/stagefright/MediaDefs.h>
 #include <media/stagefright/MediaErrors.h>
@@ -108,7 +109,7 @@
 }
 
 status_t JPEGSource::read(
-        MediaBuffer **out, const ReadOptions *options) {
+        MediaBufferBase **out, const ReadOptions *options) {
     *out = NULL;
 
     int64_t seekTimeUs;
@@ -117,7 +118,7 @@
         return UNKNOWN_ERROR;
     }
 
-    MediaBuffer *buffer;
+    MediaBufferBase *buffer;
     mGroup->acquire_buffer(&buffer);
 
     ssize_t n = mSource->readAt(mOffset, buffer->data(), mSize - mOffset);
diff --git a/media/libstagefright/MPEG2TSWriter.cpp b/media/libstagefright/MPEG2TSWriter.cpp
index 4c85b0d..cdcd657 100644
--- a/media/libstagefright/MPEG2TSWriter.cpp
+++ b/media/libstagefright/MPEG2TSWriter.cpp
@@ -85,8 +85,8 @@
 
     void extractCodecSpecificData();
 
-    void appendAACFrames(MediaBuffer *buffer);
-    void appendAVCFrame(MediaBuffer *buffer);
+    void appendAACFrames(MediaBufferBase *buffer);
+    void appendAVCFrame(MediaBufferBase *buffer);
 
     DISALLOW_EVIL_CONSTRUCTORS(SourceInfo);
 };
@@ -249,7 +249,7 @@
     notify->post();
 }
 
-void MPEG2TSWriter::SourceInfo::appendAVCFrame(MediaBuffer *buffer) {
+void MPEG2TSWriter::SourceInfo::appendAVCFrame(MediaBufferBase *buffer) {
     sp<AMessage> notify = mNotify->dup();
     notify->setInt32("what", kNotifyBuffer);
 
@@ -279,7 +279,7 @@
     notify->post();
 }
 
-void MPEG2TSWriter::SourceInfo::appendAACFrames(MediaBuffer *buffer) {
+void MPEG2TSWriter::SourceInfo::appendAACFrames(MediaBufferBase *buffer) {
     sp<AMessage> notify = mNotify->dup();
     notify->setInt32("what", kNotifyBuffer);
 
@@ -368,7 +368,7 @@
 
         case kWhatRead:
         {
-            MediaBuffer *buffer;
+            MediaBufferBase *buffer;
             status_t err = mSource->read(&buffer);
 
             if (err != OK && err != INFO_FORMAT_CHANGED) {
diff --git a/media/libstagefright/MPEG4Writer.cpp b/media/libstagefright/MPEG4Writer.cpp
index 8db00f0..387cb13 100644
--- a/media/libstagefright/MPEG4Writer.cpp
+++ b/media/libstagefright/MPEG4Writer.cpp
@@ -2809,7 +2809,7 @@
     sp<MetaData> meta_data;
 
     status_t err = OK;
-    MediaBuffer *buffer;
+    MediaBufferBase *buffer;
     const char *trackName = getTrackType();
     while (!mDone && (err = mSource->read(&buffer)) == OK) {
         if (buffer->range_length() == 0) {
diff --git a/media/libstagefright/MediaAdapter.cpp b/media/libstagefright/MediaAdapter.cpp
index 74eb1ff..f1b6e8c 100644
--- a/media/libstagefright/MediaAdapter.cpp
+++ b/media/libstagefright/MediaAdapter.cpp
@@ -72,7 +72,7 @@
     return mOutputFormat;
 }
 
-void MediaAdapter::signalBufferReturned(MediaBuffer *buffer) {
+void MediaAdapter::signalBufferReturned(MediaBufferBase *buffer) {
     Mutex::Autolock autoLock(mAdapterLock);
     CHECK(buffer != NULL);
     buffer->setObserver(0);
@@ -82,7 +82,7 @@
 }
 
 status_t MediaAdapter::read(
-            MediaBuffer **buffer, const ReadOptions * /* options */) {
+            MediaBufferBase **buffer, const ReadOptions * /* options */) {
     Mutex::Autolock autoLock(mAdapterLock);
     if (!mStarted) {
         ALOGV("Read before even started!");
diff --git a/media/libstagefright/MediaCodecSource.cpp b/media/libstagefright/MediaCodecSource.cpp
index 04d83af..08331ad 100644
--- a/media/libstagefright/MediaCodecSource.cpp
+++ b/media/libstagefright/MediaCodecSource.cpp
@@ -59,7 +59,7 @@
     void pause();
     void resume();
     status_t setStopTimeUs(int64_t stopTimeUs);
-    bool readBuffer(MediaBuffer **buffer);
+    bool readBuffer(MediaBufferBase **buffer);
 
 protected:
     virtual void onMessageReceived(const sp<AMessage> &msg);
@@ -86,14 +86,14 @@
         int64_t mReadPendingSince;
         bool mPaused;
         bool mPulling;
-        Vector<MediaBuffer *> mReadBuffers;
+        Vector<MediaBufferBase *> mReadBuffers;
 
         void flush();
         // if queue is empty, return false and set *|buffer| to NULL. Otherwise, pop
         // buffer from front of the queue, place it into *|buffer| and return true.
-        bool readBuffer(MediaBuffer **buffer);
+        bool readBuffer(MediaBufferBase **buffer);
         // add a buffer to the back of the queue
-        void pushBuffer(MediaBuffer *mbuf);
+        void pushBuffer(MediaBufferBase *mbuf);
     };
     Mutexed<Queue> mQueue;
 
@@ -123,11 +123,11 @@
     mLooper->stop();
 }
 
-void MediaCodecSource::Puller::Queue::pushBuffer(MediaBuffer *mbuf) {
+void MediaCodecSource::Puller::Queue::pushBuffer(MediaBufferBase *mbuf) {
     mReadBuffers.push_back(mbuf);
 }
 
-bool MediaCodecSource::Puller::Queue::readBuffer(MediaBuffer **mbuf) {
+bool MediaCodecSource::Puller::Queue::readBuffer(MediaBufferBase **mbuf) {
     if (mReadBuffers.empty()) {
         *mbuf = NULL;
         return false;
@@ -138,14 +138,14 @@
 }
 
 void MediaCodecSource::Puller::Queue::flush() {
-    MediaBuffer *mbuf;
+    MediaBufferBase *mbuf;
     while (readBuffer(&mbuf)) {
         // there are no null buffers in the queue
         mbuf->release();
     }
 }
 
-bool MediaCodecSource::Puller::readBuffer(MediaBuffer **mbuf) {
+bool MediaCodecSource::Puller::readBuffer(MediaBufferBase **mbuf) {
     Mutexed<Queue>::Locked queue(mQueue);
     return queue->readBuffer(mbuf);
 }
@@ -298,7 +298,7 @@
             }
 
             queue.unlock();
-            MediaBuffer *mbuf = NULL;
+            MediaBufferBase *mbuf = NULL;
             status_t err = mSource->read(&mbuf);
             queue.lock();
 
@@ -413,7 +413,7 @@
 }
 
 status_t MediaCodecSource::read(
-        MediaBuffer** buffer, const ReadOptions* /* options */) {
+        MediaBufferBase** buffer, const ReadOptions* /* options */) {
     Mutexed<Output>::Locked output(mOutput);
 
     *buffer = NULL;
@@ -428,7 +428,7 @@
     return output->mErrorCode;
 }
 
-void MediaCodecSource::signalBufferReturned(MediaBuffer *buffer) {
+void MediaCodecSource::signalBufferReturned(MediaBufferBase *buffer) {
     buffer->setObserver(0);
     buffer->release();
 }
@@ -636,7 +636,7 @@
         if (!reachedEOS) {
             ALOGV("encoder (%s) reached EOS", mIsVideo ? "video" : "audio");
             // release all unread media buffers
-            for (List<MediaBuffer*>::iterator it = output->mBufferQueue.begin();
+            for (List<MediaBufferBase*>::iterator it = output->mBufferQueue.begin();
                     it != output->mBufferQueue.end(); it++) {
                 (*it)->release();
             }
@@ -682,7 +682,7 @@
 }
 
 status_t MediaCodecSource::feedEncoderInputBuffers() {
-    MediaBuffer* mbuf = NULL;
+    MediaBufferBase* mbuf = NULL;
     while (!mAvailEncoderInputIndices.empty() && mPuller->readBuffer(&mbuf)) {
         size_t bufferIndex = *mAvailEncoderInputIndices.begin();
         mAvailEncoderInputIndices.erase(mAvailEncoderInputIndices.begin());
@@ -906,7 +906,7 @@
                 break;
             }
 
-            MediaBuffer *mbuf = new MediaBuffer(outbuf->size());
+            MediaBufferBase *mbuf = new MediaBuffer(outbuf->size());
             mbuf->setObserver(this);
             mbuf->add_ref();
 
diff --git a/media/libstagefright/NuMediaExtractor.cpp b/media/libstagefright/NuMediaExtractor.cpp
index d96f7e0..c6cbb2f 100644
--- a/media/libstagefright/NuMediaExtractor.cpp
+++ b/media/libstagefright/NuMediaExtractor.cpp
@@ -45,7 +45,7 @@
       mSampleTimeUs(-1ll) {
 }
 
-NuMediaExtractor::Sample::Sample(MediaBuffer *buffer, int64_t timeUs)
+NuMediaExtractor::Sample::Sample(MediaBufferBase *buffer, int64_t timeUs)
     : mBuffer(buffer),
       mSampleTimeUs(timeUs) {
 }
@@ -488,12 +488,12 @@
     }
 
     status_t err = OK;
-    Vector<MediaBuffer *> mediaBuffers;
+    Vector<MediaBufferBase *> mediaBuffers;
     if (info->mSource->supportReadMultiple()) {
         options.setNonBlocking();
         err = info->mSource->readMultiple(&mediaBuffers, info->mMaxFetchCount, &options);
     } else {
-        MediaBuffer *mbuf = NULL;
+        MediaBufferBase *mbuf = NULL;
         err = info->mSource->read(&mbuf, &options);
         if (err == OK && mbuf != NULL) {
             mediaBuffers.push_back(mbuf);
@@ -505,7 +505,7 @@
         ALOGW("read on track %zu failed with error %d", info->mTrackIndex, err);
         size_t count = mediaBuffers.size();
         for (size_t id = 0; id < count; ++id) {
-            MediaBuffer *mbuf = mediaBuffers[id];
+            MediaBufferBase *mbuf = mediaBuffers[id];
             if (mbuf != NULL) {
                 mbuf->release();
             }
@@ -517,7 +517,7 @@
     bool releaseRemaining = false;
     for (size_t id = 0; id < count; ++id) {
         int64_t timeUs;
-        MediaBuffer *mbuf = mediaBuffers[id];
+        MediaBufferBase *mbuf = mediaBuffers[id];
         if (mbuf == NULL) {
             continue;
         }
@@ -565,7 +565,8 @@
     return OK;
 }
 
-status_t NuMediaExtractor::appendVorbisNumPageSamples(MediaBuffer *mbuf, const sp<ABuffer> &buffer) {
+status_t NuMediaExtractor::appendVorbisNumPageSamples(
+        MediaBufferBase *mbuf, const sp<ABuffer> &buffer) {
     int32_t numPageSamples;
     if (!mbuf->meta_data()->findInt32(
             kKeyValidSamples, &numPageSamples)) {
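The error path above encodes the ownership rule that recurs throughout this change: every MediaBufferBase handed out by a read must be released exactly once, including when a batched fetch fails partway. A minimal sketch of that cleanup under the same Vector<MediaBufferBase *> shape used here; the helper name is hypothetical and the code is illustrative only, not part of this patch:

    #include <utils/Vector.h>
    #include <media/stagefright/MediaBufferBase.h>

    using namespace android;

    // Release whatever buffers a failed fetch left behind (hypothetical helper).
    static void releaseAll(Vector<MediaBufferBase *> &buffers) {
        for (size_t i = 0; i < buffers.size(); ++i) {
            if (buffers[i] != NULL) {
                buffers[i]->release();   // hand each buffer back to its owner
            }
        }
        buffers.clear();
    }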
diff --git a/media/libstagefright/RemoteMediaExtractor.cpp b/media/libstagefright/RemoteMediaExtractor.cpp
index 5bb0953..7efb91c 100644
--- a/media/libstagefright/RemoteMediaExtractor.cpp
+++ b/media/libstagefright/RemoteMediaExtractor.cpp
@@ -121,14 +121,6 @@
     return mExtractor->flags();
 }
 
-char* RemoteMediaExtractor::getDrmTrackInfo(size_t trackID, int * len) {
-    return mExtractor->getDrmTrackInfo(trackID, len);
-}
-
-void RemoteMediaExtractor::setUID(uid_t uid) {
-    return mExtractor->setUID(uid);
-}
-
 status_t RemoteMediaExtractor::setMediaCas(const HInterfaceToken &casToken) {
     return mExtractor->setMediaCas((uint8_t*)casToken.data(), casToken.size());
 }
diff --git a/media/libstagefright/RemoteMediaSource.cpp b/media/libstagefright/RemoteMediaSource.cpp
index 6b48ce8..d038454 100644
--- a/media/libstagefright/RemoteMediaSource.cpp
+++ b/media/libstagefright/RemoteMediaSource.cpp
@@ -45,7 +45,8 @@
     return mSource->getFormat();
 }
 
-status_t RemoteMediaSource::read(MediaBuffer **buffer, const MediaSource::ReadOptions *options) {
+status_t RemoteMediaSource::read(
+        MediaBufferBase **buffer, const MediaSource::ReadOptions *options) {
     return mSource->read(buffer, reinterpret_cast<const MediaSource::ReadOptions*>(options));
 }
 
diff --git a/media/libstagefright/SimpleDecodingSource.cpp b/media/libstagefright/SimpleDecodingSource.cpp
index 9b2fb4f..f93a0b7 100644
--- a/media/libstagefright/SimpleDecodingSource.cpp
+++ b/media/libstagefright/SimpleDecodingSource.cpp
@@ -200,7 +200,7 @@
 }
 
 status_t SimpleDecodingSource::read(
-        MediaBuffer **buffer, const ReadOptions *options) {
+        MediaBufferBase **buffer, const ReadOptions *options) {
     *buffer = NULL;
 
     Mutexed<ProtectedState>::Locked me(mProtectedState);
@@ -221,7 +221,7 @@
 }
 
 status_t SimpleDecodingSource::doRead(
-        Mutexed<ProtectedState>::Locked &me, MediaBuffer **buffer, const ReadOptions *options) {
+        Mutexed<ProtectedState>::Locked &me, MediaBufferBase **buffer, const ReadOptions *options) {
     // |me| is always locked on entry, but is allowed to be unlocked on exit
     CHECK_EQ(me->mState, STARTED);
 
@@ -267,7 +267,7 @@
                 return UNKNOWN_ERROR;
             }
 
-            MediaBuffer *in_buf;
+            MediaBufferBase *in_buf;
             while (true) {
                 in_buf = NULL;
                 me.unlock();
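Callers of the new read() signature look the same no matter which concrete source sits behind it. A minimal pull-loop sketch, assuming a started MediaSource pointer and the kKeyTime metadata key used elsewhere in this change; it is illustrative and not code from this patch:

    #include <media/MediaSource.h>
    #include <media/stagefright/MediaBufferBase.h>
    #include <media/stagefright/MetaData.h>

    using namespace android;

    // Drain a started source until it stops returning OK.
    static void drain(MediaSource *source) {
        MediaBufferBase *buf = NULL;
        while (source->read(&buf) == OK) {
            if (buf == NULL) {
                continue;                        // defensive, as in NuMediaExtractor above
            }
            int64_t timeUs = 0;
            buf->meta_data()->findInt64(kKeyTime, &timeUs);   // timestamp, if present
            // ... consume buf->data() + buf->range_offset(), buf->range_length() bytes ...
            buf->release();                      // return the buffer to its owner
            buf = NULL;
        }
    }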
diff --git a/media/libstagefright/SurfaceMediaSource.cpp b/media/libstagefright/SurfaceMediaSource.cpp
deleted file mode 100644
index d14e86b..0000000
--- a/media/libstagefright/SurfaceMediaSource.cpp
+++ /dev/null
@@ -1,477 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-//#define LOG_NDEBUG 0
-#define LOG_TAG "SurfaceMediaSource"
-
-#include <inttypes.h>
-
-#include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/SurfaceMediaSource.h>
-#include <media/stagefright/MediaDefs.h>
-#include <media/stagefright/MetaData.h>
-#include <OMX_IVCommon.h>
-#include <media/hardware/HardwareAPI.h>
-#include <media/hardware/MetadataBufferType.h>
-
-#include <ui/GraphicBuffer.h>
-#include <gui/BufferItem.h>
-#include <gui/ISurfaceComposer.h>
-#include <OMX_Component.h>
-
-#include <utils/Log.h>
-#include <utils/String8.h>
-
-#include <private/gui/ComposerService.h>
-
-namespace android {
-
-SurfaceMediaSource::SurfaceMediaSource(uint32_t bufferWidth, uint32_t bufferHeight) :
-    mWidth(bufferWidth),
-    mHeight(bufferHeight),
-    mCurrentSlot(BufferQueue::INVALID_BUFFER_SLOT),
-    mNumPendingBuffers(0),
-    mCurrentTimestamp(0),
-    mFrameRate(30),
-    mStarted(false),
-    mNumFramesReceived(0),
-    mNumFramesEncoded(0),
-    mFirstFrameTimestamp(0),
-    mMaxAcquiredBufferCount(4),  // XXX double-check the default
-    mUseAbsoluteTimestamps(false) {
-    ALOGV("SurfaceMediaSource");
-
-    if (bufferWidth == 0 || bufferHeight == 0) {
-        ALOGE("Invalid dimensions %dx%d", bufferWidth, bufferHeight);
-    }
-
-    BufferQueue::createBufferQueue(&mProducer, &mConsumer);
-    mConsumer->setDefaultBufferSize(bufferWidth, bufferHeight);
-    mConsumer->setConsumerUsageBits(GRALLOC_USAGE_HW_VIDEO_ENCODER |
-            GRALLOC_USAGE_HW_TEXTURE);
-
-    sp<ISurfaceComposer> composer(ComposerService::getComposerService());
-
-    // Note that we can't create an sp<...>(this) in a ctor that will not keep a
-    // reference once the ctor ends, as that would cause the refcount of 'this'
-    // to drop to 0 at the end of the ctor.  Since all we need is a wp<...>
-    // that's what we create.
-    wp<ConsumerListener> listener = static_cast<ConsumerListener*>(this);
-    sp<BufferQueue::ProxyConsumerListener> proxy = new BufferQueue::ProxyConsumerListener(listener);
-
-    status_t err = mConsumer->consumerConnect(proxy, false);
-    if (err != NO_ERROR) {
-        ALOGE("SurfaceMediaSource: error connecting to BufferQueue: %s (%d)",
-                strerror(-err), err);
-    }
-}
-
-SurfaceMediaSource::~SurfaceMediaSource() {
-    ALOGV("~SurfaceMediaSource");
-    CHECK(!mStarted);
-}
-
-nsecs_t SurfaceMediaSource::getTimestamp() {
-    ALOGV("getTimestamp");
-    Mutex::Autolock lock(mMutex);
-    return mCurrentTimestamp;
-}
-
-void SurfaceMediaSource::setFrameAvailableListener(
-        const sp<FrameAvailableListener>& listener) {
-    ALOGV("setFrameAvailableListener");
-    Mutex::Autolock lock(mMutex);
-    mFrameAvailableListener = listener;
-}
-
-void SurfaceMediaSource::dumpState(String8& result) const
-{
-    char buffer[1024];
-    dumpState(result, "", buffer, 1024);
-}
-
-void SurfaceMediaSource::dumpState(
-        String8& result,
-        const char* /* prefix */,
-        char* buffer,
-        size_t /* SIZE */) const
-{
-    Mutex::Autolock lock(mMutex);
-
-    result.append(buffer);
-    mConsumer->dumpState(result, "");
-}
-
-status_t SurfaceMediaSource::setFrameRate(int32_t fps)
-{
-    ALOGV("setFrameRate");
-    Mutex::Autolock lock(mMutex);
-    const int MAX_FRAME_RATE = 60;
-    if (fps < 0 || fps > MAX_FRAME_RATE) {
-        return BAD_VALUE;
-    }
-    mFrameRate = fps;
-    return OK;
-}
-
-MetadataBufferType SurfaceMediaSource::metaDataStoredInVideoBuffers() const {
-    ALOGV("isMetaDataStoredInVideoBuffers");
-    return kMetadataBufferTypeANWBuffer;
-}
-
-int32_t SurfaceMediaSource::getFrameRate( ) const {
-    ALOGV("getFrameRate");
-    Mutex::Autolock lock(mMutex);
-    return mFrameRate;
-}
-
-status_t SurfaceMediaSource::start(MetaData *params)
-{
-    ALOGV("start");
-
-    Mutex::Autolock lock(mMutex);
-
-    CHECK(!mStarted);
-
-    mStartTimeNs = 0;
-    int64_t startTimeUs;
-    int32_t bufferCount = 0;
-    if (params) {
-        if (params->findInt64(kKeyTime, &startTimeUs)) {
-            mStartTimeNs = startTimeUs * 1000;
-        }
-
-        if (!params->findInt32(kKeyNumBuffers, &bufferCount)) {
-            ALOGE("Failed to find the advertised buffer count");
-            return UNKNOWN_ERROR;
-        }
-
-        if (bufferCount <= 1) {
-            ALOGE("bufferCount %d is too small", bufferCount);
-            return BAD_VALUE;
-        }
-
-        mMaxAcquiredBufferCount = bufferCount;
-    }
-
-    CHECK_GT(mMaxAcquiredBufferCount, 1u);
-
-    status_t err =
-        mConsumer->setMaxAcquiredBufferCount(mMaxAcquiredBufferCount);
-
-    if (err != OK) {
-        return err;
-    }
-
-    mNumPendingBuffers = 0;
-    mStarted = true;
-
-    return OK;
-}
-
-status_t SurfaceMediaSource::setMaxAcquiredBufferCount(size_t count) {
-    ALOGV("setMaxAcquiredBufferCount(%zu)", count);
-    Mutex::Autolock lock(mMutex);
-
-    CHECK_GT(count, 1u);
-    mMaxAcquiredBufferCount = count;
-
-    return OK;
-}
-
-status_t SurfaceMediaSource::setUseAbsoluteTimestamps() {
-    ALOGV("setUseAbsoluteTimestamps");
-    Mutex::Autolock lock(mMutex);
-    mUseAbsoluteTimestamps = true;
-
-    return OK;
-}
-
-status_t SurfaceMediaSource::stop()
-{
-    ALOGV("stop");
-    Mutex::Autolock lock(mMutex);
-
-    if (!mStarted) {
-        return OK;
-    }
-
-    mStarted = false;
-    mFrameAvailableCondition.signal();
-
-    while (mNumPendingBuffers > 0) {
-        ALOGI("Still waiting for %zu buffers to be returned.",
-                mNumPendingBuffers);
-
-#if DEBUG_PENDING_BUFFERS
-        for (size_t i = 0; i < mPendingBuffers.size(); ++i) {
-            ALOGI("%d: %p", i, mPendingBuffers.itemAt(i));
-        }
-#endif
-
-        mMediaBuffersAvailableCondition.wait(mMutex);
-    }
-
-    mMediaBuffersAvailableCondition.signal();
-
-    return mConsumer->consumerDisconnect();
-}
-
-sp<MetaData> SurfaceMediaSource::getFormat()
-{
-    ALOGV("getFormat");
-
-    Mutex::Autolock lock(mMutex);
-    sp<MetaData> meta = new MetaData;
-
-    meta->setInt32(kKeyWidth, mWidth);
-    meta->setInt32(kKeyHeight, mHeight);
-    // The encoder format is set as an opaque colorformat
-    // The encoder will later find out the actual colorformat
-    // from the GL frames themselves.
-    meta->setInt32(kKeyColorFormat, OMX_COLOR_FormatAndroidOpaque);
-    meta->setInt32(kKeyStride, mWidth);
-    meta->setInt32(kKeySliceHeight, mHeight);
-    meta->setInt32(kKeyFrameRate, mFrameRate);
-    meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_RAW);
-    return meta;
-}
-
-// Pass the data to the MediaBuffer. Pass in only the metadata
-// Note: Call only when you have the lock
-void SurfaceMediaSource::passMetadataBuffer_l(MediaBuffer **buffer,
-        ANativeWindowBuffer *bufferHandle) const {
-    *buffer = new MediaBuffer(sizeof(VideoNativeMetadata));
-    VideoNativeMetadata *data = (VideoNativeMetadata *)(*buffer)->data();
-    if (data == NULL) {
-        ALOGE("Cannot allocate memory for metadata buffer!");
-        return;
-    }
-    data->eType = metaDataStoredInVideoBuffers();
-    data->pBuffer = bufferHandle;
-    data->nFenceFd = -1;
-    ALOGV("handle = %p, offset = %zu, length = %zu",
-            bufferHandle, (*buffer)->range_length(), (*buffer)->range_offset());
-}
-
-status_t SurfaceMediaSource::read(
-        MediaBuffer **buffer, const ReadOptions * /* options */) {
-    ALOGV("read");
-    Mutex::Autolock lock(mMutex);
-
-    *buffer = NULL;
-
-    while (mStarted && mNumPendingBuffers == mMaxAcquiredBufferCount) {
-        mMediaBuffersAvailableCondition.wait(mMutex);
-    }
-
-    // Update the current buffer info
-    // TODO: mCurrentSlot can be made a bufferstate since there
-    // can be more than one "current" slot.
-
-    BufferItem item;
-    // If the recording has started and the queue is empty, then just
-    // wait here till the frames come in from the client side
-    while (mStarted) {
-
-        status_t err = mConsumer->acquireBuffer(&item, 0);
-        if (err == BufferQueue::NO_BUFFER_AVAILABLE) {
-            // wait for a buffer to be queued
-            mFrameAvailableCondition.wait(mMutex);
-        } else if (err == OK) {
-            err = item.mFence->waitForever("SurfaceMediaSource::read");
-            if (err) {
-                ALOGW("read: failed to wait for buffer fence: %d", err);
-            }
-
-            // First time seeing the buffer?  Add it to the SMS slot
-            if (item.mGraphicBuffer != NULL) {
-                mSlots[item.mSlot].mGraphicBuffer = item.mGraphicBuffer;
-            }
-            mSlots[item.mSlot].mFrameNumber = item.mFrameNumber;
-
-            // check for the timing of this buffer
-            if (mNumFramesReceived == 0 && !mUseAbsoluteTimestamps) {
-                mFirstFrameTimestamp = item.mTimestamp;
-                // Initial delay
-                if (mStartTimeNs > 0) {
-                    if (item.mTimestamp < mStartTimeNs) {
-                        // This frame predates start of record, discard
-                        mConsumer->releaseBuffer(
-                                item.mSlot, item.mFrameNumber, EGL_NO_DISPLAY,
-                                EGL_NO_SYNC_KHR, Fence::NO_FENCE);
-                        continue;
-                    }
-                    mStartTimeNs = item.mTimestamp - mStartTimeNs;
-                }
-            }
-            item.mTimestamp = mStartTimeNs + (item.mTimestamp - mFirstFrameTimestamp);
-
-            mNumFramesReceived++;
-
-            break;
-        } else {
-            ALOGE("read: acquire failed with error code %d", err);
-            return ERROR_END_OF_STREAM;
-        }
-
-    }
-
-    // If the loop was exited as a result of stopping the recording,
-    // it is OK
-    if (!mStarted) {
-        ALOGV("Read: SurfaceMediaSource is stopped. Returning ERROR_END_OF_STREAM.");
-        return ERROR_END_OF_STREAM;
-    }
-
-    mCurrentSlot = item.mSlot;
-
-    // First time seeing the buffer?  Add it to the SMS slot
-    if (item.mGraphicBuffer != NULL) {
-        mSlots[item.mSlot].mGraphicBuffer = item.mGraphicBuffer;
-    }
-    mSlots[item.mSlot].mFrameNumber = item.mFrameNumber;
-
-    mCurrentBuffers.push_back(mSlots[mCurrentSlot].mGraphicBuffer);
-    int64_t prevTimeStamp = mCurrentTimestamp;
-    mCurrentTimestamp = item.mTimestamp;
-
-    mNumFramesEncoded++;
-    // Pass the data to the MediaBuffer. Pass in only the metadata
-
-    passMetadataBuffer_l(buffer, mSlots[mCurrentSlot].mGraphicBuffer->getNativeBuffer());
-
-    (*buffer)->setObserver(this);
-    (*buffer)->add_ref();
-    (*buffer)->meta_data()->setInt64(kKeyTime, mCurrentTimestamp / 1000);
-    ALOGV("Frames encoded = %d, timestamp = %" PRId64 ", time diff = %" PRId64,
-            mNumFramesEncoded, mCurrentTimestamp / 1000,
-            mCurrentTimestamp / 1000 - prevTimeStamp / 1000);
-
-    ++mNumPendingBuffers;
-
-#if DEBUG_PENDING_BUFFERS
-    mPendingBuffers.push_back(*buffer);
-#endif
-
-    ALOGV("returning mbuf %p", *buffer);
-
-    return OK;
-}
-
-static buffer_handle_t getMediaBufferHandle(MediaBuffer *buffer) {
-    // need to convert to char* for pointer arithmetic and then
-    // copy the byte stream into our handle
-    buffer_handle_t bufferHandle;
-    memcpy(&bufferHandle, (char*)(buffer->data()) + 4, sizeof(buffer_handle_t));
-    return bufferHandle;
-}
-
-void SurfaceMediaSource::signalBufferReturned(MediaBuffer *buffer) {
-    ALOGV("signalBufferReturned");
-
-    bool foundBuffer = false;
-
-    Mutex::Autolock lock(mMutex);
-
-    buffer_handle_t bufferHandle = getMediaBufferHandle(buffer);
-
-    for (size_t i = 0; i < mCurrentBuffers.size(); i++) {
-        if (mCurrentBuffers[i]->handle == bufferHandle) {
-            mCurrentBuffers.removeAt(i);
-            foundBuffer = true;
-            break;
-        }
-    }
-
-    if (!foundBuffer) {
-        ALOGW("returned buffer was not found in the current buffer list");
-    }
-
-    for (int id = 0; id < BufferQueue::NUM_BUFFER_SLOTS; id++) {
-        if (mSlots[id].mGraphicBuffer == NULL) {
-            continue;
-        }
-
-        if (bufferHandle == mSlots[id].mGraphicBuffer->handle) {
-            ALOGV("Slot %d returned, matches handle = %p", id,
-                    mSlots[id].mGraphicBuffer->handle);
-
-            mConsumer->releaseBuffer(id, mSlots[id].mFrameNumber,
-                                        EGL_NO_DISPLAY, EGL_NO_SYNC_KHR,
-                    Fence::NO_FENCE);
-
-            buffer->setObserver(0);
-            buffer->release();
-
-            foundBuffer = true;
-            break;
-        }
-    }
-
-    if (!foundBuffer) {
-        CHECK(!"signalBufferReturned: bogus buffer");
-    }
-
-#if DEBUG_PENDING_BUFFERS
-    for (size_t i = 0; i < mPendingBuffers.size(); ++i) {
-        if (mPendingBuffers.itemAt(i) == buffer) {
-            mPendingBuffers.removeAt(i);
-            break;
-        }
-    }
-#endif
-
-    --mNumPendingBuffers;
-    mMediaBuffersAvailableCondition.broadcast();
-}
-
-// Part of the BufferQueue::ConsumerListener
-void SurfaceMediaSource::onFrameAvailable(const BufferItem& /* item */) {
-    ALOGV("onFrameAvailable");
-
-    sp<FrameAvailableListener> listener;
-    { // scope for the lock
-        Mutex::Autolock lock(mMutex);
-        mFrameAvailableCondition.broadcast();
-        listener = mFrameAvailableListener;
-    }
-
-    if (listener != NULL) {
-        ALOGV("actually calling onFrameAvailable");
-        listener->onFrameAvailable();
-    }
-}
-
-// SurfaceMediaSource hijacks this event to assume
-// the producer is disconnecting from the BufferQueue
-// and that it should stop the recording
-void SurfaceMediaSource::onBuffersReleased() {
-    ALOGV("onBuffersReleased");
-
-    Mutex::Autolock lock(mMutex);
-
-    mFrameAvailableCondition.signal();
-
-    for (int i = 0; i < BufferQueue::NUM_BUFFER_SLOTS; i++) {
-       mSlots[i].mGraphicBuffer = 0;
-    }
-}
-
-void SurfaceMediaSource::onSidebandStreamChanged() {
-    ALOG_ASSERT(false, "SurfaceMediaSource can't consume sideband streams");
-}
-
-} // end of namespace android
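The deleted code above also spells out the observer half of the buffer contract that the rest of this change moves to MediaBufferBase: a source attaches itself with setObserver(this) and takes a reference before handing a buffer out, then detaches and releases when the buffer is signalled back. A minimal sketch of that contract; the class name is hypothetical and the header choice is an assumption, not something this patch introduces:

    #include <media/stagefright/MediaBufferBase.h>

    using namespace android;

    struct BufferRecycler : public MediaBufferObserver {
        // Invoked when the last external reference to a tracked buffer is dropped.
        virtual void signalBufferReturned(MediaBufferBase *buffer) {
            buffer->setObserver(0);   // detach, as MediaCodecSource and SurfaceMediaSource do
            buffer->release();        // drop the reference taken when the buffer was handed out
        }
    };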
diff --git a/media/libstagefright/codecs/cmds/codec2.cpp b/media/libstagefright/codecs/cmds/codec2.cpp
index d95bb07..8022b84 100644
--- a/media/libstagefright/codecs/cmds/codec2.cpp
+++ b/media/libstagefright/codecs/cmds/codec2.cpp
@@ -295,7 +295,7 @@
         size_t size = 0u;
         void *data = nullptr;
         int64_t timestamp = 0u;
-        MediaBuffer *buffer = nullptr;
+        MediaBufferBase *buffer = nullptr;
         sp<ABuffer> csd;
         if (csd0 != nullptr) {
             csd = csd0;
diff --git a/media/libstagefright/include/media/stagefright/AudioPlayer.h b/media/libstagefright/include/media/stagefright/AudioPlayer.h
index e971762..7c2c36f 100644
--- a/media/libstagefright/include/media/stagefright/AudioPlayer.h
+++ b/media/libstagefright/include/media/stagefright/AudioPlayer.h
@@ -69,7 +69,7 @@
     sp<MediaSource> mSource;
     sp<AudioTrack> mAudioTrack;
 
-    MediaBuffer *mInputBuffer;
+    MediaBufferBase *mInputBuffer;
 
     int mSampleRate;
     int64_t mLatencyUs;
@@ -91,7 +91,7 @@
 
     bool mIsFirstBuffer;
     status_t mFirstBufferResult;
-    MediaBuffer *mFirstBuffer;
+    MediaBufferBase *mFirstBuffer;
 
     sp<MediaPlayerBase::AudioSink> mAudioSink;
 
diff --git a/media/libstagefright/include/media/stagefright/AudioSource.h b/media/libstagefright/include/media/stagefright/AudioSource.h
index f66b92d..206d322 100644
--- a/media/libstagefright/include/media/stagefright/AudioSource.h
+++ b/media/libstagefright/include/media/stagefright/AudioSource.h
@@ -56,11 +56,11 @@
     int16_t getMaxAmplitude();
 
     virtual status_t read(
-            MediaBuffer **buffer, const ReadOptions *options = NULL);
+            MediaBufferBase **buffer, const ReadOptions *options = NULL);
     virtual status_t setStopTimeUs(int64_t stopTimeUs);
 
     status_t dataCallback(const AudioRecord::Buffer& buffer);
-    virtual void signalBufferReturned(MediaBuffer *buffer);
+    virtual void signalBufferReturned(MediaBufferBase *buffer);
 
     status_t setInputDevice(audio_port_handle_t deviceId);
     status_t getRoutedDeviceId(audio_port_handle_t* deviceId);
diff --git a/media/libstagefright/include/media/stagefright/CallbackMediaSource.h b/media/libstagefright/include/media/stagefright/CallbackMediaSource.h
index 944d951..33453fa 100644
--- a/media/libstagefright/include/media/stagefright/CallbackMediaSource.h
+++ b/media/libstagefright/include/media/stagefright/CallbackMediaSource.h
@@ -33,7 +33,7 @@
     virtual status_t stop();
     virtual sp<MetaData> getFormat();
     virtual status_t read(
-            MediaBuffer **buffer, const ReadOptions *options = NULL);
+            MediaBufferBase **buffer, const ReadOptions *options = NULL);
     virtual status_t pause();
 
 private:
diff --git a/media/libstagefright/include/media/stagefright/CameraSource.h b/media/libstagefright/include/media/stagefright/CameraSource.h
index 945e1be..475976b 100644
--- a/media/libstagefright/include/media/stagefright/CameraSource.h
+++ b/media/libstagefright/include/media/stagefright/CameraSource.h
@@ -97,7 +97,7 @@
     virtual status_t start(MetaData *params = NULL);
     virtual status_t stop() { return reset(); }
     virtual status_t read(
-            MediaBuffer **buffer, const ReadOptions *options = NULL);
+            MediaBufferBase **buffer, const ReadOptions *options = NULL);
     virtual status_t setStopTimeUs(int64_t stopTimeUs);
 
     /**
@@ -127,7 +127,7 @@
      */
     MetadataBufferType metaDataStoredInVideoBuffers() const;
 
-    virtual void signalBufferReturned(MediaBuffer* buffer);
+    virtual void signalBufferReturned(MediaBufferBase* buffer);
 
 protected:
 
diff --git a/media/libstagefright/include/media/stagefright/CameraSourceTimeLapse.h b/media/libstagefright/include/media/stagefright/CameraSourceTimeLapse.h
index b066f9a..533e33b 100644
--- a/media/libstagefright/include/media/stagefright/CameraSourceTimeLapse.h
+++ b/media/libstagefright/include/media/stagefright/CameraSourceTimeLapse.h
@@ -107,7 +107,7 @@
 
     // Stores a copy of the MediaBuffer read in the last read() call after
     // mQuickStop was true.
-    MediaBuffer* mLastReadBufferCopy;
+    MediaBufferBase* mLastReadBufferCopy;
 
     // Status code for last read.
     status_t mLastReadStatus;
@@ -128,10 +128,10 @@
     // Wrapper over CameraSource::signalBufferReturned() to implement quick stop.
     // It only handles the case when mLastReadBufferCopy is signalled. Otherwise
     // it calls the base class' function.
-    virtual void signalBufferReturned(MediaBuffer* buffer);
+    virtual void signalBufferReturned(MediaBufferBase* buffer);
 
     // Wrapper over CameraSource::read() to implement quick stop.
-    virtual status_t read(MediaBuffer **buffer, const ReadOptions *options = NULL);
+    virtual status_t read(MediaBufferBase **buffer, const ReadOptions *options = NULL);
 
     // mSkipCurrentFrame is set to true in dataCallbackTimestamp() if the current
     // frame needs to be skipped and this function just returns the value of mSkipCurrentFrame.
@@ -170,7 +170,7 @@
 
     // Convenience function to fill mLastReadBufferCopy from the just read
     // buffer.
-    void fillLastReadBufferCopy(MediaBuffer& sourceBuffer);
+    void fillLastReadBufferCopy(MediaBufferBase& sourceBuffer);
 
     // If the passed in size (width x height) is a supported video/preview size,
     // the function sets the camera's video/preview size to it and returns true.
diff --git a/media/libstagefright/include/media/stagefright/JPEGSource.h b/media/libstagefright/include/media/stagefright/JPEGSource.h
index 9fcbfc2..8ab3d11 100644
--- a/media/libstagefright/include/media/stagefright/JPEGSource.h
+++ b/media/libstagefright/include/media/stagefright/JPEGSource.h
@@ -33,7 +33,7 @@
     virtual sp<MetaData> getFormat();
 
     virtual status_t read(
-            MediaBuffer **buffer, const ReadOptions *options = NULL);
+            MediaBufferBase **buffer, const ReadOptions *options = NULL);
 
 protected:
     virtual ~JPEGSource();
diff --git a/media/libstagefright/include/media/stagefright/MediaAdapter.h b/media/libstagefright/include/media/stagefright/MediaAdapter.h
index 4b47160..589c827 100644
--- a/media/libstagefright/include/media/stagefright/MediaAdapter.h
+++ b/media/libstagefright/include/media/stagefright/MediaAdapter.h
@@ -40,13 +40,13 @@
     virtual status_t stop();
     virtual sp<MetaData> getFormat();
     virtual status_t read(
-            MediaBuffer **buffer, const ReadOptions *options = NULL);
+            MediaBufferBase **buffer, const ReadOptions *options = NULL);
 
     /////////////////////////////////////////////////
     // Inherited functions from MediaBufferObserver
     /////////////////////////////////////////////////
 
-    virtual void signalBufferReturned(MediaBuffer *buffer);
+    virtual void signalBufferReturned(MediaBufferBase *buffer);
 
     /////////////////////////////////////////////////
     // Non-inherited functions:
diff --git a/media/libstagefright/include/media/stagefright/MediaBufferBase.h b/media/libstagefright/include/media/stagefright/MediaBufferBase.h
new file mode 120000
index 0000000..80e49b0
--- /dev/null
+++ b/media/libstagefright/include/media/stagefright/MediaBufferBase.h
@@ -0,0 +1 @@
+../../../../libmediaextractor/include/media/stagefright/MediaBufferBase.h
\ No newline at end of file
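With the header now reachable as media/stagefright/MediaBufferBase.h, a source only needs the base type in its interface. A minimal declaration sketch against the MediaSource base class from libmediaextractor; MySource is hypothetical and only restates the signatures used throughout this change:

    #include <media/MediaSource.h>
    #include <media/stagefright/MediaBufferBase.h>

    namespace android {

    struct MySource : public MediaSource {
        virtual status_t start(MetaData *params = NULL);
        virtual status_t stop();
        virtual sp<MetaData> getFormat();
        // Buffers cross this interface as MediaBufferBase only.
        virtual status_t read(
                MediaBufferBase **buffer, const ReadOptions *options = NULL);
    };

    }  // namespace android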
diff --git a/media/libstagefright/include/media/stagefright/MediaCodecSource.h b/media/libstagefright/include/media/stagefright/MediaCodecSource.h
index eec115e..a68cc19 100644
--- a/media/libstagefright/include/media/stagefright/MediaCodecSource.h
+++ b/media/libstagefright/include/media/stagefright/MediaCodecSource.h
@@ -57,13 +57,13 @@
     virtual status_t pause(MetaData *params);
     virtual sp<MetaData> getFormat();
     virtual status_t read(
-            MediaBuffer **buffer,
+            MediaBufferBase **buffer,
             const ReadOptions *options = NULL);
     virtual status_t setStopTimeUs(int64_t stopTimeUs);
 
 
     // MediaBufferObserver
-    virtual void signalBufferReturned(MediaBuffer *buffer);
+    virtual void signalBufferReturned(MediaBufferBase *buffer);
 
     // for AHandlerReflector
     void onMessageReceived(const sp<AMessage> &msg);
@@ -136,7 +136,7 @@
     sp<AMessage> mEncoderActivityNotify;
     sp<IGraphicBufferProducer> mGraphicBufferProducer;
     sp<PersistentSurface> mPersistentSurface;
-    List<MediaBuffer *> mInputBufferQueue;
+    List<MediaBufferBase *> mInputBufferQueue;
     List<size_t> mAvailEncoderInputIndices;
     List<int64_t> mDecodingTimeQueue; // decoding time (us) for video
     int64_t mInputBufferTimeOffsetUs;
@@ -149,7 +149,7 @@
 
     struct Output {
         Output();
-        List<MediaBuffer*> mBufferQueue;
+        List<MediaBufferBase*> mBufferQueue;
         bool mEncoderReachedEOS;
         status_t mErrorCode;
         Condition mCond;
diff --git a/media/libstagefright/include/media/stagefright/NuMediaExtractor.h b/media/libstagefright/include/media/stagefright/NuMediaExtractor.h
index 6a2e39b..5e5ef6e 100644
--- a/media/libstagefright/include/media/stagefright/NuMediaExtractor.h
+++ b/media/libstagefright/include/media/stagefright/NuMediaExtractor.h
@@ -107,8 +107,8 @@
 
     struct Sample {
         Sample();
-        Sample(MediaBuffer *buffer, int64_t timeUs);
-        MediaBuffer *mBuffer;
+        Sample(MediaBufferBase *buffer, int64_t timeUs);
+        MediaBufferBase *mBuffer;
         int64_t mSampleTimeUs;
     };
 
@@ -150,7 +150,7 @@
 
     bool getTotalBitrate(int64_t *bitRate) const;
     status_t updateDurationAndBitrate();
-    status_t appendVorbisNumPageSamples(MediaBuffer *mbuf, const sp<ABuffer> &buffer);
+    status_t appendVorbisNumPageSamples(MediaBufferBase *mbuf, const sp<ABuffer> &buffer);
 
     DISALLOW_EVIL_CONSTRUCTORS(NuMediaExtractor);
 };
diff --git a/media/libstagefright/include/media/stagefright/RemoteMediaExtractor.h b/media/libstagefright/include/media/stagefright/RemoteMediaExtractor.h
index 22a8210..509e669 100644
--- a/media/libstagefright/include/media/stagefright/RemoteMediaExtractor.h
+++ b/media/libstagefright/include/media/stagefright/RemoteMediaExtractor.h
@@ -40,8 +40,6 @@
     virtual sp<MetaData> getMetaData();
     virtual status_t getMetrics(Parcel *reply);
     virtual uint32_t flags() const;
-    virtual char* getDrmTrackInfo(size_t trackID, int * len);
-    virtual void setUID(uid_t uid);
     virtual status_t setMediaCas(const HInterfaceToken &casToken);
     virtual const char * name();
 
diff --git a/media/libstagefright/include/media/stagefright/RemoteMediaSource.h b/media/libstagefright/include/media/stagefright/RemoteMediaSource.h
index d1afa6a..a9bf820 100644
--- a/media/libstagefright/include/media/stagefright/RemoteMediaSource.h
+++ b/media/libstagefright/include/media/stagefright/RemoteMediaSource.h
@@ -35,7 +35,7 @@
     virtual status_t stop();
     virtual sp<MetaData> getFormat();
     virtual status_t read(
-            MediaBuffer **buffer,
+            MediaBufferBase **buffer,
             const MediaSource::ReadOptions *options = NULL);
     virtual status_t pause();
     virtual status_t setStopTimeUs(int64_t stopTimeUs);
diff --git a/media/libstagefright/include/media/stagefright/SimpleDecodingSource.h b/media/libstagefright/include/media/stagefright/SimpleDecodingSource.h
index 3006b45..23defb4 100644
--- a/media/libstagefright/include/media/stagefright/SimpleDecodingSource.h
+++ b/media/libstagefright/include/media/stagefright/SimpleDecodingSource.h
@@ -65,7 +65,7 @@
     virtual sp<MetaData> getFormat();
 
     // reads from the source. This call always blocks.
-    virtual status_t read(MediaBuffer **buffer, const ReadOptions *options);
+    virtual status_t read(MediaBufferBase **buffer, const ReadOptions *options);
 
     // unsupported methods
     virtual status_t pause() { return INVALID_OPERATION; }
@@ -104,7 +104,8 @@
 
     // do the actual reading
     status_t doRead(
-            Mutexed<ProtectedState>::Locked &me, MediaBuffer **buffer, const ReadOptions *options);
+            Mutexed<ProtectedState>::Locked &me, MediaBufferBase **buffer,
+            const ReadOptions *options);
 };
 
 } // namespace android
diff --git a/media/libstagefright/include/media/stagefright/SurfaceMediaSource.h b/media/libstagefright/include/media/stagefright/SurfaceMediaSource.h
deleted file mode 100644
index 2e495f9..0000000
--- a/media/libstagefright/include/media/stagefright/SurfaceMediaSource.h
+++ /dev/null
@@ -1,248 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ANDROID_GUI_SURFACEMEDIASOURCE_H
-#define ANDROID_GUI_SURFACEMEDIASOURCE_H
-
-#include <gui/IGraphicBufferProducer.h>
-#include <gui/BufferQueue.h>
-
-#include <utils/threads.h>
-#include <utils/Vector.h>
-#include <media/MediaSource.h>
-#include <media/stagefright/MediaBuffer.h>
-
-#include <media/hardware/MetadataBufferType.h>
-
-#include "foundation/ABase.h"
-
-namespace android {
-// ----------------------------------------------------------------------------
-
-class String8;
-class GraphicBuffer;
-
-// ASSUMPTIONS
-// 1. SurfaceMediaSource is initialized with width*height which
-// can never change.  However, dequeue buffer does not currently
-// enforce this: as in BufferQueue, dequeue can be used by Surface
-// which can modify the default width and height.  Also neither the width
-// nor height can be 0.
-// 2. setSynchronousMode is never used (basically no one should call
-// setSynchronousMode(false))
-// 3. setCrop, setTransform, setScalingMode should never be used
-// 4. queueBuffer returns a filled buffer to the SurfaceMediaSource. In addition, a
-// timestamp must be provided for the buffer. The timestamp is in
-// nanoseconds, and must be monotonically increasing. Its other semantics
-// (zero point, etc) are client-dependent and should be documented by the
-// client.
-// 5. Once disconnected, SurfaceMediaSource can be reused (can not
-// connect again)
-// 6. Stop is a hard stop, the last few frames held by the encoder
-// may be dropped.  It is possible to wait for the buffers to be
-// returned (but not implemented)
-
-#define DEBUG_PENDING_BUFFERS   0
-
-class SurfaceMediaSource : public MediaSource,
-                                public MediaBufferObserver,
-                                protected ConsumerListener {
-public:
-    enum { MIN_UNDEQUEUED_BUFFERS = 4};
-
-    struct FrameAvailableListener : public virtual RefBase {
-        // onFrameAvailable() is called from queueBuffer() if the FIFO is
-        // empty. You can use SurfaceMediaSource::getQueuedCount() to
-        // figure out if there are more frames waiting.
-        // This is called without any lock held and can be called concurrently by
-        // multiple threads.
-        virtual void onFrameAvailable() = 0;
-    };
-
-    SurfaceMediaSource(uint32_t bufferWidth, uint32_t bufferHeight);
-
-    virtual ~SurfaceMediaSource();
-
-    // For the MediaSource interface for use by StageFrightRecorder:
-    virtual status_t start(MetaData *params = NULL);
-    virtual status_t stop();
-    virtual status_t read(MediaBuffer **buffer,
-            const ReadOptions *options = NULL);
-    virtual sp<MetaData> getFormat();
-
-    // Get / Set the frame rate used for encoding. Default fps = 30
-    status_t setFrameRate(int32_t fps) ;
-    int32_t getFrameRate( ) const;
-
-    // The call for the StageFrightRecorder to tell us that
-    // it is done using the MediaBuffer data so that its state
-    // can be set to FREE for dequeuing
-    virtual void signalBufferReturned(MediaBuffer* buffer);
-    // end of MediaSource interface
-
-    // getTimestamp retrieves the timestamp associated with the image
-    // set by the most recent call to read()
-    //
-    // The timestamp is in nanoseconds, and is monotonically increasing. Its
-    // other semantics (zero point, etc) are source-dependent and should be
-    // documented by the source.
-    int64_t getTimestamp();
-
-    // setFrameAvailableListener sets the listener object that will be notified
-    // when a new frame becomes available.
-    void setFrameAvailableListener(const sp<FrameAvailableListener>& listener);
-
-    // dump our state in a String
-    void dumpState(String8& result) const;
-    void dumpState(String8& result, const char* prefix, char* buffer,
-                                                    size_t SIZE) const;
-
-    // metaDataStoredInVideoBuffers tells the encoder what kind of metadata
-    // is passed through the buffers. Currently, it is set to ANWBuffer
-    MetadataBufferType metaDataStoredInVideoBuffers() const;
-
-    sp<IGraphicBufferProducer> getProducer() const { return mProducer; }
-
-    // To be called before start()
-    status_t setMaxAcquiredBufferCount(size_t count);
-
-    // To be called before start()
-    status_t setUseAbsoluteTimestamps();
-
-protected:
-
-    // Implementation of the BufferQueue::ConsumerListener interface.  These
-    // calls are used to notify the Surface of asynchronous events in the
-    // BufferQueue.
-    virtual void onFrameAvailable(const BufferItem& item);
-
-    // Used as a hook to BufferQueue::disconnect()
-    // This is called by the client side when it is done
-    // TODO: Currently, this also sets mStopped to true which
-    // is needed for unblocking the encoder which might be
-    // waiting to read more frames. So if on the client side,
-    // the same thread supplies the frames and also calls stop
-    // on the encoder, the client has to call disconnect before
-    // it calls stop.
-    // In the case of the camera,
-    // that need not be required since the thread supplying the
-    // frames is separate from the one calling stop.
-    virtual void onBuffersReleased();
-
-    // SurfaceMediaSource can't handle sideband streams, so this is not expected
-    // to ever be called. Does nothing.
-    virtual void onSidebandStreamChanged();
-
-    static bool isExternalFormat(uint32_t format);
-
-private:
-    // A BufferQueue, represented by these interfaces, is the exchange point
-    // between the producer and this consumer
-    sp<IGraphicBufferProducer> mProducer;
-    sp<IGraphicBufferConsumer> mConsumer;
-
-    struct SlotData {
-        sp<GraphicBuffer> mGraphicBuffer;
-        uint64_t mFrameNumber;
-    };
-
-    // mSlots caches GraphicBuffers and frameNumbers from the buffer queue
-    SlotData mSlots[BufferQueue::NUM_BUFFER_SLOTS];
-
-    // The permanent width and height of SMS buffers
-    int mWidth;
-    int mHeight;
-
-    // mCurrentSlot is the buffer slot index of the buffer that is currently
-    // being used by buffer consumer
-    // (e.g. StageFrightRecorder in the case of SurfaceMediaSource or GLTexture
-    // in the case of Surface).
-    // It is initialized to INVALID_BUFFER_SLOT,
-    // indicating that no buffer slot is currently bound to the texture. Note,
-    // however, that a value of INVALID_BUFFER_SLOT does not necessarily mean
-    // that no buffer is bound to the texture. A call to setBufferCount will
-    // reset mCurrentTexture to INVALID_BUFFER_SLOT.
-    int mCurrentSlot;
-
-    // mCurrentBuffers is a list of the graphic buffers that are being used by
-    // buffer consumer (i.e. the video encoder). It's possible that these
-    // buffers are not associated with any buffer slots, so we must track them
-    // separately.  Buffers are added to this list in read, and removed from
-    // this list in signalBufferReturned
-    Vector<sp<GraphicBuffer> > mCurrentBuffers;
-
-    size_t mNumPendingBuffers;
-
-#if DEBUG_PENDING_BUFFERS
-    Vector<MediaBuffer *> mPendingBuffers;
-#endif
-
-    // mCurrentTimestamp is the timestamp for the current texture. It
-    // gets set to mLastQueuedTimestamp each time updateTexImage is called.
-    int64_t mCurrentTimestamp;
-
-    // mFrameAvailableListener is the listener object that will be called when a
-    // new frame becomes available. If it is not NULL it will be called from
-    // queueBuffer.
-    sp<FrameAvailableListener> mFrameAvailableListener;
-
-    // mMutex is the mutex used to prevent concurrent access to the member
-    // variables of SurfaceMediaSource objects. It must be locked whenever the
-    // member variables are accessed.
-    mutable Mutex mMutex;
-
-    ////////////////////////// For MediaSource
-    // Set to a default of 30 fps if not specified by the client side
-    int32_t mFrameRate;
-
-    // mStarted is a flag to check if the recording is going on
-    bool mStarted;
-
-    // mNumFramesReceived indicates the number of frames received from
-    // the client side
-    int mNumFramesReceived;
-    // mNumFramesEncoded indicates the number of frames passed on to the
-    // encoder
-    int mNumFramesEncoded;
-
-    // mFirstFrameTimestamp is the timestamp of the first received frame.
-    // It is used to offset the output timestamps so recording starts at time 0.
-    int64_t mFirstFrameTimestamp;
-    // mStartTimeNs is the start time passed into the source at start, used to
-    // offset timestamps.
-    int64_t mStartTimeNs;
-
-    size_t mMaxAcquiredBufferCount;
-
-    bool mUseAbsoluteTimestamps;
-
-    // mFrameAvailableCondition condition used to indicate whether there
-    // is a frame available for dequeuing
-    Condition mFrameAvailableCondition;
-
-    Condition mMediaBuffersAvailableCondition;
-
-    // Allocate and return a new MediaBuffer and pass the ANW buffer as metadata into it.
-    void passMetadataBuffer_l(MediaBuffer **buffer, ANativeWindowBuffer *bufferHandle) const;
-
-    // Avoid copying and equating and default constructor
-    DISALLOW_EVIL_CONSTRUCTORS(SurfaceMediaSource);
-};
-
-// ----------------------------------------------------------------------------
-}; // namespace android
-
-#endif // ANDROID_GUI_SURFACEMEDIASOURCE_H
diff --git a/media/libstagefright/mpeg2ts/AnotherPacketSource.cpp b/media/libstagefright/mpeg2ts/AnotherPacketSource.cpp
index 1dac171..d0b17e0 100644
--- a/media/libstagefright/mpeg2ts/AnotherPacketSource.cpp
+++ b/media/libstagefright/mpeg2ts/AnotherPacketSource.cpp
@@ -163,7 +163,7 @@
 }
 
 status_t AnotherPacketSource::read(
-        MediaBuffer **out, const ReadOptions *) {
+        MediaBufferBase **out, const ReadOptions *) {
     *out = NULL;
 
     Mutex::Autolock autoLock(mLock);
@@ -202,7 +202,7 @@
             seg.mMaxDequeTimeUs = timeUs;
         }
 
-        MediaBuffer *mediaBuffer = new MediaBuffer(buffer);
+        MediaBufferBase *mediaBuffer = new MediaBuffer(buffer);
         sp<MetaData> bufmeta = mediaBuffer->meta_data();
 
         bufmeta->setInt64(kKeyTime, timeUs);
diff --git a/media/libstagefright/mpeg2ts/AnotherPacketSource.h b/media/libstagefright/mpeg2ts/AnotherPacketSource.h
index 3abd573..f4a6acb 100644
--- a/media/libstagefright/mpeg2ts/AnotherPacketSource.h
+++ b/media/libstagefright/mpeg2ts/AnotherPacketSource.h
@@ -39,7 +39,7 @@
     virtual sp<MetaData> getFormat();
 
     virtual status_t read(
-            MediaBuffer **buffer, const ReadOptions *options = NULL);
+            MediaBufferBase **buffer, const ReadOptions *options = NULL);
 
     void clear();
 
diff --git a/media/libstagefright/omx/tests/OMXHarness.cpp b/media/libstagefright/omx/tests/OMXHarness.cpp
index 86c7211..3d9c791 100644
--- a/media/libstagefright/omx/tests/OMXHarness.cpp
+++ b/media/libstagefright/omx/tests/OMXHarness.cpp
@@ -658,7 +658,7 @@
                      requestedSeekTimeUs, requestedSeekTimeUs / 1E6);
             }
 
-            MediaBuffer *buffer = NULL;
+            MediaBufferBase *buffer = NULL;
             options.setSeekTo(
                     requestedSeekTimeUs, MediaSource::ReadOptions::SEEK_NEXT_SYNC);
 
@@ -679,7 +679,7 @@
         }
 
         status_t err;
-        MediaBuffer *buffer;
+        MediaBufferBase *buffer;
         for (;;) {
             err = codec->read(&buffer, &options);
             options.clearSeekTo();
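The harness also shows the one ReadOptions detail worth calling out with the new signature: a seek request stays attached to the options object, so it is cleared after the read that consumed it. A compact sketch of that pattern, assuming a started source and a seekTimeUs value; it restates the test logic above rather than adding any new API:

    #include <media/MediaSource.h>
    #include <media/stagefright/MediaBufferBase.h>

    using namespace android;

    // Seek once, then let subsequent reads continue sequentially.
    static status_t seekAndReadOnce(MediaSource *source, int64_t seekTimeUs) {
        MediaSource::ReadOptions options;
        options.setSeekTo(seekTimeUs, MediaSource::ReadOptions::SEEK_NEXT_SYNC);

        MediaBufferBase *buffer = NULL;
        status_t err = source->read(&buffer, &options);
        options.clearSeekTo();           // later reads should not repeat the seek
        if (err == OK && buffer != NULL) {
            buffer->release();
        }
        return err;
    }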
diff --git a/media/libstagefright/rtsp/ARTPWriter.cpp b/media/libstagefright/rtsp/ARTPWriter.cpp
index 8604b69..4ce8a0c 100644
--- a/media/libstagefright/rtsp/ARTPWriter.cpp
+++ b/media/libstagefright/rtsp/ARTPWriter.cpp
@@ -173,7 +173,7 @@
     return OK;
 }
 
-static void StripStartcode(MediaBuffer *buffer) {
+static void StripStartcode(MediaBufferBase *buffer) {
     if (buffer->range_length() < 4) {
         return;
     }
@@ -195,7 +195,7 @@
 
 #if 0
             if (mMode == H264) {
-                MediaBuffer *buffer;
+                MediaBufferBase *buffer;
                 CHECK_EQ(mSource->read(&buffer), (status_t)OK);
 
                 StripStartcode(buffer);
@@ -265,7 +265,7 @@
 }
 
 void ARTPWriter::onRead(const sp<AMessage> &msg) {
-    MediaBuffer *mediaBuf;
+    MediaBufferBase *mediaBuf;
     status_t err = mSource->read(&mediaBuf);
 
     if (err != OK) {
@@ -523,7 +523,7 @@
     ALOGI("%s", sdp.c_str());
 }
 
-void ARTPWriter::makeH264SPropParamSets(MediaBuffer *buffer) {
+void ARTPWriter::makeH264SPropParamSets(MediaBufferBase *buffer) {
     static const char kStartCode[] = "\x00\x00\x00\x01";
 
     const uint8_t *data =
@@ -567,7 +567,7 @@
     send(buffer, true /* isRTCP */);
 }
 
-void ARTPWriter::sendAVCData(MediaBuffer *mediaBuf) {
+void ARTPWriter::sendAVCData(MediaBufferBase *mediaBuf) {
     // 12 bytes RTP header + 2 bytes for the FU-indicator and FU-header.
     CHECK_GE(kMaxPacketSize, 12u + 2u);
 
@@ -663,7 +663,7 @@
     mLastNTPTime = GetNowNTP();
 }
 
-void ARTPWriter::sendH263Data(MediaBuffer *mediaBuf) {
+void ARTPWriter::sendH263Data(MediaBufferBase *mediaBuf) {
     CHECK_GE(kMaxPacketSize, 12u + 2u);
 
     int64_t timeUs;
@@ -741,7 +741,7 @@
     return frameSize;
 }
 
-void ARTPWriter::sendAMRData(MediaBuffer *mediaBuf) {
+void ARTPWriter::sendAMRData(MediaBufferBase *mediaBuf) {
     const uint8_t *mediaData =
         (const uint8_t *)mediaBuf->data() + mediaBuf->range_offset();
 
diff --git a/media/libstagefright/rtsp/ARTPWriter.h b/media/libstagefright/rtsp/ARTPWriter.h
index 92a64f2..2f13486 100644
--- a/media/libstagefright/rtsp/ARTPWriter.h
+++ b/media/libstagefright/rtsp/ARTPWriter.h
@@ -110,13 +110,13 @@
     void addSR(const sp<ABuffer> &buffer);
     void addSDES(const sp<ABuffer> &buffer);
 
-    void makeH264SPropParamSets(MediaBuffer *buffer);
+    void makeH264SPropParamSets(MediaBufferBase *buffer);
     void dumpSessionDesc();
 
     void sendBye();
-    void sendAVCData(MediaBuffer *mediaBuf);
-    void sendH263Data(MediaBuffer *mediaBuf);
-    void sendAMRData(MediaBuffer *mediaBuf);
+    void sendAVCData(MediaBufferBase *mediaBuf);
+    void sendH263Data(MediaBufferBase *mediaBuf);
+    void sendAMRData(MediaBufferBase *mediaBuf);
 
     void send(const sp<ABuffer> &buffer, bool isRTCP);
 
diff --git a/media/libstagefright/tests/Android.bp b/media/libstagefright/tests/Android.bp
index e67a949..be10fdc 100644
--- a/media/libstagefright/tests/Android.bp
+++ b/media/libstagefright/tests/Android.bp
@@ -1,46 +1,6 @@
 // Build the unit tests.
 
 cc_test {
-    name: "SurfaceMediaSource_test",
-
-    srcs: [
-        "SurfaceMediaSource_test.cpp",
-        "DummyRecorder.cpp",
-    ],
-
-    shared_libs: [
-        "libEGL",
-        "libGLESv2",
-        "libbinder",
-        "libcutils",
-        "libgui",
-        "libmedia",
-        "libmediaextractor",
-        "libstagefright",
-        "libstagefright_foundation",
-        "libstagefright_omx",
-        "libsync",
-        "libui",
-        "libutils",
-        "liblog",
-    ],
-
-    include_dirs: [
-        "frameworks/av/media/libstagefright",
-        "frameworks/av/media/libstagefright/include",
-        "frameworks/native/include/media/openmax",
-        "frameworks/native/include/media/hardware",
-    ],
-
-    cflags: [
-        "-Werror",
-        "-Wall",
-    ],
-
-    compile_multilib: "32",
-}
-
-cc_test {
     name: "MediaCodecListOverrides_test",
 
     srcs: ["MediaCodecListOverrides_test.cpp"],
diff --git a/media/libstagefright/tests/DummyRecorder.cpp b/media/libstagefright/tests/DummyRecorder.cpp
deleted file mode 100644
index 4f560cb..0000000
--- a/media/libstagefright/tests/DummyRecorder.cpp
+++ /dev/null
@@ -1,91 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#define LOG_TAG "DummyRecorder"
-// #define LOG_NDEBUG 0
-
-#include <media/MediaSource.h>
-#include <media/stagefright/MediaBuffer.h>
-#include "DummyRecorder.h"
-
-#include <utils/Log.h>
-
-namespace android {
-
-// static
-void *DummyRecorder::threadWrapper(void *pthis) {
-    ALOGV("ThreadWrapper: %p", pthis);
-    DummyRecorder *writer = static_cast<DummyRecorder *>(pthis);
-    writer->readFromSource();
-    return NULL;
-}
-
-
-status_t DummyRecorder::start() {
-    ALOGV("Start");
-    mStarted = true;
-
-    mSource->start();
-
-    pthread_attr_t attr;
-    pthread_attr_init(&attr);
-    pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_JOINABLE);
-    int err = pthread_create(&mThread, &attr, threadWrapper, this);
-    pthread_attr_destroy(&attr);
-
-    if (err) {
-        ALOGE("Error creating thread!");
-        return -ENODEV;
-    }
-    return OK;
-}
-
-
-status_t DummyRecorder::stop() {
-    ALOGV("Stop");
-    mStarted = false;
-
-    mSource->stop();
-    void *dummy;
-    pthread_join(mThread, &dummy);
-    status_t err = static_cast<status_t>(reinterpret_cast<uintptr_t>(dummy));
-
-    ALOGV("Ending the reading thread");
-    return err;
-}
-
-// pretend to read the source buffers
-void DummyRecorder::readFromSource() {
-    ALOGV("ReadFromSource");
-    if (!mStarted) {
-        return;
-    }
-
-    status_t err = OK;
-    MediaBuffer *buffer;
-    ALOGV("A fake writer accessing the frames");
-    while (mStarted && (err = mSource->read(&buffer)) == OK){
-        // if not getting a valid buffer from source, then exit
-        if (buffer == NULL) {
-            return;
-        }
-        buffer->release();
-        buffer = NULL;
-    }
-}
-
-
-} // end of namespace android
diff --git a/media/libstagefright/tests/DummyRecorder.h b/media/libstagefright/tests/DummyRecorder.h
deleted file mode 100644
index 0759777..0000000
--- a/media/libstagefright/tests/DummyRecorder.h
+++ /dev/null
@@ -1,58 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef DUMMY_RECORDER_H_
-#define DUMMY_RECORDER_H_
-
-#include <pthread.h>
-#include <utils/String8.h>
-#include <media/stagefright/foundation/ABase.h>
-
-
-namespace android {
-
-struct MediaSource;
-class MediaBuffer;
-
-class DummyRecorder {
-    public:
-    // The media source from which this will receive frames
-    sp<MediaSource> mSource;
-    bool mStarted;
-    pthread_t mThread;
-
-    status_t start();
-    status_t stop();
-
-    // actual entry point for the thread
-    void readFromSource();
-
-    // static function to wrap the actual thread entry point
-    static void *threadWrapper(void *pthis);
-
-    explicit DummyRecorder(const sp<MediaSource> &source) : mSource(source)
-                                                    , mStarted(false) {}
-    ~DummyRecorder( ) {}
-
-    private:
-
-    DISALLOW_EVIL_CONSTRUCTORS(DummyRecorder);
-};
-
-} // end of namespace android
-#endif
-
-
diff --git a/media/libstagefright/tests/SurfaceMediaSource_test.cpp b/media/libstagefright/tests/SurfaceMediaSource_test.cpp
deleted file mode 100644
index 051108f..0000000
--- a/media/libstagefright/tests/SurfaceMediaSource_test.cpp
+++ /dev/null
@@ -1,944 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "SurfaceMediaSource_test"
-
-#include <gtest/gtest.h>
-#include <utils/String8.h>
-#include <utils/String16.h>
-#include <utils/Errors.h>
-#include <fcntl.h>
-#include <unistd.h>
-
-#include <GLES2/gl2.h>
-
-#include <media/stagefright/SurfaceMediaSource.h>
-#include <media/mediarecorder.h>
-
-#include <ui/GraphicBuffer.h>
-#include <gui/Surface.h>
-#include <gui/ISurfaceComposer.h>
-#include <gui/Surface.h>
-#include <gui/SurfaceComposerClient.h>
-
-#include <binder/ProcessState.h>
-
-#include <media/stagefright/foundation/ADebug.h>
-#include <OMX_Component.h>
-
-#include "DummyRecorder.h"
-
-
-namespace android {
-
-class GLTest : public ::testing::Test {
-protected:
-
-    GLTest():
-            mEglDisplay(EGL_NO_DISPLAY),
-            mEglSurface(EGL_NO_SURFACE),
-            mEglContext(EGL_NO_CONTEXT) {
-    }
-
-    virtual void SetUp() {
-        ALOGV("GLTest::SetUp()");
-        mEglDisplay = eglGetDisplay(EGL_DEFAULT_DISPLAY);
-        ASSERT_EQ(EGL_SUCCESS, eglGetError());
-        ASSERT_NE(EGL_NO_DISPLAY, mEglDisplay);
-
-        EGLint majorVersion;
-        EGLint minorVersion;
-        EXPECT_TRUE(eglInitialize(mEglDisplay, &majorVersion, &minorVersion));
-        ASSERT_EQ(EGL_SUCCESS, eglGetError());
-        RecordProperty("EglVersionMajor", majorVersion);
-        RecordProperty("EglVersionMinor", minorVersion);
-
-        EGLint numConfigs = 0;
-        EXPECT_TRUE(eglChooseConfig(mEglDisplay, getConfigAttribs(), &mGlConfig,
-                1, &numConfigs));
-        ASSERT_EQ(EGL_SUCCESS, eglGetError());
-
-        char* displaySecsEnv = getenv("GLTEST_DISPLAY_SECS");
-        if (displaySecsEnv != NULL) {
-            mDisplaySecs = atoi(displaySecsEnv);
-            if (mDisplaySecs < 0) {
-                mDisplaySecs = 0;
-            }
-        } else {
-            mDisplaySecs = 0;
-        }
-
-        if (mDisplaySecs > 0) {
-            mComposerClient = new SurfaceComposerClient;
-            ASSERT_EQ(NO_ERROR, mComposerClient->initCheck());
-
-            mSurfaceControl = mComposerClient->createSurface(
-                    String8("Test Surface"),
-                    getSurfaceWidth(), getSurfaceHeight(),
-                    PIXEL_FORMAT_RGB_888, 0);
-
-            ASSERT_TRUE(mSurfaceControl != NULL);
-            ASSERT_TRUE(mSurfaceControl->isValid());
-
-            SurfaceComposerClient::Transaction{}
-                    .setLayer(mSurfaceControl, 0x7FFFFFFF)
-                    .show(mSurfaceControl)
-                    .apply();
-
-            sp<ANativeWindow> window = mSurfaceControl->getSurface();
-            mEglSurface = eglCreateWindowSurface(mEglDisplay, mGlConfig,
-                    window.get(), NULL);
-        } else {
-            ALOGV("No actual display. Choosing EGLSurface based on SurfaceMediaSource");
-            sp<IGraphicBufferProducer> sms = (new SurfaceMediaSource(
-                    getSurfaceWidth(), getSurfaceHeight()))->getProducer();
-            sp<Surface> stc = new Surface(sms);
-            sp<ANativeWindow> window = stc;
-
-            mEglSurface = eglCreateWindowSurface(mEglDisplay, mGlConfig,
-                    window.get(), NULL);
-        }
-        ASSERT_EQ(EGL_SUCCESS, eglGetError());
-        ASSERT_NE(EGL_NO_SURFACE, mEglSurface);
-
-        mEglContext = eglCreateContext(mEglDisplay, mGlConfig, EGL_NO_CONTEXT,
-                getContextAttribs());
-        ASSERT_EQ(EGL_SUCCESS, eglGetError());
-        ASSERT_NE(EGL_NO_CONTEXT, mEglContext);
-
-        EXPECT_TRUE(eglMakeCurrent(mEglDisplay, mEglSurface, mEglSurface,
-                mEglContext));
-        ASSERT_EQ(EGL_SUCCESS, eglGetError());
-
-        EGLint w, h;
-        EXPECT_TRUE(eglQuerySurface(mEglDisplay, mEglSurface, EGL_WIDTH, &w));
-        ASSERT_EQ(EGL_SUCCESS, eglGetError());
-        EXPECT_TRUE(eglQuerySurface(mEglDisplay, mEglSurface, EGL_HEIGHT, &h));
-        ASSERT_EQ(EGL_SUCCESS, eglGetError());
-        RecordProperty("EglSurfaceWidth", w);
-        RecordProperty("EglSurfaceHeight", h);
-
-        glViewport(0, 0, w, h);
-        ASSERT_EQ(GLenum(GL_NO_ERROR), glGetError());
-    }
-
-    virtual void TearDown() {
-        // Display the result
-        if (mDisplaySecs > 0 && mEglSurface != EGL_NO_SURFACE) {
-            eglSwapBuffers(mEglDisplay, mEglSurface);
-            sleep(mDisplaySecs);
-        }
-
-        if (mComposerClient != NULL) {
-            mComposerClient->dispose();
-        }
-        if (mEglContext != EGL_NO_CONTEXT) {
-            eglDestroyContext(mEglDisplay, mEglContext);
-        }
-        if (mEglSurface != EGL_NO_SURFACE) {
-            eglDestroySurface(mEglDisplay, mEglSurface);
-        }
-        if (mEglDisplay != EGL_NO_DISPLAY) {
-            eglTerminate(mEglDisplay);
-        }
-        ASSERT_EQ(EGL_SUCCESS, eglGetError());
-    }
-
-    virtual EGLint const* getConfigAttribs() {
-        ALOGV("GLTest getConfigAttribs");
-        static EGLint sDefaultConfigAttribs[] = {
-            EGL_SURFACE_TYPE, EGL_PBUFFER_BIT,
-            EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
-            EGL_RED_SIZE, 8,
-            EGL_GREEN_SIZE, 8,
-            EGL_BLUE_SIZE, 8,
-            EGL_ALPHA_SIZE, 8,
-            EGL_DEPTH_SIZE, 16,
-            EGL_STENCIL_SIZE, 8,
-            EGL_NONE };
-
-        return sDefaultConfigAttribs;
-    }
-
-    virtual EGLint const* getContextAttribs() {
-        static EGLint sDefaultContextAttribs[] = {
-            EGL_CONTEXT_CLIENT_VERSION, 2,
-            EGL_NONE };
-
-        return sDefaultContextAttribs;
-    }
-
-    virtual EGLint getSurfaceWidth() {
-        return 512;
-    }
-
-    virtual EGLint getSurfaceHeight() {
-        return 512;
-    }
-
-    void loadShader(GLenum shaderType, const char* pSource, GLuint* outShader) {
-        GLuint shader = glCreateShader(shaderType);
-        ASSERT_EQ(GLenum(GL_NO_ERROR), glGetError());
-        if (shader) {
-            glShaderSource(shader, 1, &pSource, NULL);
-            ASSERT_EQ(GLenum(GL_NO_ERROR), glGetError());
-            glCompileShader(shader);
-            ASSERT_EQ(GLenum(GL_NO_ERROR), glGetError());
-            GLint compiled = 0;
-            glGetShaderiv(shader, GL_COMPILE_STATUS, &compiled);
-            ASSERT_EQ(GLenum(GL_NO_ERROR), glGetError());
-            if (!compiled) {
-                GLint infoLen = 0;
-                glGetShaderiv(shader, GL_INFO_LOG_LENGTH, &infoLen);
-                ASSERT_EQ(GLenum(GL_NO_ERROR), glGetError());
-                if (infoLen) {
-                    char* buf = (char*) malloc(infoLen);
-                    if (buf) {
-                        glGetShaderInfoLog(shader, infoLen, NULL, buf);
-                        printf("Shader compile log:\n%s\n", buf);
-                        free(buf);
-                        FAIL();
-                    }
-                } else {
-                    char* buf = (char*) malloc(0x1000);
-                    if (buf) {
-                        glGetShaderInfoLog(shader, 0x1000, NULL, buf);
-                        printf("Shader compile log:\n%s\n", buf);
-                        free(buf);
-                        FAIL();
-                    }
-                }
-                glDeleteShader(shader);
-                shader = 0;
-            }
-        }
-        ASSERT_TRUE(shader != 0);
-        *outShader = shader;
-    }
-
-    void createProgram(const char* pVertexSource, const char* pFragmentSource,
-            GLuint* outPgm) {
-        GLuint vertexShader, fragmentShader;
-        {
-            SCOPED_TRACE("compiling vertex shader");
-            loadShader(GL_VERTEX_SHADER, pVertexSource, &vertexShader);
-            if (HasFatalFailure()) {
-                return;
-            }
-        }
-        {
-            SCOPED_TRACE("compiling fragment shader");
-            loadShader(GL_FRAGMENT_SHADER, pFragmentSource, &fragmentShader);
-            if (HasFatalFailure()) {
-                return;
-            }
-        }
-
-        GLuint program = glCreateProgram();
-        ASSERT_EQ(GLenum(GL_NO_ERROR), glGetError());
-        if (program) {
-            glAttachShader(program, vertexShader);
-            ASSERT_EQ(GLenum(GL_NO_ERROR), glGetError());
-            glAttachShader(program, fragmentShader);
-            ASSERT_EQ(GLenum(GL_NO_ERROR), glGetError());
-            glLinkProgram(program);
-            GLint linkStatus = GL_FALSE;
-            glGetProgramiv(program, GL_LINK_STATUS, &linkStatus);
-            if (linkStatus != GL_TRUE) {
-                GLint bufLength = 0;
-                glGetProgramiv(program, GL_INFO_LOG_LENGTH, &bufLength);
-                if (bufLength) {
-                    char* buf = (char*) malloc(bufLength);
-                    if (buf) {
-                        glGetProgramInfoLog(program, bufLength, NULL, buf);
-                        printf("Program link log:\n%s\n", buf);
-                        free(buf);
-                        FAIL();
-                    }
-                }
-                glDeleteProgram(program);
-                program = 0;
-            }
-        }
-        glDeleteShader(vertexShader);
-        glDeleteShader(fragmentShader);
-        ASSERT_TRUE(program != 0);
-        *outPgm = program;
-    }
-
-    static int abs(int value) {
-        return value > 0 ? value : -value;
-    }
-
-    ::testing::AssertionResult checkPixel(int x, int y, int r,
-            int g, int b, int a, int tolerance=2) {
-        GLubyte pixel[4];
-        String8 msg;
-        glReadPixels(x, y, 1, 1, GL_RGBA, GL_UNSIGNED_BYTE, pixel);
-        GLenum err = glGetError();
-        if (err != GL_NO_ERROR) {
-            msg += String8::format("error reading pixel: %#x", err);
-            while ((err = glGetError()) != GL_NO_ERROR) {
-                msg += String8::format(", %#x", err);
-            }
-            fprintf(stderr, "pixel check failure: %s\n", msg.string());
-            return ::testing::AssertionFailure(
-                    ::testing::Message(msg.string()));
-        }
-        if (r >= 0 && abs(r - int(pixel[0])) > tolerance) {
-            msg += String8::format("r(%d isn't %d)", pixel[0], r);
-        }
-        if (g >= 0 && abs(g - int(pixel[1])) > tolerance) {
-            if (!msg.isEmpty()) {
-                msg += " ";
-            }
-            msg += String8::format("g(%d isn't %d)", pixel[1], g);
-        }
-        if (b >= 0 && abs(b - int(pixel[2])) > tolerance) {
-            if (!msg.isEmpty()) {
-                msg += " ";
-            }
-            msg += String8::format("b(%d isn't %d)", pixel[2], b);
-        }
-        if (a >= 0 && abs(a - int(pixel[3])) > tolerance) {
-            if (!msg.isEmpty()) {
-                msg += " ";
-            }
-            msg += String8::format("a(%d isn't %d)", pixel[3], a);
-        }
-        if (!msg.isEmpty()) {
-            fprintf(stderr, "pixel check failure: %s\n", msg.string());
-            return ::testing::AssertionFailure(
-                    ::testing::Message(msg.string()));
-        } else {
-            return ::testing::AssertionSuccess();
-        }
-    }
-
-    int mDisplaySecs;
-    sp<SurfaceComposerClient> mComposerClient;
-    sp<SurfaceControl> mSurfaceControl;
-
-    EGLDisplay mEglDisplay;
-    EGLSurface mEglSurface;
-    EGLContext mEglContext;
-    EGLConfig  mGlConfig;
-};
-
-///////////////////////////////////////////////////////////////////////
-//    Class for  the NON-GL tests
-///////////////////////////////////////////////////////////////////////
-class SurfaceMediaSourceTest : public ::testing::Test {
-public:
-
-    SurfaceMediaSourceTest( ): mYuvTexWidth(176), mYuvTexHeight(144) { }
-    void oneBufferPass(int width, int height );
-    void oneBufferPassNoFill(int width, int height );
-    static void fillYV12Buffer(uint8_t* buf, int w, int h, int stride) ;
-    static void fillYV12BufferRect(uint8_t* buf, int w, int h,
-                        int stride, const android_native_rect_t& rect) ;
-protected:
-
-    virtual void SetUp() {
-        android::ProcessState::self()->startThreadPool();
-        mSMS = new SurfaceMediaSource(mYuvTexWidth, mYuvTexHeight);
-        mSTC = new Surface(mSMS->getProducer());
-        mANW = mSTC;
-    }
-
-    virtual void TearDown() {
-        mSMS.clear();
-        mSTC.clear();
-        mANW.clear();
-    }
-
-    const int mYuvTexWidth;
-    const int mYuvTexHeight;
-
-    sp<SurfaceMediaSource> mSMS;
-    sp<Surface> mSTC;
-    sp<ANativeWindow> mANW;
-};
-
-///////////////////////////////////////////////////////////////////////
-//    Class for  the GL tests
-///////////////////////////////////////////////////////////////////////
-class SurfaceMediaSourceGLTest : public GLTest {
-public:
-
-    SurfaceMediaSourceGLTest( ): mYuvTexWidth(176), mYuvTexHeight(144) { }
-    virtual EGLint const* getConfigAttribs();
-    void oneBufferPassGL(int num = 0);
-    static sp<MediaRecorder> setUpMediaRecorder(int fileDescriptor, int videoSource,
-        int outputFormat, int videoEncoder, int width, int height, int fps);
-protected:
-
-    virtual void SetUp() {
-        ALOGV("SMS-GLTest::SetUp()");
-        android::ProcessState::self()->startThreadPool();
-        mSMS = new SurfaceMediaSource(mYuvTexWidth, mYuvTexHeight);
-        mSTC = new Surface(mSMS->getProducer());
-        mANW = mSTC;
-
-        // Doing the setup related to the GL Side
-        GLTest::SetUp();
-    }
-
-    virtual void TearDown() {
-        mSMS.clear();
-        mSTC.clear();
-        mANW.clear();
-        GLTest::TearDown();
-    }
-
-    void setUpEGLSurfaceFromMediaRecorder(sp<MediaRecorder>& mr);
-
-    const int mYuvTexWidth;
-    const int mYuvTexHeight;
-
-    sp<SurfaceMediaSource> mSMS;
-    sp<Surface> mSTC;
-    sp<ANativeWindow> mANW;
-};
-
-/////////////////////////////////////////////////////////////////////
-// Methods in SurfaceMediaSourceGLTest
-/////////////////////////////////////////////////////////////////////
-EGLint const* SurfaceMediaSourceGLTest::getConfigAttribs() {
-        ALOGV("SurfaceMediaSourceGLTest getConfigAttribs");
-    static EGLint sDefaultConfigAttribs[] = {
-        EGL_SURFACE_TYPE, EGL_WINDOW_BIT,
-        EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
-        EGL_RED_SIZE, 8,
-        EGL_GREEN_SIZE, 8,
-        EGL_BLUE_SIZE, 8,
-        EGL_RECORDABLE_ANDROID, EGL_TRUE,
-        EGL_NONE };
-
-    return sDefaultConfigAttribs;
-}
-
-// One pass of dequeuing and queuing a GLBuffer
-void SurfaceMediaSourceGLTest::oneBufferPassGL(int num) {
-    int d = num % 50;
-    float f = 0.2f; // 0.1f * d;
-
-    glClearColor(0, 0.3, 0, 0.6);
-    glClear(GL_COLOR_BUFFER_BIT);
-
-    glEnable(GL_SCISSOR_TEST);
-    glScissor(4 + d, 4 + d, 4, 4);
-    glClearColor(1.0 - f, f, f, 1.0);
-    glClear(GL_COLOR_BUFFER_BIT);
-
-    glScissor(24 + d, 48 + d, 4, 4);
-    glClearColor(f, 1.0 - f, f, 1.0);
-    glClear(GL_COLOR_BUFFER_BIT);
-
-    glScissor(37 + d, 17 + d, 4, 4);
-    glClearColor(f, f, 1.0 - f, 1.0);
-    glClear(GL_COLOR_BUFFER_BIT);
-
-    // The following call dequeues and queues the buffer
-    eglSwapBuffers(mEglDisplay, mEglSurface);
-    ASSERT_EQ(EGL_SUCCESS, eglGetError());
-    glDisable(GL_SCISSOR_TEST);
-}
-
-// Set up the MediaRecorder which runs in the same process as mediaserver
-sp<MediaRecorder> SurfaceMediaSourceGLTest::setUpMediaRecorder(int fd, int videoSource,
-        int outputFormat, int videoEncoder, int width, int height, int fps) {
-    sp<MediaRecorder> mr = new MediaRecorder(String16());
-    mr->setVideoSource(videoSource);
-    mr->setOutputFormat(outputFormat);
-    mr->setVideoEncoder(videoEncoder);
-    mr->setOutputFile(fd);
-    mr->setVideoSize(width, height);
-    mr->setVideoFrameRate(fps);
-    mr->prepare();
-    ALOGV("Starting MediaRecorder...");
-    CHECK_EQ((status_t)OK, mr->start());
-    return mr;
-}
-
-// query the mediarecorder for a surfacemediasource and create an egl surface with it
-void SurfaceMediaSourceGLTest::setUpEGLSurfaceFromMediaRecorder(sp<MediaRecorder>& mr) {
-    sp<IGraphicBufferProducer> iST = mr->querySurfaceMediaSourceFromMediaServer();
-    mSTC = new Surface(iST);
-    mANW = mSTC;
-
-    if (mEglSurface != EGL_NO_SURFACE) {
-        EXPECT_TRUE(eglDestroySurface(mEglDisplay, mEglSurface));
-        mEglSurface = EGL_NO_SURFACE;
-    }
-    mEglSurface = eglCreateWindowSurface(mEglDisplay, mGlConfig,
-                                mANW.get(), NULL);
-    ASSERT_EQ(EGL_SUCCESS, eglGetError());
-    ASSERT_NE(EGL_NO_SURFACE, mEglSurface) ;
-
-    EXPECT_TRUE(eglMakeCurrent(mEglDisplay, mEglSurface, mEglSurface,
-            mEglContext));
-    ASSERT_EQ(EGL_SUCCESS, eglGetError());
-}
-
-
-/////////////////////////////////////////////////////////////////////
-// Methods in SurfaceMediaSourceTest
-/////////////////////////////////////////////////////////////////////
-
-// One pass of dequeuing and queuing the buffer. Fill it in with
-// cpu YV12 buffer
-void SurfaceMediaSourceTest::oneBufferPass(int width, int height ) {
-    ANativeWindowBuffer* anb;
-    ASSERT_EQ(NO_ERROR, native_window_dequeue_buffer_and_wait(mANW.get(), &anb));
-    ASSERT_TRUE(anb != NULL);
-
-
-    // Fill the buffer with a checkerboard pattern
-    uint8_t* img = NULL;
-    sp<GraphicBuffer> buf(GraphicBuffer::from(anb));
-    buf->lock(GRALLOC_USAGE_SW_WRITE_OFTEN, (void**)(&img));
-    SurfaceMediaSourceTest::fillYV12Buffer(img, width, height, buf->getStride());
-    buf->unlock();
-
-    ASSERT_EQ(NO_ERROR, mANW->queueBuffer(mANW.get(), buf->getNativeBuffer(),
-            -1));
-}
-
-// Dequeuing and queuing the buffer without really filling it in.
-void SurfaceMediaSourceTest::oneBufferPassNoFill(
-        int /* width */, int /* height  */) {
-    ANativeWindowBuffer* anb;
-    ASSERT_EQ(NO_ERROR, native_window_dequeue_buffer_and_wait(mANW.get(), &anb));
-    ASSERT_TRUE(anb != NULL);
-
-    // We do not fill the buffer in. Just queue it back.
-    sp<GraphicBuffer> buf(GraphicBuffer::from(anb));
-    ASSERT_EQ(NO_ERROR, mANW->queueBuffer(mANW.get(), buf->getNativeBuffer(),
-            -1));
-}
-
-// Fill a YV12 buffer with a multi-colored checkerboard pattern
-void SurfaceMediaSourceTest::fillYV12Buffer(uint8_t* buf, int w, int h, int stride) {
-    const int blockWidth = w > 16 ? w / 16 : 1;
-    const int blockHeight = h > 16 ? h / 16 : 1;
-    const int yuvTexOffsetY = 0;
-    int yuvTexStrideY = stride;
-    int yuvTexOffsetV = yuvTexStrideY * h;
-    int yuvTexStrideV = (yuvTexStrideY/2 + 0xf) & ~0xf;
-    int yuvTexOffsetU = yuvTexOffsetV + yuvTexStrideV * h/2;
-    int yuvTexStrideU = yuvTexStrideV;
-    for (int x = 0; x < w; x++) {
-        for (int y = 0; y < h; y++) {
-            int parityX = (x / blockWidth) & 1;
-            int parityY = (y / blockHeight) & 1;
-            unsigned char intensity = (parityX ^ parityY) ? 63 : 191;
-            buf[yuvTexOffsetY + (y * yuvTexStrideY) + x] = intensity;
-            if (x < w / 2 && y < h / 2) {
-                buf[yuvTexOffsetU + (y * yuvTexStrideU) + x] = intensity;
-                if (x * 2 < w / 2 && y * 2 < h / 2) {
-                    buf[yuvTexOffsetV + (y*2 * yuvTexStrideV) + x*2 + 0] =
-                    buf[yuvTexOffsetV + (y*2 * yuvTexStrideV) + x*2 + 1] =
-                    buf[yuvTexOffsetV + ((y*2+1) * yuvTexStrideV) + x*2 + 0] =
-                    buf[yuvTexOffsetV + ((y*2+1) * yuvTexStrideV) + x*2 + 1] =
-                        intensity;
-                }
-            }
-        }
-    }
-}
-
-// Fill a YV12 buffer with red outside a given rectangle and green inside it.
-void SurfaceMediaSourceTest::fillYV12BufferRect(uint8_t* buf, int w,
-                  int h, int stride, const android_native_rect_t& rect) {
-    const int yuvTexOffsetY = 0;
-    int yuvTexStrideY = stride;
-    int yuvTexOffsetV = yuvTexStrideY * h;
-    int yuvTexStrideV = (yuvTexStrideY/2 + 0xf) & ~0xf;
-    int yuvTexOffsetU = yuvTexOffsetV + yuvTexStrideV * h/2;
-    int yuvTexStrideU = yuvTexStrideV;
-    for (int x = 0; x < w; x++) {
-        for (int y = 0; y < h; y++) {
-            bool inside = rect.left <= x && x < rect.right &&
-                    rect.top <= y && y < rect.bottom;
-            buf[yuvTexOffsetY + (y * yuvTexStrideY) + x] = inside ? 240 : 64;
-            if (x < w / 2 && y < h / 2) {
-                bool inside = rect.left <= 2*x && 2*x < rect.right &&
-                        rect.top <= 2*y && 2*y < rect.bottom;
-                buf[yuvTexOffsetU + (y * yuvTexStrideU) + x] = 16;
-                buf[yuvTexOffsetV + (y * yuvTexStrideV) + x] =
-                                                inside ? 16 : 255;
-            }
-        }
-    }
-}  ///////// End of class SurfaceMediaSourceTest
-
-///////////////////////////////////////////////////////////////////
-// Class to imitate the recording     /////////////////////////////
-// ////////////////////////////////////////////////////////////////
-struct SimpleDummyRecorder {
-        sp<MediaSource> mSource;
-
-        explicit SimpleDummyRecorder
-                (const sp<MediaSource> &source): mSource(source) {}
-
-        status_t start() { return mSource->start();}
-        status_t stop()  { return mSource->stop();}
-
-        // fakes reading from a media source
-        status_t readFromSource() {
-            MediaBuffer *buffer;
-            status_t err = mSource->read(&buffer);
-            if (err != OK) {
-                return err;
-            }
-            buffer->release();
-            buffer = NULL;
-            return OK;
-        }
-};
-///////////////////////////////////////////////////////////////////
-//           TESTS
-// SurfaceMediaSourceTest class contains tests that fill the buffers
-// using the cpu calls
-// SurfaceMediaSourceGLTest class contains tests that fill the buffers
-// using the GL calls.
-// TODO: None of the tests actually verify the encoded images, so at this point
-// these are mostly functionality tests + visual inspection.
-//////////////////////////////////////////////////////////////////////
-
-// Just pass one buffer from the native_window to the SurfaceMediaSource
-// Dummy Encoder
-static int testId = 1;
-TEST_F(SurfaceMediaSourceTest, DISABLED_DummyEncodingFromCpuFilledYV12BufferNpotOneBufferPass) {
-    ALOGV("Test # %d", testId++);
-    ALOGV("Testing OneBufferPass ******************************");
-
-    ASSERT_EQ(NO_ERROR, native_window_set_buffers_format(mANW.get(),
-            HAL_PIXEL_FORMAT_YV12));
-    oneBufferPass(mYuvTexWidth, mYuvTexHeight);
-}
-
-// Pass a buffer with the wrong width and height; it should not be accepted
-// Dummy Encoder
-TEST_F(SurfaceMediaSourceTest, DISABLED_DummyEncodingFromCpuFilledYV12BufferNpotWrongSizeBufferPass) {
-    ALOGV("Test # %d", testId++);
-    ALOGV("Testing Wrong size BufferPass ******************************");
-
-    // set the client-side buffer size to differ from the server size
-    ASSERT_EQ(NO_ERROR, native_window_set_buffers_dimensions(mANW.get(),
-             10, 10));
-    ASSERT_EQ(NO_ERROR, native_window_set_buffers_format(mANW.get(),
-            HAL_PIXEL_FORMAT_YV12));
-
-    ANativeWindowBuffer* anb;
-
-    // Note: make sure we get an ERROR back when dequeuing!
-    ASSERT_NE(NO_ERROR, native_window_dequeue_buffer_and_wait(mANW.get(), &anb));
-}
-
-// pass multiple buffers from the native_window to the SurfaceMediaSource
-// Dummy Encoder
-TEST_F(SurfaceMediaSourceTest,  DISABLED_DummyEncodingFromCpuFilledYV12BufferNpotMultiBufferPass) {
-    ALOGV("Test # %d", testId++);
-    ALOGV("Testing MultiBufferPass, Dummy Recorder *********************");
-    ASSERT_EQ(NO_ERROR, native_window_set_buffers_format(mANW.get(),
-            HAL_PIXEL_FORMAT_YV12));
-
-    SimpleDummyRecorder writer(mSMS);
-    writer.start();
-
-    int32_t nFramesCount = 0;
-    while (nFramesCount < 300) {
-        oneBufferPass(mYuvTexWidth, mYuvTexHeight);
-
-        ASSERT_EQ(NO_ERROR, writer.readFromSource());
-
-        nFramesCount++;
-    }
-    writer.stop();
-}
-
-// Delayed pass of multiple buffers from the native_window to the SurfaceMediaSource
-// Dummy Encoder
-TEST_F(SurfaceMediaSourceTest,  DummyLagEncodingFromCpuFilledYV12BufferNpotMultiBufferPass) {
-    ALOGV("Test # %d", testId++);
-    ALOGV("Testing MultiBufferPass, Dummy Recorder Lagging **************");
-
-    ASSERT_EQ(NO_ERROR, native_window_set_buffers_format(mANW.get(),
-            HAL_PIXEL_FORMAT_YV12));
-
-    SimpleDummyRecorder writer(mSMS);
-    writer.start();
-
-    int32_t nFramesCount = 1;
-    const int FRAMES_LAG = SurfaceMediaSource::MIN_UNDEQUEUED_BUFFERS;
-
-    while (nFramesCount <= 300) {
-        ALOGV("Frame: %d", nFramesCount);
-        oneBufferPass(mYuvTexWidth, mYuvTexHeight);
-        // Forcing the writer to lag behind a few frames
-        if (nFramesCount > FRAMES_LAG) {
-            ASSERT_EQ(NO_ERROR, writer.readFromSource());
-        }
-        nFramesCount++;
-    }
-    writer.stop();
-}
-
-// pass multiple buffers from the native_window to the SurfaceMediaSource
-// A dummy writer (MULTITHREADED) is used to simulate actual MPEG4Writer
-TEST_F(SurfaceMediaSourceTest, DummyThreadedEncodingFromCpuFilledYV12BufferNpotMultiBufferPass) {
-    ALOGV("Test # %d", testId++);
-    ALOGV("Testing MultiBufferPass, Dummy Recorder Multi-Threaded **********");
-    ASSERT_EQ(NO_ERROR, native_window_set_buffers_format(mANW.get(),
-            HAL_PIXEL_FORMAT_YV12));
-
-    DummyRecorder writer(mSMS);
-    writer.start();
-
-    int32_t nFramesCount = 0;
-    while (nFramesCount <= 300) {
-        ALOGV("Frame: %d", nFramesCount);
-        oneBufferPass(mYuvTexWidth, mYuvTexHeight);
-
-        nFramesCount++;
-    }
-    writer.stop();
-}
-
-// Test to examine actual encoding using mediarecorder
-// We use the mediaserver to create a mediarecorder and send
-// it back to us. So SurfaceMediaSource lives in the same process
-// as the mediaserver.
-// Very close to the actual camera, except that the
-// buffers are filled and queued by the CPU instead of GL.
-TEST_F(SurfaceMediaSourceTest, DISABLED_EncodingFromCpuYV12BufferNpotWriteMediaServer) {
-    ALOGV("Test # %d", testId++);
-    ALOGV("************** Testing the whole pipeline with actual MediaRecorder ***********");
-    ALOGV("************** SurfaceMediaSource is same process as mediaserver    ***********");
-
-    const char *fileName = "/sdcard/outputSurfEncMSource.mp4";
-    int fd = open(fileName, O_RDWR | O_CREAT, 0744);
-    if (fd < 0) {
-        ALOGE("ERROR: Could not open the the file %s, fd = %d !!", fileName, fd);
-    }
-    CHECK(fd >= 0);
-
-    sp<MediaRecorder> mr = SurfaceMediaSourceGLTest::setUpMediaRecorder(fd,
-            VIDEO_SOURCE_SURFACE, OUTPUT_FORMAT_MPEG_4, VIDEO_ENCODER_H264,
-            mYuvTexWidth, mYuvTexHeight, 30);
-    // get the reference to the surfacemediasource living in
-    // mediaserver that is created by stagefrightrecorder
-    sp<IGraphicBufferProducer> iST = mr->querySurfaceMediaSourceFromMediaServer();
-    mSTC = new Surface(iST);
-    mANW = mSTC;
-    ASSERT_EQ(NO_ERROR, native_window_api_connect(mANW.get(), NATIVE_WINDOW_API_CPU));
-    ASSERT_EQ(NO_ERROR, native_window_set_buffers_format(mANW.get(),
-                                                HAL_PIXEL_FORMAT_YV12));
-
-    int32_t nFramesCount = 0;
-    while (nFramesCount <= 300) {
-        oneBufferPassNoFill(mYuvTexWidth, mYuvTexHeight);
-        nFramesCount++;
-        ALOGV("framesCount = %d", nFramesCount);
-    }
-
-    ASSERT_EQ(NO_ERROR, native_window_api_disconnect(mANW.get(), NATIVE_WINDOW_API_CPU));
-    ALOGV("Stopping MediaRecorder...");
-    CHECK_EQ((status_t)OK, mr->stop());
-    mr.clear();
-    close(fd);
-}
-
-//////////////////////////////////////////////////////////////////////
-// GL tests
-/////////////////////////////////////////////////////////////////////
-
-// Test to examine whether we can choose the Recordable Android GLConfig
-// DummyRecorder used- no real encoding here
-TEST_F(SurfaceMediaSourceGLTest, ChooseAndroidRecordableEGLConfigDummyWriter) {
-    ALOGV("Test # %d", testId++);
-    ALOGV("Verify creating a surface w/ right config + dummy writer*********");
-
-    mSMS = new SurfaceMediaSource(mYuvTexWidth, mYuvTexHeight);
-    mSTC = new Surface(mSMS->getProducer());
-    mANW = mSTC;
-
-    DummyRecorder writer(mSMS);
-    writer.start();
-
-    if (mEglSurface != EGL_NO_SURFACE) {
-        EXPECT_TRUE(eglDestroySurface(mEglDisplay, mEglSurface));
-        mEglSurface = EGL_NO_SURFACE;
-    }
-
-    mEglSurface = eglCreateWindowSurface(mEglDisplay, mGlConfig,
-                                mANW.get(), NULL);
-    ASSERT_EQ(EGL_SUCCESS, eglGetError());
-    ASSERT_NE(EGL_NO_SURFACE, mEglSurface) ;
-
-    EXPECT_TRUE(eglMakeCurrent(mEglDisplay, mEglSurface, mEglSurface,
-            mEglContext));
-    ASSERT_EQ(EGL_SUCCESS, eglGetError());
-
-    int32_t nFramesCount = 0;
-    while (nFramesCount <= 300) {
-        oneBufferPassGL();
-        nFramesCount++;
-        ALOGV("framesCount = %d", nFramesCount);
-    }
-
-    EXPECT_TRUE(eglMakeCurrent(mEglDisplay, EGL_NO_SURFACE, EGL_NO_SURFACE,
-            EGL_NO_CONTEXT));
-    ASSERT_EQ(EGL_SUCCESS, eglGetError());
-    eglDestroySurface(mEglDisplay, mEglSurface);
-    mEglSurface = EGL_NO_SURFACE;
-
-    writer.stop();
-}
-// Test to examine whether we can render GL buffers into the surface
-// created with the native window handle
-TEST_F(SurfaceMediaSourceGLTest, RenderingToRecordableEGLSurfaceWorks) {
-    ALOGV("Test # %d", testId++);
-    ALOGV("RenderingToRecordableEGLSurfaceWorks *********************");
-    // Do the producer side of things
-    glClearColor(0.6, 0.6, 0.6, 0.6);
-    glClear(GL_COLOR_BUFFER_BIT);
-
-    glEnable(GL_SCISSOR_TEST);
-    glScissor(4, 4, 4, 4);
-    glClearColor(1.0, 0.0, 0.0, 1.0);
-    glClear(GL_COLOR_BUFFER_BIT);
-
-    glScissor(24, 48, 4, 4);
-    glClearColor(0.0, 1.0, 0.0, 1.0);
-    glClear(GL_COLOR_BUFFER_BIT);
-
-    glScissor(37, 17, 4, 4);
-    glClearColor(0.0, 0.0, 1.0, 1.0);
-    glClear(GL_COLOR_BUFFER_BIT);
-
-    EXPECT_TRUE(checkPixel( 0,  0, 153, 153, 153, 153));
-    EXPECT_TRUE(checkPixel(63,  0, 153, 153, 153, 153));
-    EXPECT_TRUE(checkPixel(63, 63, 153, 153, 153, 153));
-    EXPECT_TRUE(checkPixel( 0, 63, 153, 153, 153, 153));
-
-    EXPECT_TRUE(checkPixel( 4,  7, 255,   0,   0, 255));
-    EXPECT_TRUE(checkPixel(25, 51,   0, 255,   0, 255));
-    EXPECT_TRUE(checkPixel(40, 19,   0,   0, 255, 255));
-    EXPECT_TRUE(checkPixel(29, 51, 153, 153, 153, 153));
-    EXPECT_TRUE(checkPixel( 5, 32, 153, 153, 153, 153));
-    EXPECT_TRUE(checkPixel(13,  8, 153, 153, 153, 153));
-    EXPECT_TRUE(checkPixel(46,  3, 153, 153, 153, 153));
-    EXPECT_TRUE(checkPixel(30, 33, 153, 153, 153, 153));
-    EXPECT_TRUE(checkPixel( 6, 52, 153, 153, 153, 153));
-    EXPECT_TRUE(checkPixel(55, 33, 153, 153, 153, 153));
-    EXPECT_TRUE(checkPixel(16, 29, 153, 153, 153, 153));
-    EXPECT_TRUE(checkPixel( 1, 30, 153, 153, 153, 153));
-    EXPECT_TRUE(checkPixel(41, 37, 153, 153, 153, 153));
-    EXPECT_TRUE(checkPixel(46, 29, 153, 153, 153, 153));
-    EXPECT_TRUE(checkPixel(15, 25, 153, 153, 153, 153));
-    EXPECT_TRUE(checkPixel( 3, 52, 153, 153, 153, 153));
-}
-
-// Test to examine the actual encoding with GL buffers
-// Actual encoder, Actual GL Buffers Filled SurfaceMediaSource
-// The same pattern is rendered every frame
-TEST_F(SurfaceMediaSourceGLTest, EncodingFromGLRgbaSameImageEachBufNpotWrite) {
-    ALOGV("Test # %d", testId++);
-    ALOGV("************** Testing the whole pipeline with actual Recorder ***********");
-    ALOGV("************** GL Filling the buffers ***********");
-    // Note: No need to set the color format for the buffers; the color format is
-    // carried in the gralloc buffers themselves.
-
-    const char *fileName = "/sdcard/outputSurfEncMSourceGL.mp4";
-    int fd = open(fileName, O_RDWR | O_CREAT, 0744);
-    if (fd < 0) {
-        ALOGE("ERROR: Could not open the the file %s, fd = %d !!", fileName, fd);
-    }
-    CHECK(fd >= 0);
-
-    sp<MediaRecorder> mr = setUpMediaRecorder(fd, VIDEO_SOURCE_SURFACE,
-            OUTPUT_FORMAT_MPEG_4, VIDEO_ENCODER_H264, mYuvTexWidth, mYuvTexHeight, 30);
-
-    // get the reference to the surfacemediasource living in
-    // mediaserver that is created by stagefrightrecorder
-    setUpEGLSurfaceFromMediaRecorder(mr);
-
-    int32_t nFramesCount = 0;
-    while (nFramesCount <= 300) {
-        oneBufferPassGL();
-        nFramesCount++;
-        ALOGV("framesCount = %d", nFramesCount);
-    }
-
-    EXPECT_TRUE(eglMakeCurrent(mEglDisplay, EGL_NO_SURFACE, EGL_NO_SURFACE,
-            EGL_NO_CONTEXT));
-    ASSERT_EQ(EGL_SUCCESS, eglGetError());
-    eglDestroySurface(mEglDisplay, mEglSurface);
-    mEglSurface = EGL_NO_SURFACE;
-
-    ALOGV("Stopping MediaRecorder...");
-    CHECK_EQ((status_t)OK, mr->stop());
-    mr.clear();
-    close(fd);
-}
-
-// Test to examine the actual encoding from the GL Buffers
-// Actual encoder, Actual GL Buffers Filled SurfaceMediaSource
-// A different pattern is rendered every frame
-TEST_F(SurfaceMediaSourceGLTest, EncodingFromGLRgbaDiffImageEachBufNpotWrite) {
-    ALOGV("Test # %d", testId++);
-    ALOGV("************** Testing the whole pipeline with actual Recorder ***********");
-    ALOGV("************** Diff GL Filling the buffers ***********");
-    // Note: No need to set the color format for the buffers; the color format is
-    // carried in the gralloc buffers themselves.
-
-    const char *fileName = "/sdcard/outputSurfEncMSourceGLDiff.mp4";
-    int fd = open(fileName, O_RDWR | O_CREAT, 0744);
-    if (fd < 0) {
-        ALOGE("ERROR: Could not open the the file %s, fd = %d !!", fileName, fd);
-    }
-    CHECK(fd >= 0);
-
-    sp<MediaRecorder> mr = setUpMediaRecorder(fd, VIDEO_SOURCE_SURFACE,
-            OUTPUT_FORMAT_MPEG_4, VIDEO_ENCODER_H264, mYuvTexWidth, mYuvTexHeight, 30);
-
-    // get the reference to the surfacemediasource living in
-    // mediaserver that is created by stagefrightrecorder
-    setUpEGLSurfaceFromMediaRecorder(mr);
-
-    int32_t nFramesCount = 0;
-    while (nFramesCount <= 300) {
-        oneBufferPassGL(nFramesCount);
-        nFramesCount++;
-        ALOGV("framesCount = %d", nFramesCount);
-    }
-
-    EXPECT_TRUE(eglMakeCurrent(mEglDisplay, EGL_NO_SURFACE, EGL_NO_SURFACE,
-            EGL_NO_CONTEXT));
-    ASSERT_EQ(EGL_SUCCESS, eglGetError());
-    eglDestroySurface(mEglDisplay, mEglSurface);
-    mEglSurface = EGL_NO_SURFACE;
-
-    ALOGV("Stopping MediaRecorder...");
-    CHECK_EQ((status_t)OK, mr->stop());
-    mr.clear();
-    close(fd);
-}
-} // namespace android
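Note on the deleted test file above: its fillYV12Buffer()/fillYV12BufferRect() helpers encode the YV12 plane layout directly — Y plane first, then V, then U, with the chroma stride rounded up to the next multiple of 16. Below is a minimal standalone sketch of that layout arithmetic, kept outside the patch and using no Android types; the 176x144 (QCIF) size and stride == width are illustrative assumptions, not something this change adds.

    // Standalone sketch of the YV12 plane layout used by the removed
    // fillYV12Buffer() helper: Y plane, then V, then U, with the chroma
    // stride rounded up to a 16-byte multiple.
    #include <cstddef>
    #include <cstdint>
    #include <cstdio>
    #include <vector>

    struct Yv12Layout {
        int offsetY, strideY;
        int offsetV, strideV;
        int offsetU, strideU;
        size_t size;
    };

    static Yv12Layout computeYv12Layout(int w, int h, int stride) {
        Yv12Layout l;
        l.offsetY = 0;
        l.strideY = stride;
        l.offsetV = l.strideY * h;                  // V plane follows Y
        l.strideV = (l.strideY / 2 + 0xf) & ~0xf;   // round chroma stride up to 16
        l.offsetU = l.offsetV + l.strideV * h / 2;  // U plane follows V
        l.strideU = l.strideV;
        l.size = static_cast<size_t>(l.offsetU + l.strideU * h / 2);
        (void)w;
        return l;
    }

    int main() {
        // Same 176x144 size as the removed test; stride == width is assumed here.
        const int w = 176, h = 144;
        Yv12Layout l = computeYv12Layout(w, h, w);
        std::vector<uint8_t> buf(l.size, 0);
        // Checkerboard in the Y plane, 16x16 blocks as in the removed helper.
        const int blockWidth = w / 16, blockHeight = h / 16;
        for (int y = 0; y < h; y++) {
            for (int x = 0; x < w; x++) {
                int parity = ((x / blockWidth) & 1) ^ ((y / blockHeight) & 1);
                buf[l.offsetY + y * l.strideY + x] = parity ? 63 : 191;
            }
        }
        std::printf("V plane at %d (stride %d), U plane at %d, total %zu bytes\n",
                    l.offsetV, l.strideV, l.offsetU, l.size);
        return 0;
    }
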
diff --git a/media/libstagefright/webm/WebmFrame.cpp b/media/libstagefright/webm/WebmFrame.cpp
index e5134ed..4b0d47c 100644
--- a/media/libstagefright/webm/WebmFrame.cpp
+++ b/media/libstagefright/webm/WebmFrame.cpp
@@ -27,7 +27,7 @@
 using namespace webm;
 
 namespace {
-sp<ABuffer> toABuffer(MediaBuffer *mbuf) {
+sp<ABuffer> toABuffer(MediaBufferBase *mbuf) {
     sp<ABuffer> abuf = new ABuffer(mbuf->range_length());
     memcpy(abuf->data(), (uint8_t*) mbuf->data() + mbuf->range_offset(), mbuf->range_length());
     return abuf;
@@ -46,7 +46,7 @@
       mEos(true) {
 }
 
-WebmFrame::WebmFrame(int type, bool key, uint64_t absTimecode, MediaBuffer *mbuf)
+WebmFrame::WebmFrame(int type, bool key, uint64_t absTimecode, MediaBufferBase *mbuf)
     : mType(type),
       mKey(key),
       mAbsTimecode(absTimecode),
diff --git a/media/libstagefright/webm/WebmFrame.h b/media/libstagefright/webm/WebmFrame.h
index 4f0b055..a410a87 100644
--- a/media/libstagefright/webm/WebmFrame.h
+++ b/media/libstagefright/webm/WebmFrame.h
@@ -30,7 +30,7 @@
     const bool mEos;
 
     WebmFrame();
-    WebmFrame(int type, bool key, uint64_t absTimecode, MediaBuffer *buf);
+    WebmFrame(int type, bool key, uint64_t absTimecode, MediaBufferBase *buf);
     ~WebmFrame() {}
 
     sp<WebmElement> SimpleBlock(uint64_t baseTimecode) const;
diff --git a/media/libstagefright/webm/WebmFrameThread.cpp b/media/libstagefright/webm/WebmFrameThread.cpp
index 420890b..0d4c699 100644
--- a/media/libstagefright/webm/WebmFrameThread.cpp
+++ b/media/libstagefright/webm/WebmFrameThread.cpp
@@ -337,7 +337,7 @@
     mStartTimeUs = kUninitialized;
 
     status_t err = OK;
-    MediaBuffer *buffer;
+    MediaBufferBase *buffer;
     while (!mDone && (err = mSource->read(&buffer, NULL)) == OK) {
         if (buffer->range_length() == 0) {
             buffer->release();
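The writer-thread hunk above is part of the MediaBuffer -> MediaBufferBase migration; the ownership contract it relies on is unchanged: read() hands the caller one buffer per call, and the caller must release() it on every path, including empty-range frames. A hedged sketch of that pattern with stand-in types (FakeSource and FakeBufferBase are hypothetical, not the real libstagefright classes).

    #include <cstddef>
    #include <cstdio>

    using status_t = int;
    static const status_t OK = 0;
    static const status_t ERROR_END_OF_STREAM = -1011;

    // Stand-in for MediaBufferBase: one heap object per read(), released by the consumer.
    struct FakeBufferBase {
        explicit FakeBufferBase(size_t n) : mRangeLength(n) {}
        size_t range_length() const { return mRangeLength; }
        void release() { delete this; }   // real buffers return to a MediaBufferGroup
    private:
        size_t mRangeLength;
    };

    // Stand-in for a MediaSource-like producer.
    struct FakeSource {
        int remaining = 3;
        status_t read(FakeBufferBase **out) {
            if (remaining <= 0) return ERROR_END_OF_STREAM;
            // Emit one empty-range buffer to exercise the skip path.
            *out = new FakeBufferBase(remaining == 2 ? 0 : 4096);
            remaining--;
            return OK;
        }
    };

    int main() {
        FakeSource source;
        FakeBufferBase *buffer = nullptr;
        status_t err;
        while ((err = source.read(&buffer)) == OK) {
            if (buffer->range_length() == 0) {
                buffer->release();        // must release even when skipping the frame
                buffer = nullptr;
                continue;
            }
            std::printf("consumed %zu bytes\n", buffer->range_length());
            buffer->release();            // consumer owns the buffer after read()
            buffer = nullptr;
        }
        return 0;
    }
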
diff --git a/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h b/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h
index cd2174d..78a184b 100644
--- a/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h
+++ b/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h
@@ -133,6 +133,9 @@
             // Note: called after changeRefCount(-1);
             void stop();
             void close();
+            status_t openDuplicating(const sp<SwAudioOutputDescriptor>& output1,
+                                     const sp<SwAudioOutputDescriptor>& output2,
+                                     audio_io_handle_t *ioHandle);
 
     const sp<IOProfile> mProfile;          // I/O profile this output derives from
     audio_io_handle_t mIoHandle;           // output handle
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
index 17fc272..caaa0f7 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
@@ -222,7 +222,7 @@
 SwAudioOutputDescriptor::SwAudioOutputDescriptor(const sp<IOProfile>& profile,
                                                  AudioPolicyClientInterface *clientInterface)
     : AudioOutputDescriptor(profile, clientInterface),
-    mProfile(profile), mIoHandle(0), mLatency(0),
+    mProfile(profile), mIoHandle(AUDIO_IO_HANDLE_NONE), mLatency(0),
     mFlags((audio_output_flags_t)0), mPolicyMix(NULL),
     mOutput1(0), mOutput2(0), mDirectOpenCount(0),
     mDirectClientSession(AUDIO_SESSION_NONE), mGlobalRefCount(0)
@@ -509,6 +509,30 @@
     }
 }
 
+status_t SwAudioOutputDescriptor::openDuplicating(const sp<SwAudioOutputDescriptor>& output1,
+                                                  const sp<SwAudioOutputDescriptor>& output2,
+                                                  audio_io_handle_t *ioHandle)
+{
+    // open a duplicating output thread for the new output and the primary output
+    // Note: openDuplicateOutput() API expects the output handles in the reverse order from the
+    // numbering in SwAudioOutputDescriptor mOutput1 and mOutput2
+    *ioHandle = mClientInterface->openDuplicateOutput(output2->mIoHandle, output1->mIoHandle);
+    if (*ioHandle == AUDIO_IO_HANDLE_NONE) {
+        return INVALID_OPERATION;
+    }
+
+    mId = AudioPort::getNextUniqueId();
+    mIoHandle = *ioHandle;
+    mOutput1 = output1;
+    mOutput2 = output2;
+    mSamplingRate = output2->mSamplingRate;
+    mFormat = output2->mFormat;
+    mChannelMask = output2->mChannelMask;
+    mLatency = output2->mLatency;
+
+    return NO_ERROR;
+}
+
 // HwAudioOutputDescriptor implementation
 HwAudioOutputDescriptor::HwAudioOutputDescriptor(const sp<AudioSourceDescriptor>& source,
                                                  AudioPolicyClientInterface *clientInterface)
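The openDuplicating() method added above folds the open-then-populate steps that AudioPolicyManager previously did inline into one call that only commits descriptor state after a valid handle comes back. A toy sketch of that shape with hypothetical types (FakeClientInterface and FakeDupDescriptor are illustrations, not the real policy classes).

    #include <cstdint>
    #include <cstdio>

    using audio_io_handle_t = int;
    using status_t = int;
    static const audio_io_handle_t AUDIO_IO_HANDLE_NONE = 0;
    static const status_t NO_ERROR = 0;
    static const status_t INVALID_OPERATION = -38;

    // Stand-in for the policy client interface.
    struct FakeClientInterface {
        // Mirrors the note above: the duplicating open takes the handles in the
        // reverse order from the descriptor's mOutput1/mOutput2 numbering.
        audio_io_handle_t openDuplicateOutput(audio_io_handle_t h2, audio_io_handle_t h1) {
            return (h1 != AUDIO_IO_HANDLE_NONE && h2 != AUDIO_IO_HANDLE_NONE)
                    ? 42 : AUDIO_IO_HANDLE_NONE;
        }
    };

    // Stand-in for a duplicating output descriptor.
    struct FakeDupDescriptor {
        FakeClientInterface *mClient = nullptr;
        audio_io_handle_t mIoHandle = AUDIO_IO_HANDLE_NONE;
        audio_io_handle_t mOutput1 = AUDIO_IO_HANDLE_NONE;
        audio_io_handle_t mOutput2 = AUDIO_IO_HANDLE_NONE;
        uint32_t mLatency = 0;

        status_t openDuplicating(audio_io_handle_t out1, audio_io_handle_t out2,
                                 uint32_t latency2, audio_io_handle_t *ioHandle) {
            *ioHandle = mClient->openDuplicateOutput(out2, out1);
            if (*ioHandle == AUDIO_IO_HANDLE_NONE) {
                return INVALID_OPERATION;   // failure leaves the descriptor untouched
            }
            mIoHandle = *ioHandle;          // commit state only after a valid handle
            mOutput1 = out1;
            mOutput2 = out2;
            mLatency = latency2;            // inherit the non-primary output's parameters
            return NO_ERROR;
        }
    };

    int main() {
        FakeClientInterface client;
        FakeDupDescriptor dup;
        dup.mClient = &client;
        audio_io_handle_t handle = AUDIO_IO_HANDLE_NONE;
        status_t status = dup.openDuplicating(/*out1=*/1, /*out2=*/2, /*latency2=*/20, &handle);
        std::printf("status=%d handle=%d latency=%u\n", status, handle, (unsigned)dup.mLatency);
        return 0;
    }
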
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
index 54bfcbc..e8416d4 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
@@ -67,7 +67,9 @@
                                                       const char *device_address,
                                                       const char *device_name)
 {
-    return setDeviceConnectionStateInt(device, state, device_address, device_name);
+    status_t status = setDeviceConnectionStateInt(device, state, device_address, device_name);
+    nextAudioPortGeneration();
+    return status;
 }
 
 void AudioPolicyManager::broadcastDeviceConnectionState(audio_devices_t device,
@@ -1158,6 +1160,12 @@
     bool force = !outputDesc->isActive() &&
             (outputDesc->getPatchHandle() == AUDIO_PATCH_HANDLE_NONE);
 
+    // requiresMuteCheck is false when we can bypass mute strategy.
+    // It covers a common case when there is no materially active audio
+    // and muting would result in unnecessary delay and dropped audio.
+    const uint32_t outputLatencyMs = outputDesc->latency();
+    bool requiresMuteCheck = outputDesc->isActive(outputLatencyMs * 2);  // account for drain
+
     // increment usage count for this stream on the requested output:
     // NOTE that the usage count is the same for duplicated output and hardware output which is
     // necessary for a correct control of hardware output routing by startOutput() and stopOutput()
@@ -1181,29 +1189,44 @@
         for (size_t i = 0; i < mOutputs.size(); i++) {
             sp<AudioOutputDescriptor> desc = mOutputs.valueAt(i);
             if (desc != outputDesc) {
+                // An output has a shared device if
+                // - managed by the same hw module
+                // - supports the currently selected device
+                const bool sharedDevice = outputDesc->sharesHwModuleWith(desc)
+                        && (desc->supportedDevices() & device) != AUDIO_DEVICE_NONE;
+
                 // force a device change if any other output is:
                 // - managed by the same hw module
-                // - has a current device selection that differs from selected device.
                 // - supports currently selected device
+                // - has a current device selection that differs from selected device.
                 // - has an active audio patch
                 // In this case, the audio HAL must receive the new device selection so that it can
-                // change the device currently selected by the other active output.
-                if (outputDesc->sharesHwModuleWith(desc) &&
+                // change the device currently selected by the other output.
+                if (sharedDevice &&
                         desc->device() != device &&
-                        desc->supportedDevices() & device &&
                         desc->getPatchHandle() != AUDIO_PATCH_HANDLE_NONE) {
                     force = true;
                 }
                 // wait for audio on other active outputs to be presented when starting
                 // a notification so that audio focus effect can propagate, or that a mute/unmute
                 // event occurred for beacon
-                uint32_t latency = desc->latency();
-                if (shouldWait && desc->isActive(latency * 2) && (waitMs < latency)) {
-                    waitMs = latency;
+                const uint32_t latencyMs = desc->latency();
+                const bool isActive = desc->isActive(latencyMs * 2);  // account for drain
+
+                if (shouldWait && isActive && (waitMs < latencyMs)) {
+                    waitMs = latencyMs;
                 }
+
+                // Require mute check if another output is on a shared device
+                // and currently active to have proper drain and avoid pops.
+                // Note restoring AudioTracks onto this output needs to invoke
+                // a volume ramp if there is no mute.
+                requiresMuteCheck |= sharedDevice && isActive;
             }
         }
-        uint32_t muteWaitMs = setOutputDevice(outputDesc, device, force, 0, NULL, address);
+
+        const uint32_t muteWaitMs =
+                setOutputDevice(outputDesc, device, force, 0, NULL, address, requiresMuteCheck);
 
         // handle special case for sonification while in call
         if (isInCall()) {
@@ -1228,6 +1251,14 @@
         if (waitMs > muteWaitMs) {
             *delayMs = waitMs - muteWaitMs;
         }
+
+        // FIXME: A device change (muteWaitMs > 0) likely introduces a volume change.
+        // A volume change enacted by APM with 0 delay is not synchronous, as it goes
+        // via AudioCommandThread to AudioFlinger.  Hence it is possible that the volume
+        // change occurs after the MixerThread starts and causes a stream volume
+        // glitch.
+        //
+        // We do not introduce additional delay here.
     }
 
     return NO_ERROR;
@@ -3930,19 +3961,12 @@
                         //TODO: configure audio effect output stage here
 
                         // open a duplicating output thread for the new output and the primary output
-                        duplicatedOutput =
-                                mpClientInterface->openDuplicateOutput(output,
-                                                                       mPrimaryOutput->mIoHandle);
-                        if (duplicatedOutput != AUDIO_IO_HANDLE_NONE) {
+                        sp<SwAudioOutputDescriptor> dupOutputDesc =
+                                new SwAudioOutputDescriptor(NULL, mpClientInterface);
+                        status_t status = dupOutputDesc->openDuplicating(mPrimaryOutput, desc,
+                                                                         &duplicatedOutput);
+                        if (status == NO_ERROR) {
                             // add duplicated output descriptor
-                            sp<SwAudioOutputDescriptor> dupOutputDesc =
-                                    new SwAudioOutputDescriptor(NULL, mpClientInterface);
-                            dupOutputDesc->mOutput1 = mPrimaryOutput;
-                            dupOutputDesc->mOutput2 = desc;
-                            dupOutputDesc->mSamplingRate = desc->mSamplingRate;
-                            dupOutputDesc->mFormat = desc->mFormat;
-                            dupOutputDesc->mChannelMask = desc->mChannelMask;
-                            dupOutputDesc->mLatency = desc->mLatency;
                             addOutput(duplicatedOutput, dupOutputDesc);
                             applyStreamVolumes(dupOutputDesc, device, 0, true);
                         } else {
@@ -4742,21 +4766,24 @@
                                              bool force,
                                              int delayMs,
                                              audio_patch_handle_t *patchHandle,
-                                             const char* address)
+                                             const char *address,
+                                             bool requiresMuteCheck)
 {
     ALOGV("setOutputDevice() device %04x delayMs %d", device, delayMs);
     AudioParameter param;
     uint32_t muteWaitMs;
 
     if (outputDesc->isDuplicated()) {
-        muteWaitMs = setOutputDevice(outputDesc->subOutput1(), device, force, delayMs);
-        muteWaitMs += setOutputDevice(outputDesc->subOutput2(), device, force, delayMs);
+        muteWaitMs = setOutputDevice(outputDesc->subOutput1(), device, force, delayMs,
+                nullptr /* patchHandle */, nullptr /* address */, requiresMuteCheck);
+        muteWaitMs += setOutputDevice(outputDesc->subOutput2(), device, force, delayMs,
+                nullptr /* patchHandle */, nullptr /* address */, requiresMuteCheck);
         return muteWaitMs;
     }
     // no need to proceed if new device is not AUDIO_DEVICE_NONE and not supported by current
     // output profile
     if ((device != AUDIO_DEVICE_NONE) &&
-            ((device & outputDesc->supportedDevices()) == 0)) {
+            ((device & outputDesc->supportedDevices()) == AUDIO_DEVICE_NONE)) {
         return 0;
     }
 
@@ -4770,7 +4797,14 @@
     if (device != AUDIO_DEVICE_NONE) {
         outputDesc->mDevice = device;
     }
-    muteWaitMs = checkDeviceMuteStrategies(outputDesc, prevDevice, delayMs);
+
+    // if the outputs are not materially active, there is no need to mute.
+    if (requiresMuteCheck) {
+        muteWaitMs = checkDeviceMuteStrategies(outputDesc, prevDevice, delayMs);
+    } else {
+        ALOGV("%s: suppressing checkDeviceMuteStrategies", __func__);
+        muteWaitMs = 0;
+    }
 
     // Do not change the routing if:
     //      the requested device is AUDIO_DEVICE_NONE
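The requiresMuteCheck logic added above skips the device-mute pass when neither the started output nor any other output sharing its device has been active within roughly twice its latency (to cover drain). A minimal sketch of that decision, using a hypothetical OutputState record rather than the real output descriptors.

    #include <cstdint>
    #include <cstdio>
    #include <vector>

    struct OutputState {
        uint32_t latencyMs;
        int64_t idleForMs;       // time since last activity; -1 means currently active
        uint32_t deviceMask;     // bitmask of devices this output can reach
        bool sameHwModule;       // shares a HW module with the started output
    };

    static bool isActiveWithin(const OutputState &o, uint32_t windowMs) {
        return o.idleForMs < 0 || o.idleForMs <= static_cast<int64_t>(windowMs);
    }

    static bool requiresMuteCheck(const OutputState &started, uint32_t newDevice,
                                  const std::vector<OutputState> &others) {
        // Account for drain: an output that stopped less than 2x its latency ago
        // may still be pushing audio to the device.
        bool required = isActiveWithin(started, 2 * started.latencyMs);
        for (const OutputState &o : others) {
            const bool sharedDevice = o.sameHwModule && (o.deviceMask & newDevice) != 0;
            required |= sharedDevice && isActiveWithin(o, 2 * o.latencyMs);
        }
        return required;
    }

    int main() {
        OutputState started{/*latencyMs=*/40, /*idleForMs=*/500, /*deviceMask=*/0x2, /*sameHwModule=*/true};
        std::vector<OutputState> others = {
            {20, -1, 0x2, true},   // active output on the shared device -> mute check needed
            {20, -1, 0x4, true},   // active, but different device -> ignored
        };
        std::printf("mute check required: %s\n",
                    requiresMuteCheck(started, /*newDevice=*/0x2, others) ? "yes" : "no");
        return 0;
    }
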
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.h b/services/audiopolicy/managerdefault/AudioPolicyManager.h
index ee339e7..2b68882 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.h
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.h
@@ -317,7 +317,8 @@
                              bool force = false,
                              int delayMs = 0,
                              audio_patch_handle_t *patchHandle = NULL,
-                             const char* address = NULL);
+                             const char *address = nullptr,
+                             bool requiresMuteCheck = true);
         status_t resetOutputDevice(const sp<AudioOutputDescriptor>& outputDesc,
                                    int delayMs = 0,
                                    audio_patch_handle_t *patchHandle = NULL);
diff --git a/services/audiopolicy/service/AudioPolicyEffects.cpp b/services/audiopolicy/service/AudioPolicyEffects.cpp
index 7fe363d..c7dfe0f 100644
--- a/services/audiopolicy/service/AudioPolicyEffects.cpp
+++ b/services/audiopolicy/service/AudioPolicyEffects.cpp
@@ -399,11 +399,12 @@
         while (pos + size > *totSize) {
             *totSize += ((*totSize + 7) / 8) * 4;
         }
-        *param = (char *)realloc(*param, *totSize);
-        if (*param == NULL) {
+        char *newParam = (char *)realloc(*param, *totSize);
+        if (newParam == NULL) {
             ALOGE("%s realloc error for size %zu", __func__, *totSize);
             return 0;
         }
+        *param = newParam;
     }
     *curSize = pos + size;
     return pos;
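The AudioPolicyEffects change above is the standard safe-realloc idiom: keep the original pointer until realloc() succeeds, so a failure neither leaks nor clobbers the only reference to the old block. A small self-contained sketch of the same idiom; growBuffer() is an illustrative helper, not an existing API.

    #include <cstdio>
    #include <cstdlib>
    #include <cstring>

    static bool growBuffer(char **param, size_t *totSize, size_t newSize) {
        char *newParam = static_cast<char *>(std::realloc(*param, newSize));
        if (newParam == nullptr) {
            // *param is still valid here; the caller can keep using or free it.
            return false;
        }
        *param = newParam;       // commit only after realloc succeeded
        *totSize = newSize;
        return true;
    }

    int main() {
        size_t totSize = 8;
        char *param = static_cast<char *>(std::malloc(totSize));
        if (param == nullptr) return 1;
        std::strcpy(param, "effect");
        if (growBuffer(&param, &totSize, 64)) {
            std::printf("grew to %zu bytes, still holds \"%s\"\n", totSize, param);
        }
        std::free(param);
        return 0;
    }
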
diff --git a/services/camera/libcameraservice/CameraFlashlight.cpp b/services/camera/libcameraservice/CameraFlashlight.cpp
index e06a81f..394701a 100644
--- a/services/camera/libcameraservice/CameraFlashlight.cpp
+++ b/services/camera/libcameraservice/CameraFlashlight.cpp
@@ -32,13 +32,15 @@
 
 namespace android {
 
+using hardware::camera::common::V1_0::TorchModeStatus;
+
 /////////////////////////////////////////////////////////////////////
 // CameraFlashlight implementation begins
 // used by camera service to control the flashlight.
 /////////////////////////////////////////////////////////////////////
 
 CameraFlashlight::CameraFlashlight(sp<CameraProviderManager> providerManager,
-        camera_module_callbacks_t* callbacks) :
+        CameraProviderManager::StatusListener* callbacks) :
         mProviderManager(providerManager),
         mCallbacks(callbacks),
         mFlashlightMapInitialized(false) {
@@ -59,7 +61,7 @@
     } else {
         // Only HAL1 devices do not support setTorchMode
         mFlashControl =
-                new CameraHardwareInterfaceFlashControl(mProviderManager, *mCallbacks);
+                new CameraHardwareInterfaceFlashControl(mProviderManager, mCallbacks);
     }
 
     return OK;
@@ -119,7 +121,8 @@
 }
 
 int CameraFlashlight::getNumberOfCameras() {
-    return mProviderManager->getAPI1CompatibleCameraCount();
+    size_t len = mProviderManager->getAPI1CompatibleCameraDeviceIds().size();
+    return static_cast<int>(len);
 }
 
 status_t CameraFlashlight::findFlashUnits() {
@@ -221,9 +224,8 @@
             int numCameras = getNumberOfCameras();
             for (int i = 0; i < numCameras; i++) {
                 if (hasFlashUnitLocked(String8::format("%d", i))) {
-                    mCallbacks->torch_mode_status_change(mCallbacks,
-                            String8::format("%d", i).string(),
-                            TORCH_MODE_STATUS_NOT_AVAILABLE);
+                    mCallbacks->onTorchStatusChanged(
+                            String8::format("%d", i), TorchModeStatus::NOT_AVAILABLE);
                 }
             }
         }
@@ -266,9 +268,8 @@
         int numCameras = getNumberOfCameras();
         for (int i = 0; i < numCameras; i++) {
             if (hasFlashUnitLocked(String8::format("%d", i))) {
-                mCallbacks->torch_mode_status_change(mCallbacks,
-                        String8::format("%d", i).string(),
-                        TORCH_MODE_STATUS_AVAILABLE_OFF);
+                mCallbacks->onTorchStatusChanged(
+                        String8::format("%d", i), TorchModeStatus::AVAILABLE_OFF);
             }
         }
     }
@@ -315,9 +316,9 @@
 
 CameraHardwareInterfaceFlashControl::CameraHardwareInterfaceFlashControl(
         sp<CameraProviderManager> manager,
-        const camera_module_callbacks_t& callbacks) :
+        CameraProviderManager::StatusListener* callbacks) :
         mProviderManager(manager),
-        mCallbacks(&callbacks),
+        mCallbacks(callbacks),
         mTorchEnabled(false) {
 }
 
@@ -333,8 +334,7 @@
         if (mCallbacks) {
             ALOGV("%s: notify the framework that torch was turned off",
                     __FUNCTION__);
-            mCallbacks->torch_mode_status_change(mCallbacks,
-                    mCameraId.string(), TORCH_MODE_STATUS_AVAILABLE_OFF);
+            mCallbacks->onTorchStatusChanged(mCameraId, TorchModeStatus::AVAILABLE_OFF);
         }
     }
 }
@@ -368,8 +368,7 @@
         // disabling the torch mode of currently opened device
         disconnectCameraDevice();
         mTorchEnabled = false;
-        mCallbacks->torch_mode_status_change(mCallbacks,
-            cameraId.string(), TORCH_MODE_STATUS_AVAILABLE_OFF);
+        mCallbacks->onTorchStatusChanged(cameraId, TorchModeStatus::AVAILABLE_OFF);
         return OK;
     }
 
@@ -379,8 +378,7 @@
     }
 
     mTorchEnabled = true;
-    mCallbacks->torch_mode_status_change(mCallbacks,
-            cameraId.string(), TORCH_MODE_STATUS_AVAILABLE_ON);
+    mCallbacks->onTorchStatusChanged(cameraId, TorchModeStatus::AVAILABLE_ON);
     return OK;
 }
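The flashlight code above now reports torch state through CameraProviderManager::StatusListener::onTorchStatusChanged() instead of the legacy camera_module_callbacks_t function table. A hedged sketch of that listener pattern with hypothetical stand-ins; StatusListener and FlashController here are illustrations, not the real camera service interfaces.

    #include <cstdio>
    #include <string>

    enum class TorchStatus { NOT_AVAILABLE, AVAILABLE_OFF, AVAILABLE_ON };

    struct StatusListener {                          // stand-in for the real listener interface
        virtual ~StatusListener() = default;
        virtual void onTorchStatusChanged(const std::string &cameraId, TorchStatus status) = 0;
    };

    struct LoggingListener : StatusListener {        // e.g. what a service might do on callback
        void onTorchStatusChanged(const std::string &cameraId, TorchStatus status) override {
            std::printf("camera %s torch status -> %d\n",
                        cameraId.c_str(), static_cast<int>(status));
        }
    };

    struct FlashController {                         // stand-in for the flash control path
        StatusListener *mCallbacks = nullptr;
        bool mTorchEnabled = false;

        void setTorchMode(const std::string &cameraId, bool enabled) {
            mTorchEnabled = enabled;
            if (mCallbacks != nullptr) {             // notify through the typed C++ interface
                mCallbacks->onTorchStatusChanged(
                        cameraId, enabled ? TorchStatus::AVAILABLE_ON : TorchStatus::AVAILABLE_OFF);
            }
        }
    };

    int main() {
        LoggingListener listener;
        FlashController control;
        control.mCallbacks = &listener;
        control.setTorchMode("0", true);
        control.setTorchMode("0", false);
        return 0;
    }
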
 
diff --git a/services/camera/libcameraservice/CameraFlashlight.h b/services/camera/libcameraservice/CameraFlashlight.h
index c86ee85..07ce829 100644
--- a/services/camera/libcameraservice/CameraFlashlight.h
+++ b/services/camera/libcameraservice/CameraFlashlight.h
@@ -19,7 +19,6 @@
 
 #include <gui/GLConsumer.h>
 #include <gui/Surface.h>
-#include <hardware/camera_common.h>
 #include <utils/KeyedVector.h>
 #include <utils/SortedVector.h>
 #include "common/CameraProviderManager.h"
@@ -55,7 +54,7 @@
 class CameraFlashlight : public virtual VirtualLightRefBase {
     public:
         CameraFlashlight(sp<CameraProviderManager> providerManager,
-                camera_module_callbacks_t* callbacks);
+                CameraProviderManager::StatusListener* callbacks);
         virtual ~CameraFlashlight();
 
         // Find all flash units. This must be called before other methods. All
@@ -99,7 +98,7 @@
 
         sp<CameraProviderManager> mProviderManager;
 
-        const camera_module_callbacks_t *mCallbacks;
+        CameraProviderManager::StatusListener* mCallbacks;
         SortedVector<String8> mOpenedCameraIds;
 
         // camera id -> if it has a flash unit
@@ -134,7 +133,7 @@
     public:
         CameraHardwareInterfaceFlashControl(
                 sp<CameraProviderManager> manager,
-                const camera_module_callbacks_t& callbacks);
+                CameraProviderManager::StatusListener* callbacks);
         virtual ~CameraHardwareInterfaceFlashControl();
 
         // FlashControlBase
@@ -166,7 +165,7 @@
         status_t hasFlashUnitLocked(const String8& cameraId, bool *hasFlash, bool keepDeviceOpen);
 
         sp<CameraProviderManager> mProviderManager;
-        const camera_module_callbacks_t *mCallbacks;
+        CameraProviderManager::StatusListener* mCallbacks;
         sp<CameraHardwareInterface> mDevice;
         String8 mCameraId;
         CameraParameters mParameters;
diff --git a/services/camera/libcameraservice/CameraService.cpp b/services/camera/libcameraservice/CameraService.cpp
index e7609ed..89ca6bb 100644
--- a/services/camera/libcameraservice/CameraService.cpp
+++ b/services/camera/libcameraservice/CameraService.cpp
@@ -107,79 +107,13 @@
 
 // ----------------------------------------------------------------------------
 
-extern "C" {
-static void camera_device_status_change(
-        const struct camera_module_callbacks* callbacks,
-        int camera_id,
-        int new_status) {
-    sp<CameraService> cs = const_cast<CameraService*>(
-            static_cast<const CameraService*>(callbacks));
-    String8 id = String8::format("%d", camera_id);
-
-    CameraDeviceStatus newStatus{CameraDeviceStatus::NOT_PRESENT};
-    switch (new_status) {
-        case CAMERA_DEVICE_STATUS_NOT_PRESENT:
-            newStatus = CameraDeviceStatus::NOT_PRESENT;
-            break;
-        case CAMERA_DEVICE_STATUS_PRESENT:
-            newStatus = CameraDeviceStatus::PRESENT;
-            break;
-        case CAMERA_DEVICE_STATUS_ENUMERATING:
-            newStatus = CameraDeviceStatus::ENUMERATING;
-            break;
-        default:
-            ALOGW("Unknown device status change to %d", new_status);
-            break;
-    }
-    cs->onDeviceStatusChanged(id, newStatus);
-}
-
-static void torch_mode_status_change(
-        const struct camera_module_callbacks* callbacks,
-        const char* camera_id,
-        int new_status) {
-    if (!callbacks || !camera_id) {
-        ALOGE("%s invalid parameters. callbacks %p, camera_id %p", __FUNCTION__,
-                callbacks, camera_id);
-    }
-    sp<CameraService> cs = const_cast<CameraService*>(
-                                static_cast<const CameraService*>(callbacks));
-
-    TorchModeStatus status;
-    switch (new_status) {
-        case TORCH_MODE_STATUS_NOT_AVAILABLE:
-            status = TorchModeStatus::NOT_AVAILABLE;
-            break;
-        case TORCH_MODE_STATUS_AVAILABLE_OFF:
-            status = TorchModeStatus::AVAILABLE_OFF;
-            break;
-        case TORCH_MODE_STATUS_AVAILABLE_ON:
-            status = TorchModeStatus::AVAILABLE_ON;
-            break;
-        default:
-            ALOGE("Unknown torch status %d", new_status);
-            return;
-    }
-
-    cs->onTorchStatusChanged(
-        String8(camera_id),
-        status);
-}
-} // extern "C"
-
-// ----------------------------------------------------------------------------
-
 static const String16 sManageCameraPermission("android.permission.MANAGE_CAMERA");
 
 CameraService::CameraService() :
         mEventLog(DEFAULT_EVENT_LOG_LENGTH),
-        mNumberOfCameras(0), mNumberOfNormalCameras(0),
+        mNumberOfCameras(0),
         mSoundRef(0), mInitialized(false) {
     ALOGI("CameraService started (pid=%d)", getpid());
-
-    this->camera_device_status_change = android::camera_device_status_change;
-    this->torch_mode_status_change = android::torch_mode_status_change;
-
     mServiceLockWrapper = std::make_shared<WaitableMutexWrapper>(&mServiceLock);
 }
 
@@ -209,52 +143,42 @@
 
 status_t CameraService::enumerateProviders() {
     status_t res;
-    Mutex::Autolock l(mServiceLock);
 
-    if (nullptr == mCameraProviderManager.get()) {
-        mCameraProviderManager = new CameraProviderManager();
-        res = mCameraProviderManager->initialize(this);
-        if (res != OK) {
-            ALOGE("%s: Unable to initialize camera provider manager: %s (%d)",
-                    __FUNCTION__, strerror(-res), res);
-            return res;
-        }
-    }
+    std::vector<std::string> deviceIds;
+    {
+        Mutex::Autolock l(mServiceLock);
 
-    mNumberOfCameras = mCameraProviderManager->getCameraCount();
-    mNumberOfNormalCameras =
-            mCameraProviderManager->getAPI1CompatibleCameraCount();
-
-    // Setup vendor tags before we call get_camera_info the first time
-    // because HAL might need to setup static vendor keys in get_camera_info
-    // TODO: maybe put this into CameraProviderManager::initialize()?
-    mCameraProviderManager->setUpVendorTags();
-
-    if (nullptr == mFlashlight.get()) {
-        mFlashlight = new CameraFlashlight(mCameraProviderManager, this);
-    }
-
-    res = mFlashlight->findFlashUnits();
-    if (res != OK) {
-        ALOGE("Failed to enumerate flash units: %s (%d)", strerror(-res), res);
-    }
-
-    for (auto& cameraId : mCameraProviderManager->getCameraDeviceIds()) {
-        String8 id8 = String8(cameraId.c_str());
-        bool cameraFound = false;
-        {
-
-            Mutex::Autolock lock(mCameraStatesLock);
-            auto iter = mCameraStates.find(id8);
-            if (iter != mCameraStates.end()) {
-                cameraFound = true;
+        if (nullptr == mCameraProviderManager.get()) {
+            mCameraProviderManager = new CameraProviderManager();
+            res = mCameraProviderManager->initialize(this);
+            if (res != OK) {
+                ALOGE("%s: Unable to initialize camera provider manager: %s (%d)",
+                        __FUNCTION__, strerror(-res), res);
+                return res;
             }
         }
 
-        if (!cameraFound) {
-            addStates(id8);
+
+        // Setup vendor tags before we call get_camera_info the first time
+        // because HAL might need to setup static vendor keys in get_camera_info
+        // TODO: maybe put this into CameraProviderManager::initialize()?
+        mCameraProviderManager->setUpVendorTags();
+
+        if (nullptr == mFlashlight.get()) {
+            mFlashlight = new CameraFlashlight(mCameraProviderManager, this);
         }
 
+        res = mFlashlight->findFlashUnits();
+        if (res != OK) {
+            ALOGE("Failed to enumerate flash units: %s (%d)", strerror(-res), res);
+        }
+
+        deviceIds = mCameraProviderManager->getCameraDeviceIds();
+    }
+
+
+    for (auto& cameraId : deviceIds) {
+        String8 id8 = String8(cameraId.c_str());
         onDeviceStatusChanged(id8, CameraDeviceStatus::PRESENT);
     }
 
@@ -291,6 +215,13 @@
     enumerateProviders();
 }
 
+void CameraService::updateCameraNumAndIds() {
+    Mutex::Autolock l(mServiceLock);
+    mNumberOfCameras = mCameraProviderManager->getCameraCount();
+    mNormalDeviceIds =
+            mCameraProviderManager->getAPI1CompatibleCameraDeviceIds();
+}
+
 void CameraService::addStates(const String8 id) {
     std::string cameraId(id.c_str());
     hardware::camera::common::V1_0::CameraResourceCost cost;
@@ -313,10 +244,13 @@
     if (mFlashlight->hasFlashUnit(id)) {
         mTorchStatusMap.add(id, TorchModeStatus::AVAILABLE_OFF);
     }
+
+    updateCameraNumAndIds();
     logDeviceAdded(id, "Device added");
 }
 
 void CameraService::removeStates(const String8 id) {
+    updateCameraNumAndIds();
     if (mFlashlight->hasFlashUnit(id)) {
         mTorchStatusMap.removeItem(id);
     }
@@ -361,15 +295,16 @@
     if (newStatus == StatusInternal::NOT_PRESENT) {
         logDeviceRemoved(id, String8::format("Device status changed from %d to %d", oldStatus,
                 newStatus));
+
+        // Set the device status to NOT_PRESENT, clients will no longer be able to connect
+        // to this device until the status changes
+        updateStatus(StatusInternal::NOT_PRESENT, id);
+
         sp<BasicClient> clientToDisconnect;
         {
             // Don't do this in updateStatus to avoid deadlock over mServiceLock
             Mutex::Autolock lock(mServiceLock);
 
-            // Set the device status to NOT_PRESENT, clients will no longer be able to connect
-            // to this device until the status changes
-            updateStatus(StatusInternal::NOT_PRESENT, id);
-
             // Remove cached shim parameters
             state->setShimParams(CameraParameters());
 
@@ -472,7 +407,7 @@
     Mutex::Autolock l(mServiceLock);
     switch (type) {
         case CAMERA_TYPE_BACKWARD_COMPATIBLE:
-            *numCameras = mNumberOfNormalCameras;
+            *numCameras = static_cast<int>(mNormalDeviceIds.size());
             break;
         case CAMERA_TYPE_ALL:
             *numCameras = mNumberOfCameras;
@@ -502,7 +437,8 @@
     }
 
     Status ret = Status::ok();
-    status_t err = mCameraProviderManager->getCameraInfo(std::to_string(cameraId), cameraInfo);
+    status_t err = mCameraProviderManager->getCameraInfo(
+            cameraIdIntToStrLocked(cameraId), cameraInfo);
     if (err != OK) {
         ret = STATUS_ERROR_FMT(ERROR_INVALID_OPERATION,
                 "Error retrieving camera info from device %d: %s (%d)", cameraId,
@@ -512,13 +448,19 @@
     return ret;
 }
 
-int CameraService::cameraIdToInt(const String8& cameraId) {
-    int id;
-    bool success = base::ParseInt(cameraId.string(), &id, 0);
-    if (!success) {
-        return -1;
+std::string CameraService::cameraIdIntToStrLocked(int cameraIdInt) {
+    if (cameraIdInt < 0 || cameraIdInt >= static_cast<int>(mNormalDeviceIds.size())) {
+        ALOGE("%s: input id %d invalid: valid range is [0, %zu)",
+                __FUNCTION__, cameraIdInt, mNormalDeviceIds.size());
+        return std::string{};
     }
-    return id;
+
+    return mNormalDeviceIds[cameraIdInt];
+}
+
+String8 CameraService::cameraIdIntToStr(int cameraIdInt) {
+    Mutex::Autolock lock(mServiceLock);
+    return String8(cameraIdIntToStrLocked(cameraIdInt).c_str());
 }
 
 Status CameraService::getCameraCharacteristics(const String16& cameraId,
@@ -635,8 +577,8 @@
 
 Status CameraService::makeClient(const sp<CameraService>& cameraService,
         const sp<IInterface>& cameraCb, const String16& packageName, const String8& cameraId,
-        int facing, int clientPid, uid_t clientUid, int servicePid, bool legacyMode,
-        int halVersion, int deviceVersion, apiLevel effectiveApiLevel,
+        int api1CameraId, int facing, int clientPid, uid_t clientUid, int servicePid,
+        bool legacyMode, int halVersion, int deviceVersion, apiLevel effectiveApiLevel,
         /*out*/sp<BasicClient>* client) {
 
     if (halVersion < 0 || halVersion == deviceVersion) {
@@ -646,8 +588,9 @@
           case CAMERA_DEVICE_API_VERSION_1_0:
             if (effectiveApiLevel == API_1) {  // Camera1 API route
                 sp<ICameraClient> tmp = static_cast<ICameraClient*>(cameraCb.get());
-                *client = new CameraClient(cameraService, tmp, packageName, cameraIdToInt(cameraId),
-                        facing, clientPid, clientUid, getpid(), legacyMode);
+                *client = new CameraClient(cameraService, tmp, packageName,
+                        api1CameraId, facing, clientPid, clientUid,
+                        getpid(), legacyMode);
             } else { // Camera2 API route
                 ALOGW("Camera using old HAL version: %d", deviceVersion);
                 return STATUS_ERROR_FMT(ERROR_DEPRECATED_HAL,
@@ -662,8 +605,10 @@
           case CAMERA_DEVICE_API_VERSION_3_4:
             if (effectiveApiLevel == API_1) { // Camera1 API route
                 sp<ICameraClient> tmp = static_cast<ICameraClient*>(cameraCb.get());
-                *client = new Camera2Client(cameraService, tmp, packageName, cameraIdToInt(cameraId),
-                        facing, clientPid, clientUid, servicePid, legacyMode);
+                *client = new Camera2Client(cameraService, tmp, packageName,
+                        cameraId, api1CameraId,
+                        facing, clientPid, clientUid,
+                        servicePid, legacyMode);
             } else { // Camera2 API route
                 sp<hardware::camera2::ICameraDeviceCallbacks> tmp =
                         static_cast<hardware::camera2::ICameraDeviceCallbacks*>(cameraCb.get());
@@ -685,8 +630,9 @@
             halVersion == CAMERA_DEVICE_API_VERSION_1_0) {
             // Only support higher HAL version device opened as HAL1.0 device.
             sp<ICameraClient> tmp = static_cast<ICameraClient*>(cameraCb.get());
-            *client = new CameraClient(cameraService, tmp, packageName, cameraIdToInt(cameraId),
-                    facing, clientPid, clientUid, servicePid, legacyMode);
+            *client = new CameraClient(cameraService, tmp, packageName,
+                    api1CameraId, facing, clientPid, clientUid,
+                    servicePid, legacyMode);
         } else {
             // Other combinations (e.g. HAL3.x open as HAL2.x) are not supported yet.
             ALOGE("Invalid camera HAL version %x: HAL %x device can only be"
@@ -782,7 +728,8 @@
     Status ret = Status::ok();
     sp<Client> tmp = nullptr;
     if (!(ret = connectHelper<ICameraClient,Client>(
-            sp<ICameraClient>{nullptr}, id, static_cast<int>(CAMERA_HAL_API_VERSION_UNSPECIFIED),
+            sp<ICameraClient>{nullptr}, id, cameraId,
+            static_cast<int>(CAMERA_HAL_API_VERSION_UNSPECIFIED),
             internalPackageName, uid, USE_CALLING_PID,
             API_1, /*legacyMode*/ false, /*shimUpdateOnly*/ true,
             /*out*/ tmp)
@@ -1235,7 +1182,7 @@
 
 Status CameraService::connect(
         const sp<ICameraClient>& cameraClient,
-        int cameraId,
+        int api1CameraId,
         const String16& clientPackageName,
         int clientUid,
         int clientPid,
@@ -1244,9 +1191,10 @@
 
     ATRACE_CALL();
     Status ret = Status::ok();
-    String8 id = String8::format("%d", cameraId);
+
+    String8 id = cameraIdIntToStr(api1CameraId);
     sp<Client> client = nullptr;
-    ret = connectHelper<ICameraClient,Client>(cameraClient, id,
+    ret = connectHelper<ICameraClient,Client>(cameraClient, id, api1CameraId,
             CAMERA_HAL_API_VERSION_UNSPECIFIED, clientPackageName, clientUid, clientPid, API_1,
             /*legacyMode*/ false, /*shimUpdateOnly*/ false,
             /*out*/client);
@@ -1263,18 +1211,18 @@
 
 Status CameraService::connectLegacy(
         const sp<ICameraClient>& cameraClient,
-        int cameraId, int halVersion,
+        int api1CameraId, int halVersion,
         const String16& clientPackageName,
         int clientUid,
         /*out*/
         sp<ICamera>* device) {
 
     ATRACE_CALL();
-    String8 id = String8::format("%d", cameraId);
+    String8 id = cameraIdIntToStr(api1CameraId);
 
     Status ret = Status::ok();
     sp<Client> client = nullptr;
-    ret = connectHelper<ICameraClient,Client>(cameraClient, id, halVersion,
+    ret = connectHelper<ICameraClient,Client>(cameraClient, id, api1CameraId, halVersion,
             clientPackageName, clientUid, USE_CALLING_PID, API_1,
             /*legacyMode*/ true, /*shimUpdateOnly*/ false,
             /*out*/client);
@@ -1302,6 +1250,7 @@
     String8 id = String8(cameraId);
     sp<CameraDeviceClient> client = nullptr;
     ret = connectHelper<hardware::camera2::ICameraDeviceCallbacks,CameraDeviceClient>(cameraCb, id,
+            /*api1CameraId*/-1,
             CAMERA_HAL_API_VERSION_UNSPECIFIED, clientPackageName,
             clientUid, USE_CALLING_PID, API_2,
             /*legacyMode*/ false, /*shimUpdateOnly*/ false,
@@ -1319,8 +1268,8 @@
 
 template<class CALLBACK, class CLIENT>
 Status CameraService::connectHelper(const sp<CALLBACK>& cameraCb, const String8& cameraId,
-        int halVersion, const String16& clientPackageName, int clientUid, int clientPid,
-        apiLevel effectiveApiLevel, bool legacyMode, bool shimUpdateOnly,
+        int api1CameraId, int halVersion, const String16& clientPackageName, int clientUid,
+        int clientPid, apiLevel effectiveApiLevel, bool legacyMode, bool shimUpdateOnly,
         /*out*/sp<CLIENT>& device) {
     binder::Status ret = binder::Status::ok();
 
@@ -1403,8 +1352,10 @@
         }
 
         sp<BasicClient> tmp = nullptr;
-        if(!(ret = makeClient(this, cameraCb, clientPackageName, cameraId, facing, clientPid,
-                clientUid, getpid(), legacyMode, halVersion, deviceVersion, effectiveApiLevel,
+        if(!(ret = makeClient(this, cameraCb, clientPackageName,
+                cameraId, api1CameraId, facing,
+                clientPid, clientUid, getpid(), legacyMode,
+                halVersion, deviceVersion, effectiveApiLevel,
                 /*out*/&tmp)).isOk()) {
             return ret;
         }
@@ -2112,7 +2063,8 @@
 CameraService::Client::Client(const sp<CameraService>& cameraService,
         const sp<ICameraClient>& cameraClient,
         const String16& clientPackageName,
-        const String8& cameraIdStr, int cameraFacing,
+        const String8& cameraIdStr,
+        int api1CameraId, int cameraFacing,
         int clientPid, uid_t clientUid,
         int servicePid) :
         CameraService::BasicClient(cameraService,
@@ -2121,7 +2073,7 @@
                 cameraIdStr, cameraFacing,
                 clientPid, clientUid,
                 servicePid),
-        mCameraId(CameraService::cameraIdToInt(cameraIdStr))
+        mCameraId(api1CameraId)
 {
     int callingPid = getCallingPid();
     LOG1("Client::Client E (pid %d, id %d)", callingPid, mCameraId);
@@ -2676,7 +2628,10 @@
     }
     dprintf(fd, "\n== Service global info: ==\n\n");
     dprintf(fd, "Number of camera devices: %d\n", mNumberOfCameras);
-    dprintf(fd, "Number of normal camera devices: %d\n", mNumberOfNormalCameras);
+    dprintf(fd, "Number of normal camera devices: %zu\n", mNormalDeviceIds.size());
+    for (size_t i = 0; i < mNormalDeviceIds.size(); i++) {
+        dprintf(fd, "    Device %zu maps to \"%s\"\n", i, mNormalDeviceIds[i].c_str());
+    }
     String8 activeClientString = mActiveClientManager.toString();
     dprintf(fd, "Active Camera Clients:\n%s", activeClientString.string());
     dprintf(fd, "Allowed user IDs: %s\n", toString(mAllowedUsers).string());
diff --git a/services/camera/libcameraservice/CameraService.h b/services/camera/libcameraservice/CameraService.h
index 67db7ec..81048e6 100644
--- a/services/camera/libcameraservice/CameraService.h
+++ b/services/camera/libcameraservice/CameraService.h
@@ -62,7 +62,6 @@
     public BinderService<CameraService>,
     public virtual ::android::hardware::BnCameraService,
     public virtual IBinder::DeathRecipient,
-    public camera_module_callbacks_t,
     public virtual CameraProviderManager::StatusListener
 {
     friend class BinderService<CameraService>;
@@ -333,6 +332,7 @@
                 const sp<hardware::ICameraClient>& cameraClient,
                 const String16& clientPackageName,
                 const String8& cameraIdStr,
+                int api1CameraId,
                 int cameraFacing,
                 int clientPid,
                 uid_t clientUid,
@@ -551,7 +551,8 @@
     // Enumerate all camera providers in the system
     status_t enumerateProviders();
 
-    // Add a new camera to camera and torch state lists or remove an unplugged one
+    // Add a new camera to the camera and torch state lists, or remove an unplugged one.
+    // Caller must not hold mServiceLock
     void addStates(const String8 id);
     void removeStates(const String8 id);
 
@@ -578,7 +579,7 @@
     // Single implementation shared between the various connect calls
     template<class CALLBACK, class CLIENT>
     binder::Status connectHelper(const sp<CALLBACK>& cameraCb, const String8& cameraId,
-            int halVersion, const String16& clientPackageName,
+            int api1CameraId, int halVersion, const String16& clientPackageName,
             int clientUid, int clientPid,
             apiLevel effectiveApiLevel, bool legacyMode, bool shimUpdateOnly,
             /*out*/sp<CLIENT>& device);
@@ -640,9 +641,16 @@
     void finishConnectLocked(const sp<BasicClient>& client, const DescriptorPtr& desc);
 
     /**
-     * Returns the integer corresponding to the given camera ID string, or -1 on failure.
+     * Returns the underlying camera ID string mapped to the given camera ID int.
+     * An empty string is returned when cameraIdInt is invalid.
      */
-    static int cameraIdToInt(const String8& cameraId);
+    String8 cameraIdIntToStr(int cameraIdInt);
+
+    /**
+     * Returns the underlying camera ID string mapped to the given camera ID int.
+     * An empty string is returned when cameraIdInt is invalid.
+     * The caller must hold mServiceLock.
+     */
+    std::string cameraIdIntToStrLocked(int cameraIdInt);
 
     /**
      * Remove a single client corresponding to the given camera id from the list of active clients.
@@ -710,8 +718,14 @@
      */
     void dumpEventLog(int fd);
 
+    /**
+     * This method will acquire mServiceLock
+     */
+    void updateCameraNumAndIds();
+
     int                 mNumberOfCameras;
-    int                 mNumberOfNormalCameras;
+
+    std::vector<std::string> mNormalDeviceIds;
 
     // sounds
     MediaPlayer*        newMediaPlayer(const char *file);
@@ -821,8 +835,8 @@
 
     static binder::Status makeClient(const sp<CameraService>& cameraService,
             const sp<IInterface>& cameraCb, const String16& packageName, const String8& cameraId,
-            int facing, int clientPid, uid_t clientUid, int servicePid, bool legacyMode,
-            int halVersion, int deviceVersion, apiLevel effectiveApiLevel,
+            int api1CameraId, int facing, int clientPid, uid_t clientUid, int servicePid,
+            bool legacyMode, int halVersion, int deviceVersion, apiLevel effectiveApiLevel,
             /*out*/sp<BasicClient>* client);
 
     status_t checkCameraAccess(const String16& opPackageName);
diff --git a/services/camera/libcameraservice/api1/Camera2Client.cpp b/services/camera/libcameraservice/api1/Camera2Client.cpp
index 585d2eb..0a82cb9 100644
--- a/services/camera/libcameraservice/api1/Camera2Client.cpp
+++ b/services/camera/libcameraservice/api1/Camera2Client.cpp
@@ -49,16 +49,17 @@
 Camera2Client::Camera2Client(const sp<CameraService>& cameraService,
         const sp<hardware::ICameraClient>& cameraClient,
         const String16& clientPackageName,
-        int cameraId,
+        const String8& cameraDeviceId,
+        int api1CameraId,
         int cameraFacing,
         int clientPid,
         uid_t clientUid,
         int servicePid,
         bool legacyMode):
         Camera2ClientBase(cameraService, cameraClient, clientPackageName,
-                String8::format("%d", cameraId), cameraFacing,
+                cameraDeviceId, api1CameraId, cameraFacing,
                 clientPid, clientUid, servicePid),
-        mParameters(cameraId, cameraFacing)
+        mParameters(api1CameraId, cameraFacing)
 {
     ATRACE_CALL();
 
diff --git a/services/camera/libcameraservice/api1/Camera2Client.h b/services/camera/libcameraservice/api1/Camera2Client.h
index 5af74eb..1ebf4b0 100644
--- a/services/camera/libcameraservice/api1/Camera2Client.h
+++ b/services/camera/libcameraservice/api1/Camera2Client.h
@@ -91,7 +91,8 @@
     Camera2Client(const sp<CameraService>& cameraService,
             const sp<hardware::ICameraClient>& cameraClient,
             const String16& clientPackageName,
-            int cameraId,
+            const String8& cameraDeviceId,
+            int api1CameraId,
             int cameraFacing,
             int clientPid,
             uid_t clientUid,
diff --git a/services/camera/libcameraservice/api1/CameraClient.cpp b/services/camera/libcameraservice/api1/CameraClient.cpp
index e848a3f..8c6cd3d 100644
--- a/services/camera/libcameraservice/api1/CameraClient.cpp
+++ b/services/camera/libcameraservice/api1/CameraClient.cpp
@@ -42,7 +42,7 @@
         int clientPid, int clientUid,
         int servicePid, bool legacyMode):
         Client(cameraService, cameraClient, clientPackageName,
-                String8::format("%d", cameraId), cameraFacing, clientPid,
+                String8::format("%d", cameraId), cameraId, cameraFacing, clientPid,
                 clientUid, servicePid)
 {
     int callingPid = getCallingPid();
diff --git a/services/camera/libcameraservice/api1/client2/Parameters.cpp b/services/camera/libcameraservice/api1/client2/Parameters.cpp
index 050c3f7..b4c7e9d 100644
--- a/services/camera/libcameraservice/api1/client2/Parameters.cpp
+++ b/services/camera/libcameraservice/api1/client2/Parameters.cpp
@@ -759,12 +759,17 @@
     focusingAreas.clear();
     focusingAreas.add(Parameters::Area(0,0,0,0,0));
 
-    camera_metadata_ro_entry_t availableFocalLengths =
-        staticInfo(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS, 0, 0, false);
-    if (!availableFocalLengths.count) return NO_INIT;
+    if (fastInfo.isExternalCamera) {
+        params.setFloat(CameraParameters::KEY_FOCAL_LENGTH, -1.0);
+    } else {
+        camera_metadata_ro_entry_t availableFocalLengths =
+            staticInfo(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS, 0, 0, false);
+        if (!availableFocalLengths.count) return NO_INIT;
 
-    float minFocalLength = availableFocalLengths.data.f[0];
-    params.setFloat(CameraParameters::KEY_FOCAL_LENGTH, minFocalLength);
+        float minFocalLength = availableFocalLengths.data.f[0];
+        params.setFloat(CameraParameters::KEY_FOCAL_LENGTH, minFocalLength);
+    }
+
 
     float horizFov, vertFov;
     res = calculatePictureFovs(&horizFov, &vertFov);
@@ -1091,9 +1096,15 @@
             focusDistanceCalibration.data.u8[0] !=
             ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED);
 
+
+    camera_metadata_ro_entry_t hwLevel = staticInfo(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL);
+    if (!hwLevel.count) return NO_INIT;
+    fastInfo.isExternalCamera =
+            hwLevel.data.u8[0] == ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_EXTERNAL;
+
     camera_metadata_ro_entry_t availableFocalLengths =
-        staticInfo(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS);
-    if (!availableFocalLengths.count) return NO_INIT;
+        staticInfo(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS, 0, 0, /*required*/false);
+    if (!availableFocalLengths.count && !fastInfo.isExternalCamera) return NO_INIT;
 
     SortedVector<int32_t> availableFormats = getAvailableOutputFormats();
     if (!availableFormats.size()) return NO_INIT;
@@ -1178,10 +1189,14 @@
 
     // Find smallest (widest-angle) focal length to use as basis of still
     // picture FOV reporting.
-    fastInfo.minFocalLength = availableFocalLengths.data.f[0];
-    for (size_t i = 1; i < availableFocalLengths.count; i++) {
-        if (fastInfo.minFocalLength > availableFocalLengths.data.f[i]) {
-            fastInfo.minFocalLength = availableFocalLengths.data.f[i];
+    if (fastInfo.isExternalCamera) {
+        fastInfo.minFocalLength = -1.0;
+    } else {
+        fastInfo.minFocalLength = availableFocalLengths.data.f[0];
+        for (size_t i = 1; i < availableFocalLengths.count; i++) {
+            if (fastInfo.minFocalLength > availableFocalLengths.data.f[i]) {
+                fastInfo.minFocalLength = availableFocalLengths.data.f[i];
+            }
         }
     }
 
@@ -2870,8 +2885,13 @@
         if (sc.isInput == ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT &&
                 sc.format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED &&
                 sc.width <= limit.width && sc.height <= limit.height) {
-            Size sz = {sc.width, sc.height};
-            sizes->push(sz);
+            int64_t minFrameDuration = getMinFrameDurationNs(
+                    {sc.width, sc.height}, HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
+            if (minFrameDuration > MAX_PREVIEW_RECORD_DURATION_NS) {
+                // Filter slow sizes from preview/record
+                continue;
+            }
+            sizes->push({sc.width, sc.height});
         }
     }
 
@@ -3081,6 +3101,16 @@
 
 status_t Parameters::calculatePictureFovs(float *horizFov, float *vertFov)
         const {
+    if (fastInfo.isExternalCamera) {
+        if (horizFov != NULL) {
+            *horizFov = -1.0;
+        }
+        if (vertFov != NULL) {
+            *vertFov = -1.0;
+        }
+        return OK;
+    }
+
     camera_metadata_ro_entry_t sensorSize =
             staticInfo(ANDROID_SENSOR_INFO_PHYSICAL_SIZE, 2, 2);
     if (!sensorSize.count) return NO_INIT;
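
The Parameters.cpp hunks above let external cameras (ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_EXTERNAL) report -1 for focal length and picture FOVs instead of failing with NO_INIT when ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS is absent. A minimal sketch of that fallback follows; FastInfo and buildFastInfo are hypothetical names used only for illustration.

#include <cstdio>
#include <vector>

struct FastInfo {
    bool isExternalCamera;
    float minFocalLength;
};

// Hypothetical helper mirroring the initialize() logic above: external cameras
// skip the focal-length scan and report -1; others pick the smallest
// (widest-angle) focal length from the static metadata.
static FastInfo buildFastInfo(bool isExternal, const std::vector<float>& focalLengths) {
    FastInfo info{isExternal, -1.0f};
    if (!isExternal && !focalLengths.empty()) {
        info.minFocalLength = focalLengths[0];
        for (float f : focalLengths) {
            if (f < info.minFocalLength) info.minFocalLength = f;
        }
    }
    return info;
}

int main() {
    FastInfo internal = buildFastInfo(false, {4.38f, 6.0f});
    FastInfo external = buildFastInfo(true, {});
    std::printf("internal minFocalLength = %.2f\n", internal.minFocalLength); // 4.38
    std::printf("external minFocalLength = %.2f\n", external.minFocalLength); // -1.00
    return 0;
}
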
diff --git a/services/camera/libcameraservice/api1/client2/Parameters.h b/services/camera/libcameraservice/api1/client2/Parameters.h
index 17e3d75..fe725fd 100644
--- a/services/camera/libcameraservice/api1/client2/Parameters.h
+++ b/services/camera/libcameraservice/api1/client2/Parameters.h
@@ -207,6 +207,11 @@
     static const int32_t FPS_MARGIN = 1;
     // Max FPS for default parameters
     static const int32_t MAX_DEFAULT_FPS = 30;
+    // Minimum FPS for a size to be listed in supported preview/video sizes
+    // Set to slightly less than 30.0 to have some tolerance margin
+    static constexpr double MIN_PREVIEW_RECORD_FPS = 29.97;
+    // Maximum frame duration for a size to be listed in supported preview/video sizes
+    static constexpr int64_t MAX_PREVIEW_RECORD_DURATION_NS = 1e9 / MIN_PREVIEW_RECORD_FPS;
 
     // Full static camera info, object owned by someone else, such as
     // Camera2Device.
@@ -233,6 +238,7 @@
             }
         };
         DefaultKeyedVector<uint8_t, OverrideModes> sceneModeOverrides;
+        bool isExternalCamera;
         float minFocalLength;
         bool useFlexibleYuv;
         Size maxJpegSize;
@@ -380,6 +386,7 @@
     Vector<Size> availablePreviewSizes;
     Vector<Size> availableVideoSizes;
     // Get size list (that are no larger than limit) from static metadata.
+    // This method filters out sizes whose minimum frame duration exceeds
+    // MAX_PREVIEW_RECORD_DURATION_NS
     status_t getFilteredSizes(Size limit, Vector<Size> *sizes);
     // Get max size (from the size array) that matches the given aspect ratio.
     Size getMaxSizeForRatio(float ratio, const int32_t* sizeArray, size_t count);
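
The constants above feed the size filter added to getFilteredSizes() in Parameters.cpp earlier in this change: a preview/video size is kept only if its minimum frame duration can sustain roughly 30 fps (1e9 / 29.97 ≈ 33,366,700 ns). A small self-contained sketch of that cutoff, using assumed names rather than the Parameters class itself:

#include <cstdint>
#include <cstdio>

// Assumed names mirroring MIN_PREVIEW_RECORD_FPS / MAX_PREVIEW_RECORD_DURATION_NS.
static constexpr double kMinPreviewRecordFps = 29.97;
static constexpr int64_t kMaxPreviewRecordDurationNs =
        static_cast<int64_t>(1e9 / kMinPreviewRecordFps);

// A size survives the filter only if its minimum frame duration does not
// exceed the threshold, matching the "continue" in getFilteredSizes().
static bool keepForPreviewRecord(int64_t minFrameDurationNs) {
    return minFrameDurationNs <= kMaxPreviewRecordDurationNs;
}

int main() {
    std::printf("threshold = %lld ns\n",
            static_cast<long long>(kMaxPreviewRecordDurationNs)); // 33366700
    std::printf("~30 fps (33.3 ms): %s\n", keepForPreviewRecord(33333333) ? "keep" : "drop");
    std::printf("20 fps (50 ms):    %s\n", keepForPreviewRecord(50000000) ? "keep" : "drop");
    return 0;
}
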
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
index 4a72de0..a7cc3c3 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
@@ -49,6 +49,7 @@
         const sp<hardware::camera2::ICameraDeviceCallbacks>& remoteCallback,
         const String16& clientPackageName,
         const String8& cameraId,
+        int api1CameraId,
         int cameraFacing,
         int clientPid,
         uid_t clientUid,
@@ -62,6 +63,8 @@
             clientUid,
             servicePid),
     mRemoteCallback(remoteCallback) {
+    // We don't need it for API2 clients, but Camera2ClientBase requires it.
+    (void) api1CameraId;
 }
 
 // Interface used by CameraService
@@ -75,7 +78,8 @@
         uid_t clientUid,
         int servicePid) :
     Camera2ClientBase(cameraService, remoteCallback, clientPackageName,
-                cameraId, cameraFacing, clientPid, clientUid, servicePid),
+                cameraId, /*API1 camera ID*/ -1,
+                cameraFacing, clientPid, clientUid, servicePid),
     mInputStream(),
     mStreamingRequestId(REQUEST_ID_NONE),
     mRequestIdCounter(0) {
@@ -654,14 +658,6 @@
             return res;
 
         if (!isStreamInfoValid) {
-            // Streaming sharing is only supported for IMPLEMENTATION_DEFINED
-            // formats.
-            if (isShared && streamInfo.format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
-                String8 msg = String8::format("Camera %s: Stream sharing is only supported for "
-                        "IMPLEMENTATION_DEFINED format", mCameraIdStr.string());
-                ALOGW("%s: %s", __FUNCTION__, msg.string());
-                return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
-            }
             isStreamInfoValid = true;
         }
 
@@ -941,14 +937,6 @@
         if (!res.isOk())
             return res;
 
-        // Stream sharing is only supported for IMPLEMENTATION_DEFINED
-        // formats.
-        if (outInfo.format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
-            String8 msg = String8::format("Camera %s: Stream sharing is only supported for "
-                    "IMPLEMENTATION_DEFINED format", mCameraIdStr.string());
-            ALOGW("%s: %s", __FUNCTION__, msg.string());
-            return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
-        }
         streamInfos.push_back(outInfo);
         newOutputs.push_back(surface);
     }
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.h b/services/camera/libcameraservice/api2/CameraDeviceClient.h
index f1cd00c..4e049d8 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.h
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.h
@@ -45,6 +45,7 @@
             const sp<hardware::camera2::ICameraDeviceCallbacks>& remoteCallback,
             const String16& clientPackageName,
             const String8& cameraId,
+            int api1CameraId,
             int cameraFacing,
             int clientPid,
             uid_t clientUid,
diff --git a/services/camera/libcameraservice/common/Camera2ClientBase.cpp b/services/camera/libcameraservice/common/Camera2ClientBase.cpp
index 4ce82dc..db26027 100644
--- a/services/camera/libcameraservice/common/Camera2ClientBase.cpp
+++ b/services/camera/libcameraservice/common/Camera2ClientBase.cpp
@@ -48,15 +48,16 @@
         const sp<TCamCallbacks>& remoteCallback,
         const String16& clientPackageName,
         const String8& cameraId,
+        int api1CameraId,
         int cameraFacing,
         int clientPid,
         uid_t clientUid,
         int servicePid):
         TClientBase(cameraService, remoteCallback, clientPackageName,
-                cameraId, cameraFacing, clientPid, clientUid, servicePid),
+                cameraId, api1CameraId, cameraFacing, clientPid, clientUid, servicePid),
         mSharedCameraCallbacks(remoteCallback),
         mDeviceVersion(cameraService->getDeviceVersion(TClientBase::mCameraIdStr)),
-        mDeviceActive(false)
+        mDeviceActive(false), mApi1CameraId(api1CameraId)
 {
     ALOGI("Camera %s: Opened. Client: %s (PID %d, UID %d)", cameraId.string(),
             String8(clientPackageName).string(), clientPid, clientUid);
@@ -329,7 +330,7 @@
 
 template <typename TClientBase>
 int Camera2ClientBase<TClientBase>::getCameraId() const {
-    return std::stoi(TClientBase::mCameraIdStr.string());
+    return mApi1CameraId;
 }
 
 template <typename TClientBase>
diff --git a/services/camera/libcameraservice/common/Camera2ClientBase.h b/services/camera/libcameraservice/common/Camera2ClientBase.h
index e898d5d..edeae5b 100644
--- a/services/camera/libcameraservice/common/Camera2ClientBase.h
+++ b/services/camera/libcameraservice/common/Camera2ClientBase.h
@@ -49,6 +49,7 @@
                       const sp<TCamCallbacks>& remoteCallback,
                       const String16& clientPackageName,
                       const String8& cameraId,
+                      int api1CameraId,
                       int cameraFacing,
                       int clientPid,
                       uid_t clientUid,
@@ -140,6 +141,8 @@
 
     bool                  mDeviceActive;
 
+    const int             mApi1CameraId; // -1 if client is API2
+
 private:
     template<typename TProviderPtr>
     status_t              initializeImpl(TProviderPtr providerPtr);
diff --git a/services/camera/libcameraservice/common/CameraProviderManager.cpp b/services/camera/libcameraservice/common/CameraProviderManager.cpp
index 70e7761..b37f004 100644
--- a/services/camera/libcameraservice/common/CameraProviderManager.cpp
+++ b/services/camera/libcameraservice/common/CameraProviderManager.cpp
@@ -20,11 +20,13 @@
 
 #include "CameraProviderManager.h"
 
+#include <algorithm>
 #include <chrono>
 #include <inttypes.h>
 #include <hidl/ServiceManagement.h>
 #include <functional>
 #include <camera_metadata_hidden.h>
+#include <android-base/parseint.h>
 
 namespace android {
 
@@ -38,7 +40,7 @@
 const std::string kExternalProviderName("external/0");
 
 // Slash-separated list of provider types to consider for use via the old camera API
-const std::string kStandardProviderTypes("internal/legacy");
+const std::string kStandardProviderTypes("internal/legacy/external");
 
 } // anonymous namespace
 
@@ -79,18 +81,7 @@
     std::lock_guard<std::mutex> lock(mInterfaceMutex);
     int count = 0;
     for (auto& provider : mProviders) {
-        count += provider->mUniqueDeviceCount;
-    }
-    return count;
-}
-
-int CameraProviderManager::getAPI1CompatibleCameraCount() const {
-    std::lock_guard<std::mutex> lock(mInterfaceMutex);
-    int count = 0;
-    for (auto& provider : mProviders) {
-        if (kStandardProviderTypes.find(provider->getType()) != std::string::npos) {
-            count += provider->mUniqueAPI1CompatibleCameraIds.size();
-        }
+        count += provider->mUniqueCameraIds.size();
     }
     return count;
 }
@@ -116,6 +107,24 @@
             }
         }
     }
+
+    std::sort(deviceIds.begin(), deviceIds.end(),
+            [](const std::string& a, const std::string& b) -> bool {
+                uint32_t aUint = 0, bUint = 0;
+                bool aIsUint = base::ParseUint(a, &aUint);
+                bool bIsUint = base::ParseUint(b, &bUint);
+
+                // Uint device IDs first
+                if (aIsUint && bIsUint) {
+                    return aUint < bUint;
+                } else if (aIsUint) {
+                    return true;
+                } else if (bIsUint) {
+                    return false;
+                }
+                // Simple string compare if neither ID is a uint
+                return a < b;
+            });
     return deviceIds;
 }
 
@@ -480,6 +489,8 @@
     }
     ALOGI("Connecting to new camera provider: %s, isRemote? %d",
             mProviderName.c_str(), mInterface->isRemote());
+    // cameraDeviceStatusChange callbacks may be called (causing new devices to be
+    // added) before setCallback returns
     hardware::Return<Status> status = mInterface->setCallback(this);
     if (!status.isOk()) {
         ALOGE("%s: Transaction error setting up callbacks with camera provider '%s': %s",
@@ -536,17 +547,10 @@
         }
     }
 
-    for (auto& device : mDevices) {
-        mUniqueCameraIds.insert(device->mId);
-        if (device->isAPI1Compatible()) {
-            mUniqueAPI1CompatibleCameraIds.insert(device->mId);
-        }
-    }
-    mUniqueDeviceCount = mUniqueCameraIds.size();
-
     ALOGI("Camera provider %s ready with %zu camera devices",
             mProviderName.c_str(), mDevices.size());
 
+    mInitialized = true;
     return OK;
 }
 
@@ -594,9 +598,15 @@
     }
     if (deviceInfo == nullptr) return BAD_VALUE;
     deviceInfo->mStatus = initialStatus;
+    bool isAPI1Compatible = deviceInfo->isAPI1Compatible();
 
     mDevices.push_back(std::move(deviceInfo));
 
+    mUniqueCameraIds.insert(id);
+    if (isAPI1Compatible) {
+        mUniqueAPI1CompatibleCameraIds.insert(id);
+    }
+
     if (parsedId != nullptr) {
         *parsedId = id;
     }
@@ -606,6 +616,10 @@
 void CameraProviderManager::ProviderInfo::removeDevice(std::string id) {
     for (auto it = mDevices.begin(); it != mDevices.end(); it++) {
         if ((*it)->mId == id) {
+            mUniqueCameraIds.erase(id);
+            if ((*it)->isAPI1Compatible()) {
+                mUniqueAPI1CompatibleCameraIds.erase(id);
+            }
             mDevices.erase(it);
             break;
         }
@@ -671,6 +685,7 @@
         CameraDeviceStatus newStatus) {
     sp<StatusListener> listener;
     std::string id;
+    bool initialized = false;
     {
         std::lock_guard<std::mutex> lock(mLock);
         bool known = false;
@@ -697,9 +712,13 @@
             removeDevice(id);
         }
         listener = mManager->getStatusListener();
+        initialized = mInitialized;
     }
     // Call without lock held to allow reentrancy into provider manager
-    if (listener != nullptr) {
+    // Don't send the callback if providerInfo hasn't been initialized.
+    // CameraService will initialize the device status once the provider
+    // is initialized.
+    if (listener != nullptr && initialized) {
         listener->onDeviceStatusChanged(String8(id.c_str()), newStatus);
     }
     return hardware::Void();
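
The getCameraDeviceIds() hunk above sorts the returned IDs so that IDs parsing as unsigned integers come first in ascending numeric order, with the remaining IDs following in plain string order. Below is a self-contained sketch of that comparator; std::strtoul is used here as a stand-in for android::base::ParseUint, so treat it as illustrative rather than the provider-manager code itself.

#include <algorithm>
#include <cstdio>
#include <cstdlib>
#include <string>
#include <vector>

// Stand-in for android::base::ParseUint: succeeds only if the whole string is
// a base-10 unsigned integer.
static bool parseUint(const std::string& s, unsigned long* out) {
    if (s.empty()) return false;
    char* end = nullptr;
    unsigned long v = std::strtoul(s.c_str(), &end, 10);
    if (end == nullptr || *end != '\0') return false;
    *out = v;
    return true;
}

int main() {
    std::vector<std::string> ids = {"10", "back-0", "2", "1"};
    std::sort(ids.begin(), ids.end(),
            [](const std::string& a, const std::string& b) -> bool {
                unsigned long aUint = 0, bUint = 0;
                bool aIsUint = parseUint(a, &aUint);
                bool bIsUint = parseUint(b, &bUint);
                if (aIsUint && bIsUint) return aUint < bUint; // numeric IDs first, ascending
                if (aIsUint) return true;
                if (bIsUint) return false;
                return a < b;                                 // otherwise plain string order
            });
    for (const auto& id : ids) std::printf("%s ", id.c_str()); // prints: 1 2 10 back-0
    std::printf("\n");
    return 0;
}
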
diff --git a/services/camera/libcameraservice/common/CameraProviderManager.h b/services/camera/libcameraservice/common/CameraProviderManager.h
index d02abb0..bbe6789 100644
--- a/services/camera/libcameraservice/common/CameraProviderManager.h
+++ b/services/camera/libcameraservice/common/CameraProviderManager.h
@@ -18,7 +18,7 @@
 #define ANDROID_SERVERS_CAMERA_CAMERAPROVIDER_H
 
 #include <vector>
-#include <set>
+#include <unordered_set>
 #include <string>
 #include <mutex>
 
@@ -125,16 +125,14 @@
      */
     int getCameraCount() const;
 
+    std::vector<std::string> getCameraDeviceIds() const;
+
     /**
      * Retrieve the number of API1 compatible cameras; these are internal and
      * backwards-compatible. This is the set of cameras that will be
-     * accessible via the old camera API, with IDs in range of
-     * [0, getAPI1CompatibleCameraCount()-1]. This value is not expected to change dynamically.
+     * accessible via the old camera API.
+     * The return value may change dynamically due to external camera hotplug.
      */
-    int getAPI1CompatibleCameraCount() const;
-
-    std::vector<std::string> getCameraDeviceIds() const;
-
     std::vector<std::string> getAPI1CompatibleCameraDeviceIds() const;
 
     /**
@@ -314,9 +312,9 @@
             static status_t setTorchMode(InterfaceT& interface, bool enabled);
         };
         std::vector<std::unique_ptr<DeviceInfo>> mDevices;
-        std::set<std::string> mUniqueCameraIds;
+        std::unordered_set<std::string> mUniqueCameraIds;
         int mUniqueDeviceCount;
-        std::set<std::string> mUniqueAPI1CompatibleCameraIds;
+        std::unordered_set<std::string> mUniqueAPI1CompatibleCameraIds;
 
         // HALv1-specific camera fields, including the actual device interface
         struct DeviceInfo1 : public DeviceInfo {
@@ -366,6 +364,8 @@
 
         CameraProviderManager *mManager;
 
+        bool mInitialized = false;
+
         // Templated method to instantiate the right kind of DeviceInfo and call the
         // right CameraProvider getCameraDeviceInterface_* method.
         template<class DeviceInfoT>