NuPlayer2: fork the native code for MediaPlayer2

Forked from fec2f93fae282ad10bbb5e3fcce9f60eff2cfb48
NuPlayer2Decoder uses NdkMediaCodec, NdkMediaCrypto.
NuPlayer2Drm uses NdkMediaDrm.
No more IMediaHTTPService and IMediaHTTPConnection in NuPlayer2.
Test: compiles
Bug: 69805888

Change-Id: Ica3c168d40dcf114fb44e8688a9ede671e8d77ef
diff --git a/media/libmedia/Android.bp b/media/libmedia/Android.bp
index 1f4bd0a..95e95de 100644
--- a/media/libmedia/Android.bp
+++ b/media/libmedia/Android.bp
@@ -236,3 +236,142 @@
         },
     },
 }
+
+cc_library_shared {
+    name: "libmedia_player2_util",
+
+    srcs: [
+        "BufferingSettings.cpp",
+        "IDataSource.cpp",
+        "IMediaExtractor.cpp",
+        "IMediaExtractorService.cpp",
+        "IMediaSource.cpp",
+        "IStreamSource.cpp",
+        "MediaUtils.cpp",
+        "Metadata.cpp",
+    ],
+
+    shared_libs: [
+        "libbinder",
+        "libcutils",
+        "libgui",
+        "liblog",
+        "libmediaextractor",
+        "libstagefright_foundation",
+        "libui",
+        "libutils",
+    ],
+
+    export_shared_lib_headers: [
+        "libbinder",
+    ],
+
+    header_libs: [
+        "media_plugin_headers",
+    ],
+
+    static_libs: [
+        "libc_malloc_debug_backtrace",  // for memory heap analysis
+
+        "libstagefright_nuplayer2",
+        "libstagefright_rtsp",
+        "libstagefright_timedtext",
+    ],
+
+    export_include_dirs: [
+        "include",
+    ],
+
+    cflags: [
+        "-Werror",
+        "-Wno-error=deprecated-declarations",
+        "-Wall",
+    ],
+
+    sanitize: {
+        misc_undefined: [
+            "unsigned-integer-overflow",
+            "signed-integer-overflow",
+        ],
+        cfi: true,
+        diag: {
+            cfi: true,
+        },
+    },
+}
+
+cc_library_shared {
+    name: "libmedia_player2",
+
+    srcs: [
+        "AudioParameter.cpp",
+        "MediaPlayer2Factory.cpp",
+        "MediaPlayer2Manager.cpp",
+        "TestPlayerStub.cpp",
+        "TypeConverter.cpp",
+        "mediaplayer2.cpp",
+    ],
+
+    shared_libs: [
+        "libaudioclient",
+        "libbinder",
+        "libcutils",
+        "libgui",
+        "liblog",
+        "libmedia_omx",
+        "libmedia_player2_util",
+        "libmediaextractor",
+        "libstagefright_foundation",
+        "libui",
+        "libutils",
+
+        "libcrypto",
+        "libmediadrm",
+        "libmediametrics",
+        "libmediandk",
+        "libmediautils",
+        "libmemunreachable",
+        "libpowermanager",
+        "libstagefright_httplive",
+        "libstagefright_player2",
+    ],
+
+    export_shared_lib_headers: [
+        "libaudioclient",
+        "libbinder",
+        "libmedia_omx",
+    ],
+
+    header_libs: [
+        "media_plugin_headers",
+    ],
+
+    static_libs: [
+        "libc_malloc_debug_backtrace",  // for memory heap analysis
+
+        "libstagefright_nuplayer2",
+        "libstagefright_rtsp",
+        "libstagefright_timedtext",
+    ],
+
+    export_include_dirs: [
+        "include",
+    ],
+
+    cflags: [
+        "-Werror",
+        "-Wno-error=deprecated-declarations",
+        "-Wall",
+    ],
+
+    sanitize: {
+        misc_undefined: [
+            "unsigned-integer-overflow",
+            "signed-integer-overflow",
+        ],
+        cfi: true,
+        diag: {
+            cfi: true,
+        },
+    },
+}
diff --git a/media/libmedia/MediaPlayer2Factory.cpp b/media/libmedia/MediaPlayer2Factory.cpp
new file mode 100644
index 0000000..ca7b96f
--- /dev/null
+++ b/media/libmedia/MediaPlayer2Factory.cpp
@@ -0,0 +1,262 @@
+/*
+**
+** Copyright 2017, The Android Open Source Project
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+**     http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "MediaPlayer2Factory"
+#include <utils/Log.h>
+
+#include <cutils/properties.h>
+#include <media/DataSource.h>
+#include <media/MediaPlayer2Engine.h>
+#include <media/stagefright/FileSource.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <utils/Errors.h>
+#include <utils/misc.h>
+
+#include "MediaPlayer2Factory.h"
+
+#include "TestPlayerStub.h"
+#include "nuplayer2/NuPlayer2Driver.h"
+
+namespace android {
+
+Mutex MediaPlayer2Factory::sLock;
+MediaPlayer2Factory::tFactoryMap MediaPlayer2Factory::sFactoryMap;
+bool MediaPlayer2Factory::sInitComplete = false;
+
+status_t MediaPlayer2Factory::registerFactory_l(IFactory* factory,
+                                                player2_type type) {
+    if (NULL == factory) {
+        ALOGE("Failed to register MediaPlayer2Factory of type %d, factory is"
+              " NULL.", type);
+        return BAD_VALUE;
+    }
+
+    if (sFactoryMap.indexOfKey(type) >= 0) {
+        ALOGE("Failed to register MediaPlayer2Factory of type %d, type is"
+              " already registered.", type);
+        return ALREADY_EXISTS;
+    }
+
+    if (sFactoryMap.add(type, factory) < 0) {
+        ALOGE("Failed to register MediaPlayer2Factory of type %d, failed to add"
+              " to map.", type);
+        return UNKNOWN_ERROR;
+    }
+
+    return OK;
+}
+
+static player2_type getDefaultPlayerType() {
+    return PLAYER2_NU_PLAYER2;
+}
+
+status_t MediaPlayer2Factory::registerFactory(IFactory* factory,
+                                              player2_type type) {
+    Mutex::Autolock lock_(&sLock);
+    return registerFactory_l(factory, type);
+}
+
+void MediaPlayer2Factory::unregisterFactory(player2_type type) {
+    Mutex::Autolock lock_(&sLock);
+    sFactoryMap.removeItem(type);
+}
+
+#define GET_PLAYER_TYPE_IMPL(a...)                      \
+    Mutex::Autolock lock_(&sLock);                      \
+                                                        \
+    player2_type ret = PLAYER2_STAGEFRIGHT_PLAYER;      \
+    float bestScore = 0.0;                              \
+                                                        \
+    for (size_t i = 0; i < sFactoryMap.size(); ++i) {   \
+                                                        \
+        IFactory* v = sFactoryMap.valueAt(i);           \
+        float thisScore;                                \
+        CHECK(v != NULL);                               \
+        thisScore = v->scoreFactory(a, bestScore);      \
+        if (thisScore > bestScore) {                    \
+            ret = sFactoryMap.keyAt(i);                 \
+            bestScore = thisScore;                      \
+        }                                               \
+    }                                                   \
+                                                        \
+    if (0.0 == bestScore) {                             \
+        ret = getDefaultPlayerType();                   \
+    }                                                   \
+                                                        \
+    return ret;
+
+player2_type MediaPlayer2Factory::getPlayerType(const sp<MediaPlayer2Engine>& client,
+                                               const char* url) {
+    GET_PLAYER_TYPE_IMPL(client, url);
+}
+
+player2_type MediaPlayer2Factory::getPlayerType(const sp<MediaPlayer2Engine>& client,
+                                                int fd,
+                                                int64_t offset,
+                                                int64_t length) {
+    GET_PLAYER_TYPE_IMPL(client, fd, offset, length);
+}
+
+player2_type MediaPlayer2Factory::getPlayerType(const sp<MediaPlayer2Engine>& client,
+                                                const sp<IStreamSource> &source) {
+    GET_PLAYER_TYPE_IMPL(client, source);
+}
+
+player2_type MediaPlayer2Factory::getPlayerType(const sp<MediaPlayer2Engine>& client,
+                                                const sp<DataSource> &source) {
+    GET_PLAYER_TYPE_IMPL(client, source);
+}
+
+#undef GET_PLAYER_TYPE_IMPL
+
+sp<MediaPlayer2Base> MediaPlayer2Factory::createPlayer(
+        player2_type playerType,
+        void* cookie,
+        notify_callback_f notifyFunc,
+        pid_t pid) {
+    sp<MediaPlayer2Base> p;
+    IFactory* factory;
+    status_t init_result;
+    Mutex::Autolock lock_(&sLock);
+
+    if (sFactoryMap.indexOfKey(playerType) < 0) {
+        ALOGE("Failed to create player object of type %d, no registered"
+              " factory", playerType);
+        return p;
+    }
+
+    factory = sFactoryMap.valueFor(playerType);
+    CHECK(NULL != factory);
+    p = factory->createPlayer(pid);
+
+    if (p == NULL) {
+        ALOGE("Failed to create player object of type %d, create failed",
+              playerType);
+        return p;
+    }
+
+    init_result = p->initCheck();
+    if (init_result == NO_ERROR) {
+        p->setNotifyCallback(cookie, notifyFunc);
+    } else {
+        ALOGE("Failed to create player object of type %d, initCheck failed"
+              " (res = %d)", playerType, init_result);
+        p.clear();
+    }
+
+    return p;
+}
+
+/*****************************************************************************
+ *                                                                           *
+ *                     Built-In Factory Implementations                      *
+ *                                                                           *
+ *****************************************************************************/
+
+class NuPlayer2Factory : public MediaPlayer2Factory::IFactory {
+  public:
+    virtual float scoreFactory(const sp<MediaPlayer2Engine>& /*client*/,
+                               const char* url,
+                               float curScore) {
+        static const float kOurScore = 0.8;
+
+        if (kOurScore <= curScore) {
+            return 0.0;
+        }
+
+        if (!strncasecmp("http://", url, 7)
+                || !strncasecmp("https://", url, 8)
+                || !strncasecmp("file://", url, 7)) {
+            size_t len = strlen(url);
+            if (len >= 5 && !strcasecmp(".m3u8", &url[len - 5])) {
+                return kOurScore;
+            }
+
+            if (strstr(url,"m3u8")) {
+                return kOurScore;
+            }
+
+            if ((len >= 4 && !strcasecmp(".sdp", &url[len - 4])) || strstr(url, ".sdp?")) {
+                return kOurScore;
+            }
+        }
+
+        if (!strncasecmp("rtsp://", url, 7)) {
+            return kOurScore;
+        }
+
+        return 0.0;
+    }
+
+    virtual float scoreFactory(const sp<MediaPlayer2Engine>& /*client*/,
+                               const sp<IStreamSource>& /*source*/,
+                               float /*curScore*/) {
+        return 1.0;
+    }
+
+    virtual float scoreFactory(const sp<MediaPlayer2Engine>& /*client*/,
+                               const sp<DataSource>& /*source*/,
+                               float /*curScore*/) {
+        // Only NuPlayer2 supports setting a DataSource source directly.
+        return 1.0;
+    }
+
+    virtual sp<MediaPlayer2Base> createPlayer(pid_t pid) {
+        ALOGV(" create NuPlayer2");
+        return new NuPlayer2Driver(pid);
+    }
+};
+
+class TestPlayerFactory : public MediaPlayer2Factory::IFactory {
+  public:
+    virtual float scoreFactory(const sp<MediaPlayer2Engine>& /*client*/,
+                               const char* url,
+                               float /*curScore*/) {
+        if (TestPlayerStub::canBeUsed(url)) {
+            return 1.0;
+        }
+
+        return 0.0;
+    }
+
+    virtual sp<MediaPlayer2Base> createPlayer(pid_t /* pid */) {
+        ALOGV("Create Test Player stub");
+        return new TestPlayerStub();
+    }
+};
+
+void MediaPlayer2Factory::registerBuiltinFactories() {
+    Mutex::Autolock lock_(&sLock);
+
+    if (sInitComplete) {
+        return;
+    }
+
+    IFactory* factory = new NuPlayer2Factory();
+    if (registerFactory_l(factory, PLAYER2_NU_PLAYER2) != OK) {
+        delete factory;
+    }
+    factory = new TestPlayerFactory();
+    if (registerFactory_l(factory, PLAYER2_TEST_PLAYER) != OK) {
+        delete factory;
+    }
+
+    sInitComplete = true;
+}
+
+}  // namespace android
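
Note on the selection scheme (carried over from the original MediaPlayerFactory): getPlayerType() asks every registered IFactory to score the data source and picks the highest bidder, falling back to PLAYER2_NU_PLAYER2 when nothing scores above 0.0. Purely as an illustration (not part of this change), an out-of-tree factory would plug in roughly like this, where PLAYER2_CUSTOM and CustomPlayer2 are hypothetical names:

    class CustomPlayer2Factory : public MediaPlayer2Factory::IFactory {
      public:
        virtual float scoreFactory(const sp<MediaPlayer2Engine>& /*client*/,
                                   const char* url,
                                   float curScore) {
            static const float kOurScore = 0.9;
            if (kOurScore <= curScore) {
                return 0.0;
            }
            // Claim only a custom scheme; everything else stays with NuPlayer2.
            return strncasecmp("custom://", url, 9) ? 0.0 : kOurScore;
        }

        virtual sp<MediaPlayer2Base> createPlayer(pid_t pid) {
            return new CustomPlayer2(pid);  // hypothetical MediaPlayer2Base subclass
        }
    };

    // e.g. during process initialization:
    // MediaPlayer2Factory::registerFactory(new CustomPlayer2Factory(), PLAYER2_CUSTOM);
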
diff --git a/media/libmedia/MediaPlayer2Factory.h b/media/libmedia/MediaPlayer2Factory.h
new file mode 100644
index 0000000..5813b2e
--- /dev/null
+++ b/media/libmedia/MediaPlayer2Factory.h
@@ -0,0 +1,90 @@
+/*
+**
+** Copyright 2017, The Android Open Source Project
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+**     http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+#ifndef ANDROID_MEDIAPLAYER2FACTORY_H
+#define ANDROID_MEDIAPLAYER2FACTORY_H
+
+#include <media/MediaPlayer2Interface.h>
+#include <media/stagefright/foundation/ABase.h>
+
+namespace android {
+
+class MediaPlayer2Factory {
+  public:
+    class IFactory {
+      public:
+        virtual ~IFactory() { }
+
+        virtual float scoreFactory(const sp<MediaPlayer2Engine>& /*client*/,
+                                   const char* /*url*/,
+                                   float /*curScore*/) { return 0.0; }
+
+        virtual float scoreFactory(const sp<MediaPlayer2Engine>& /*client*/,
+                                   int /*fd*/,
+                                   int64_t /*offset*/,
+                                   int64_t /*length*/,
+                                   float /*curScore*/) { return 0.0; }
+
+        virtual float scoreFactory(const sp<MediaPlayer2Engine>& /*client*/,
+                                   const sp<IStreamSource> &/*source*/,
+                                   float /*curScore*/) { return 0.0; }
+
+        virtual float scoreFactory(const sp<MediaPlayer2Engine>& /*client*/,
+                                   const sp<DataSource> &/*source*/,
+                                   float /*curScore*/) { return 0.0; }
+
+        virtual sp<MediaPlayer2Base> createPlayer(pid_t pid) = 0;
+    };
+
+    static status_t registerFactory(IFactory* factory,
+                                    player2_type type);
+    static void unregisterFactory(player2_type type);
+    static player2_type getPlayerType(const sp<MediaPlayer2Engine>& client,
+                                      const char* url);
+    static player2_type getPlayerType(const sp<MediaPlayer2Engine>& client,
+                                      int fd,
+                                      int64_t offset,
+                                      int64_t length);
+    static player2_type getPlayerType(const sp<MediaPlayer2Engine>& client,
+                                      const sp<IStreamSource> &source);
+    static player2_type getPlayerType(const sp<MediaPlayer2Engine>& client,
+                                      const sp<DataSource> &source);
+
+    static sp<MediaPlayer2Base> createPlayer(player2_type playerType,
+                                             void* cookie,
+                                             notify_callback_f notifyFunc,
+                                             pid_t pid);
+
+    static void registerBuiltinFactories();
+
+  private:
+    typedef KeyedVector<player2_type, IFactory*> tFactoryMap;
+
+    MediaPlayer2Factory() { }
+
+    static status_t registerFactory_l(IFactory* factory,
+                                      player2_type type);
+
+    static Mutex       sLock;
+    static tFactoryMap sFactoryMap;
+    static bool        sInitComplete;
+
+    DISALLOW_EVIL_CONSTRUCTORS(MediaPlayer2Factory);
+};
+
+}  // namespace android
+#endif  // ANDROID_MEDIAPLAYER2FACTORY_H
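
For context before the manager below: once factories are registered, callers go through the two static helpers, e.g. (sketch only; client, cookie, notifyFunc and pid stand in for the Client's own values in MediaPlayer2Manager.cpp):

    // Pick the best-scoring factory for the source, then instantiate the player.
    player2_type type = MediaPlayer2Factory::getPlayerType(client, "https://example.com/live.m3u8");
    sp<MediaPlayer2Base> player =
            MediaPlayer2Factory::createPlayer(type, cookie, notifyFunc, pid);
    if (player == NULL) {
        // no registered factory produced a player that passed initCheck()
    }
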
diff --git a/media/libmedia/MediaPlayer2Manager.cpp b/media/libmedia/MediaPlayer2Manager.cpp
new file mode 100644
index 0000000..9b7f98a
--- /dev/null
+++ b/media/libmedia/MediaPlayer2Manager.cpp
@@ -0,0 +1,2391 @@
+/*
+**
+** Copyright 2017, The Android Open Source Project
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+**     http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+// Proxy for media player implementations
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "MediaPlayer2Manager"
+#include <utils/Log.h>
+
+#include <sys/types.h>
+#include <sys/stat.h>
+#include <sys/time.h>
+#include <dirent.h>
+#include <unistd.h>
+
+#include <string.h>
+
+#include <cutils/atomic.h>
+#include <cutils/properties.h> // for property_get
+
+#include <utils/misc.h>
+
+#include <binder/IPCThreadState.h>
+#include <binder/IServiceManager.h>
+#include <binder/MemoryHeapBase.h>
+#include <binder/MemoryBase.h>
+#include <gui/Surface.h>
+#include <utils/Errors.h>  // for status_t
+#include <utils/String8.h>
+#include <utils/SystemClock.h>
+#include <utils/Timers.h>
+#include <utils/Vector.h>
+
+#include <media/AudioPolicyHelper.h>
+#include <media/MediaHTTPService.h>
+#include <media/MediaPlayer2EngineClient.h>
+#include <media/MediaPlayer2Interface.h>
+#include <media/Metadata.h>
+#include <media/AudioTrack.h>
+#include <media/MemoryLeakTrackUtil.h>
+#include <media/stagefright/InterfaceUtils.h>
+#include <media/stagefright/MediaCodecList.h>
+#include <media/stagefright/MediaErrors.h>
+#include <media/stagefright/Utils.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/ALooperRoster.h>
+#include <media/stagefright/SurfaceUtils.h>
+#include <mediautils/BatteryNotifier.h>
+
+#include <memunreachable/memunreachable.h>
+#include <system/audio.h>
+
+#include <private/android_filesystem_config.h>
+
+#include "MediaPlayer2Manager.h"
+#include "MediaPlayer2Factory.h"
+
+static const int kDumpLockRetries = 50;
+static const int kDumpLockSleepUs = 20000;
+
+namespace {
+using android::media::Metadata;
+using android::status_t;
+using android::OK;
+using android::BAD_VALUE;
+using android::NOT_ENOUGH_DATA;
+using android::Parcel;
+using android::media::VolumeShaper;
+
+// Max number of entries in the filter.
+const int kMaxFilterSize = 64;  // I pulled that out of thin air.
+
+const float kMaxRequiredSpeed = 8.0f; // for PCM tracks allow up to 8x speedup.
+
+// FIXME: Move all the metadata-related functions into Metadata.cpp
+
+
+// Unmarshall a filter from a Parcel.
+// Filter format in a parcel:
+//
+//  0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// |                       number of entries (n)                   |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// |                       metadata type 1                         |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// |                       metadata type 2                         |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+//  ....
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// |                       metadata type n                         |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+//
+// @param p Parcel that should start with a filter.
+// @param[out] filter On exit contains the list of metadata types to be
+//                    filtered.
+// @param[out] status On exit contains the status code to be returned.
+// @return true if the parcel starts with a valid filter.
+bool unmarshallFilter(const Parcel& p,
+                      Metadata::Filter *filter,
+                      status_t *status)
+{
+    int32_t val;
+    if (p.readInt32(&val) != OK)
+    {
+        ALOGE("Failed to read filter's length");
+        *status = NOT_ENOUGH_DATA;
+        return false;
+    }
+
+    if( val > kMaxFilterSize || val < 0)
+    {
+        ALOGE("Invalid filter len %d", val);
+        *status = BAD_VALUE;
+        return false;
+    }
+
+    const size_t num = val;
+
+    filter->clear();
+    filter->setCapacity(num);
+
+    size_t size = num * sizeof(Metadata::Type);
+
+
+    if (p.dataAvail() < size)
+    {
+        ALOGE("Filter too short expected %zu but got %zu", size, p.dataAvail());
+        *status = NOT_ENOUGH_DATA;
+        return false;
+    }
+
+    const Metadata::Type *data =
+            static_cast<const Metadata::Type*>(p.readInplace(size));
+
+    if (NULL == data)
+    {
+        ALOGE("Filter had no data");
+        *status = BAD_VALUE;
+        return false;
+    }
+
+    // TODO: The STL vector implementation would be more efficient here
+    // because it degenerates into a memcpy on POD types. Try to
+    // replace later or use std::set.
+    for (size_t i = 0; i < num; ++i)
+    {
+        filter->add(*data);
+        ++data;
+    }
+    *status = OK;
+    return true;
+}
+
+// @param filter Of metadata type.
+// @param val To be searched.
+// @return true if a match was found.
+bool findMetadata(const Metadata::Filter& filter, const int32_t val)
+{
+    // Deal with empty and ANY right away
+    if (filter.isEmpty()) return false;
+    if (filter[0] == Metadata::kAny) return true;
+
+    return filter.indexOf(val) >= 0;
+}
+
+}  // anonymous namespace
+
+
+namespace {
+using android::Parcel;
+using android::String16;
+
+// marshalling tag indicating flattened utf16 tags
+// keep in sync with frameworks/base/media/java/android/media/AudioAttributes.java
+const int32_t kAudioAttributesMarshallTagFlattenTags = 1;
+
+// Audio attributes format in a parcel:
+//
+//  0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// |                       usage                                   |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// |                       content_type                            |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// |                       source                                  |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// |                       flags                                   |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// |                       kAudioAttributesMarshallTagFlattenTags  | // ignore tags if not found
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// |                       flattened tags in UTF16                 |
+// |                         ...                                   |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+//
+// @param parcel Parcel that contains the audio attributes.
+// @param[out] attributes On exit points to an initialized audio_attributes_t
+//                        structure.
+void unmarshallAudioAttributes(const Parcel& parcel, audio_attributes_t *attributes)
+{
+    attributes->usage = (audio_usage_t) parcel.readInt32();
+    attributes->content_type = (audio_content_type_t) parcel.readInt32();
+    attributes->source = (audio_source_t) parcel.readInt32();
+    attributes->flags = (audio_flags_mask_t) parcel.readInt32();
+    const bool hasFlattenedTag = (parcel.readInt32() == kAudioAttributesMarshallTagFlattenTags);
+    if (hasFlattenedTag) {
+        // the tags are UTF16, convert to UTF8
+        String16 tags = parcel.readString16();
+        ssize_t realTagSize = utf16_to_utf8_length(tags.string(), tags.size());
+        if (realTagSize <= 0) {
+            strcpy(attributes->tags, "");
+        } else {
+            // copy the flattened string into the attributes as the destination for the conversion:
+            // copying array size -1, array for tags was calloc'd, no need to NULL-terminate it
+            size_t tagSize = realTagSize > AUDIO_ATTRIBUTES_TAGS_MAX_SIZE - 1 ?
+                    AUDIO_ATTRIBUTES_TAGS_MAX_SIZE - 1 : realTagSize;
+            utf16_to_utf8(tags.string(), tagSize, attributes->tags,
+                    sizeof(attributes->tags) / sizeof(attributes->tags[0]));
+        }
+    } else {
+        ALOGE("unmarshallAudioAttributes() received unflattened tags, ignoring tag values");
+        strcpy(attributes->tags, "");
+    }
+}
+} // anonymous namespace
+
+
+namespace android {
+
+extern ALooperRoster gLooperRoster;
+
+MediaPlayer2Manager gMediaPlayer2Manager;
+
+static bool checkPermission(const char* permissionString) {
+    if (getpid() == IPCThreadState::self()->getCallingPid()) return true;
+    bool ok = checkCallingPermission(String16(permissionString));
+    if (!ok) ALOGE("Request requires %s", permissionString);
+    return ok;
+}
+
+// TODO: Find real cause of Audio/Video delay in PV framework and remove this workaround
+/* static */ int MediaPlayer2Manager::AudioOutput::mMinBufferCount = 4;
+/* static */ bool MediaPlayer2Manager::AudioOutput::mIsOnEmulator = false;
+
+// static
+MediaPlayer2Manager& MediaPlayer2Manager::get() {
+    return gMediaPlayer2Manager;
+}
+
+MediaPlayer2Manager::MediaPlayer2Manager() {
+    ALOGV("MediaPlayer2Manager created");
+    // TODO: remove all unnecessary pid/uid handling.
+    mPid = IPCThreadState::self()->getCallingPid();
+    mUid = IPCThreadState::self()->getCallingUid();
+    mNextConnId = 1;
+
+    MediaPlayer2Factory::registerBuiltinFactories();
+}
+
+MediaPlayer2Manager::~MediaPlayer2Manager() {
+    ALOGV("MediaPlayer2Manager destroyed");
+}
+
+sp<MediaPlayer2Engine> MediaPlayer2Manager::create(
+        const sp<MediaPlayer2EngineClient>& client,
+        audio_session_t audioSessionId)
+{
+    int32_t connId = android_atomic_inc(&mNextConnId);
+
+    sp<Client> c = new Client(
+            mPid, connId, client, audioSessionId, mUid);
+
+    ALOGV("Create new client(%d) from pid %d, uid %d, ", connId, mPid, mUid);
+
+    wp<Client> w = c;
+    {
+        Mutex::Autolock lock(mLock);
+        mClients.add(w);
+    }
+    return c;
+}
+
+status_t MediaPlayer2Manager::AudioOutput::dump(int fd, const Vector<String16>& args) const
+{
+    const size_t SIZE = 256;
+    char buffer[SIZE];
+    String8 result;
+
+    result.append(" AudioOutput\n");
+    snprintf(buffer, 255, "  stream type(%d), left - right volume(%f, %f)\n",
+            mStreamType, mLeftVolume, mRightVolume);
+    result.append(buffer);
+    snprintf(buffer, 255, "  msec per frame(%f), latency (%d)\n",
+            mMsecsPerFrame, (mTrack != 0) ? mTrack->latency() : -1);
+    result.append(buffer);
+    snprintf(buffer, 255, "  aux effect id(%d), send level (%f)\n",
+            mAuxEffectId, mSendLevel);
+    result.append(buffer);
+
+    ::write(fd, result.string(), result.size());
+    if (mTrack != 0) {
+        mTrack->dump(fd, args);
+    }
+    return NO_ERROR;
+}
+
+status_t MediaPlayer2Manager::Client::dump(int fd, const Vector<String16>& args)
+{
+    const size_t SIZE = 256;
+    char buffer[SIZE];
+    String8 result;
+    result.append(" Client\n");
+    snprintf(buffer, 255, "  pid(%d), connId(%d), status(%d), looping(%s)\n",
+            mPid, mConnId, mStatus, mLoop?"true": "false");
+    result.append(buffer);
+
+    sp<MediaPlayer2Base> p;
+    sp<AudioOutput> audioOutput;
+    bool locked = false;
+    for (int i = 0; i < kDumpLockRetries; ++i) {
+        if (mLock.tryLock() == NO_ERROR) {
+            locked = true;
+            break;
+        }
+        usleep(kDumpLockSleepUs);
+    }
+
+    if (locked) {
+        p = mPlayer;
+        audioOutput = mAudioOutput;
+        mLock.unlock();
+    } else {
+        result.append("  lock is taken, no dump from player and audio output\n");
+    }
+    write(fd, result.string(), result.size());
+
+    if (p != NULL) {
+        p->dump(fd, args);
+    }
+    if (audioOutput != 0) {
+        audioOutput->dump(fd, args);
+    }
+    write(fd, "\n", 1);
+    return NO_ERROR;
+}
+
+/**
+ * The only arguments this understands right now are -c, -von and -voff,
+ * which are parsed by ALooperRoster::dump()
+ */
+status_t MediaPlayer2Manager::dump(int fd, const Vector<String16>& args)
+{
+    const size_t SIZE = 256;
+    char buffer[SIZE];
+    String8 result;
+    SortedVector< sp<Client> > clients; //to serialise the mutex unlock & client destruction.
+
+    if (checkCallingPermission(String16("android.permission.DUMP")) == false) {
+        snprintf(buffer, SIZE, "Permission Denial: "
+                "can't dump MediaPlayer2Manager from pid=%d, uid=%d\n",
+                mPid, mUid);
+        result.append(buffer);
+    } else {
+        Mutex::Autolock lock(mLock);
+        for (int i = 0, n = mClients.size(); i < n; ++i) {
+            sp<Client> c = mClients[i].promote();
+            if (c != 0) c->dump(fd, args);
+            clients.add(c);
+        }
+
+        result.append(" Files opened and/or mapped:\n");
+        snprintf(buffer, SIZE, "/proc/%d/maps", getpid());
+        FILE *f = fopen(buffer, "r");
+        if (f) {
+            while (!feof(f)) {
+                fgets(buffer, SIZE, f);
+                if (strstr(buffer, " /storage/") ||
+                    strstr(buffer, " /system/sounds/") ||
+                    strstr(buffer, " /data/") ||
+                    strstr(buffer, " /system/media/")) {
+                    result.append("  ");
+                    result.append(buffer);
+                }
+            }
+            fclose(f);
+        } else {
+            result.append("couldn't open ");
+            result.append(buffer);
+            result.append("\n");
+        }
+
+        snprintf(buffer, SIZE, "/proc/%d/fd", getpid());
+        DIR *d = opendir(buffer);
+        if (d) {
+            struct dirent *ent;
+            while((ent = readdir(d)) != NULL) {
+                if (strcmp(ent->d_name,".") && strcmp(ent->d_name,"..")) {
+                    snprintf(buffer, SIZE, "/proc/%d/fd/%s", getpid(), ent->d_name);
+                    struct stat s;
+                    if (lstat(buffer, &s) == 0) {
+                        if ((s.st_mode & S_IFMT) == S_IFLNK) {
+                            char linkto[256];
+                            int len = readlink(buffer, linkto, sizeof(linkto));
+                            if(len > 0) {
+                                if(len > 255) {
+                                    linkto[252] = '.';
+                                    linkto[253] = '.';
+                                    linkto[254] = '.';
+                                    linkto[255] = 0;
+                                } else {
+                                    linkto[len] = 0;
+                                }
+                                if (strstr(linkto, "/storage/") == linkto ||
+                                    strstr(linkto, "/system/sounds/") == linkto ||
+                                    strstr(linkto, "/data/") == linkto ||
+                                    strstr(linkto, "/system/media/") == linkto) {
+                                    result.append("  ");
+                                    result.append(buffer);
+                                    result.append(" -> ");
+                                    result.append(linkto);
+                                    result.append("\n");
+                                }
+                            }
+                        } else {
+                            result.append("  unexpected type for ");
+                            result.append(buffer);
+                            result.append("\n");
+                        }
+                    }
+                }
+            }
+            closedir(d);
+        } else {
+            result.append("couldn't open ");
+            result.append(buffer);
+            result.append("\n");
+        }
+
+        gLooperRoster.dump(fd, args);
+
+        bool dumpMem = false;
+        bool unreachableMemory = false;
+        for (size_t i = 0; i < args.size(); i++) {
+            if (args[i] == String16("-m")) {
+                dumpMem = true;
+            } else if (args[i] == String16("--unreachable")) {
+                unreachableMemory = true;
+            }
+        }
+        if (dumpMem) {
+            result.append("\nDumping memory:\n");
+            std::string s = dumpMemoryAddresses(100 /* limit */);
+            result.append(s.c_str(), s.size());
+        }
+        if (unreachableMemory) {
+            result.append("\nDumping unreachable memory:\n");
+            // TODO - should limit be an argument parameter?
+            std::string s = GetUnreachableMemoryString(true /* contents */, 10000 /* limit */);
+            result.append(s.c_str(), s.size());
+        }
+    }
+    write(fd, result.string(), result.size());
+    return NO_ERROR;
+}
+
+void MediaPlayer2Manager::removeClient(const wp<Client>& client)
+{
+    Mutex::Autolock lock(mLock);
+    mClients.remove(client);
+}
+
+bool MediaPlayer2Manager::hasClient(wp<Client> client)
+{
+    Mutex::Autolock lock(mLock);
+    return mClients.indexOf(client) != NAME_NOT_FOUND;
+}
+
+MediaPlayer2Manager::Client::Client(
+        pid_t pid,
+        int32_t connId,
+        const sp<MediaPlayer2EngineClient>& client,
+        audio_session_t audioSessionId,
+        uid_t uid)
+{
+    ALOGV("Client(%d) constructor", connId);
+    mPid = pid;
+    mConnId = connId;
+    mClient = client;
+    mLoop = false;
+    mStatus = NO_INIT;
+    mAudioSessionId = audioSessionId;
+    mUid = uid;
+    mRetransmitEndpointValid = false;
+    mAudioAttributes = NULL;
+
+#if CALLBACK_ANTAGONIZER
+    ALOGD("create Antagonizer");
+    mAntagonizer = new Antagonizer(notify, this);
+#endif
+}
+
+MediaPlayer2Manager::Client::~Client()
+{
+    ALOGV("Client(%d) destructor pid = %d", mConnId, mPid);
+    mAudioOutput.clear();
+    wp<Client> client(this);
+    disconnect();
+    gMediaPlayer2Manager.removeClient(client);
+    if (mAudioAttributes != NULL) {
+        free(mAudioAttributes);
+    }
+    mAudioDeviceUpdatedListener.clear();
+}
+
+void MediaPlayer2Manager::Client::disconnect()
+{
+    ALOGV("disconnect(%d) from pid %d", mConnId, mPid);
+    // grab local reference and clear main reference to prevent future
+    // access to object
+    sp<MediaPlayer2Base> p;
+    {
+        Mutex::Autolock l(mLock);
+        p = mPlayer;
+        mClient.clear();
+        mPlayer.clear();
+    }
+
+    // clear the notification to prevent callbacks to dead client
+    // and reset the player. We assume the player will serialize
+    // access to itself if necessary.
+    if (p != 0) {
+        p->setNotifyCallback(0, 0);
+#if CALLBACK_ANTAGONIZER
+        ALOGD("kill Antagonizer");
+        mAntagonizer->kill();
+#endif
+        p->reset();
+    }
+
+    {
+        Mutex::Autolock l(mLock);
+        disconnectNativeWindow_l();
+    }
+
+    IPCThreadState::self()->flushCommands();
+}
+
+sp<MediaPlayer2Base> MediaPlayer2Manager::Client::createPlayer(player2_type playerType)
+{
+    // determine if we have the right player type
+    sp<MediaPlayer2Base> p = getPlayer();
+    if ((p != NULL) && (p->playerType() != playerType)) {
+        ALOGV("delete player");
+        p.clear();
+    }
+    if (p == NULL) {
+        p = MediaPlayer2Factory::createPlayer(playerType, this, notify, mPid);
+    }
+
+    if (p != NULL) {
+        p->setUID(mUid);
+    }
+
+    return p;
+}
+
+void MediaPlayer2Manager::Client::AudioDeviceUpdatedNotifier::onAudioDeviceUpdate(
+        audio_io_handle_t audioIo,
+        audio_port_handle_t deviceId) {
+    sp<MediaPlayer2Base> listener = mListener.promote();
+    if (listener != NULL) {
+        listener->sendEvent(MEDIA2_AUDIO_ROUTING_CHANGED, audioIo, deviceId);
+    } else {
+        ALOGW("listener for process %d death is gone", MEDIA2_AUDIO_ROUTING_CHANGED);
+    }
+}
+
+sp<MediaPlayer2Base> MediaPlayer2Manager::Client::setDataSource_pre(
+        player2_type playerType)
+{
+    ALOGV("player type = %d", playerType);
+
+    // create the right type of player
+    sp<MediaPlayer2Base> p = createPlayer(playerType);
+    if (p == NULL) {
+        return p;
+    }
+
+    Mutex::Autolock lock(mLock);
+
+    mAudioDeviceUpdatedListener = new AudioDeviceUpdatedNotifier(p);
+
+    if (!p->hardwareOutput()) {
+        mAudioOutput = new AudioOutput(mAudioSessionId, mUid,
+                mPid, mAudioAttributes, mAudioDeviceUpdatedListener);
+        static_cast<MediaPlayer2Interface*>(p.get())->setAudioSink(mAudioOutput);
+    }
+
+    return p;
+}
+
+status_t MediaPlayer2Manager::Client::setDataSource_post(
+        const sp<MediaPlayer2Base>& p,
+        status_t status)
+{
+    ALOGV(" setDataSource");
+    if (status != OK) {
+        ALOGE("  error: %d", status);
+        return status;
+    }
+
+    // Set the re-transmission endpoint if one was chosen.
+    if (mRetransmitEndpointValid) {
+        status = p->setRetransmitEndpoint(&mRetransmitEndpoint);
+        if (status != NO_ERROR) {
+            ALOGE("setRetransmitEndpoint error: %d", status);
+        }
+    }
+
+    if (status == OK) {
+        Mutex::Autolock lock(mLock);
+        mPlayer = p;
+    }
+    return status;
+}
+
+status_t MediaPlayer2Manager::Client::setDataSource(
+        const sp<MediaHTTPService> &httpService,
+        const char *url,
+        const KeyedVector<String8, String8> *headers)
+{
+    ALOGV("setDataSource(%s)", url);
+    if (url == NULL)
+        return UNKNOWN_ERROR;
+
+    if ((strncmp(url, "http://", 7) == 0) ||
+        (strncmp(url, "https://", 8) == 0) ||
+        (strncmp(url, "rtsp://", 7) == 0)) {
+        if (!checkPermission("android.permission.INTERNET")) {
+            return PERMISSION_DENIED;
+        }
+    }
+
+    if (strncmp(url, "content://", 10) == 0) {
+        ALOGE("setDataSource: content scheme is not supported here");
+        mStatus = UNKNOWN_ERROR;
+        return mStatus;
+    } else {
+        player2_type playerType = MediaPlayer2Factory::getPlayerType(this, url);
+        sp<MediaPlayer2Base> p = setDataSource_pre(playerType);
+        if (p == NULL) {
+            return NO_INIT;
+        }
+
+        return mStatus =
+                setDataSource_post(
+                p, p->setDataSource(httpService, url, headers));
+    }
+}
+
+status_t MediaPlayer2Manager::Client::setDataSource(int fd, int64_t offset, int64_t length)
+{
+    ALOGV("setDataSource fd=%d (%s), offset=%lld, length=%lld",
+            fd, nameForFd(fd).c_str(), (long long) offset, (long long) length);
+    struct stat sb;
+    int ret = fstat(fd, &sb);
+    if (ret != 0) {
+        ALOGE("fstat(%d) failed: %d, %s", fd, ret, strerror(errno));
+        return UNKNOWN_ERROR;
+    }
+
+    ALOGV("st_dev  = %llu", static_cast<unsigned long long>(sb.st_dev));
+    ALOGV("st_mode = %u", sb.st_mode);
+    ALOGV("st_uid  = %lu", static_cast<unsigned long>(sb.st_uid));
+    ALOGV("st_gid  = %lu", static_cast<unsigned long>(sb.st_gid));
+    ALOGV("st_size = %llu", static_cast<unsigned long long>(sb.st_size));
+
+    if (offset >= sb.st_size) {
+        ALOGE("offset error");
+        return UNKNOWN_ERROR;
+    }
+    if (offset + length > sb.st_size) {
+        length = sb.st_size - offset;
+        ALOGV("calculated length = %lld", (long long)length);
+    }
+
+    player2_type playerType = MediaPlayer2Factory::getPlayerType(this,
+                                                               fd,
+                                                               offset,
+                                                               length);
+    sp<MediaPlayer2Base> p = setDataSource_pre(playerType);
+    if (p == NULL) {
+        return NO_INIT;
+    }
+
+    // now set data source
+    return mStatus = setDataSource_post(p, p->setDataSource(fd, offset, length));
+}
+
+status_t MediaPlayer2Manager::Client::setDataSource(
+        const sp<IStreamSource> &source) {
+    // create the right type of player
+    player2_type playerType = MediaPlayer2Factory::getPlayerType(this, source);
+    sp<MediaPlayer2Base> p = setDataSource_pre(playerType);
+    if (p == NULL) {
+        return NO_INIT;
+    }
+
+    // now set data source
+    return mStatus = setDataSource_post(p, p->setDataSource(source));
+}
+
+status_t MediaPlayer2Manager::Client::setDataSource(
+        const sp<IDataSource> &source) {
+    sp<DataSource> dataSource = CreateDataSourceFromIDataSource(source);
+    player2_type playerType = MediaPlayer2Factory::getPlayerType(this, dataSource);
+    sp<MediaPlayer2Base> p = setDataSource_pre(playerType);
+    if (p == NULL) {
+        return NO_INIT;
+    }
+    // now set data source
+    return mStatus = setDataSource_post(p, p->setDataSource(dataSource));
+}
+
+void MediaPlayer2Manager::Client::disconnectNativeWindow_l() {
+    if (mConnectedWindow != NULL) {
+        status_t err = nativeWindowDisconnect(
+                mConnectedWindow.get(), "disconnectNativeWindow");
+
+        if (err != OK) {
+            ALOGW("nativeWindowDisconnect returned an error: %s (%d)",
+                    strerror(-err), err);
+        }
+    }
+    mConnectedWindow.clear();
+}
+
+status_t MediaPlayer2Manager::Client::setVideoSurfaceTexture(
+        const sp<IGraphicBufferProducer>& bufferProducer)
+{
+    ALOGV("[%d] setVideoSurfaceTexture(%p)", mConnId, bufferProducer.get());
+    sp<MediaPlayer2Base> p = getPlayer();
+    if (p == 0) return UNKNOWN_ERROR;
+
+    sp<IBinder> binder(IInterface::asBinder(bufferProducer));
+    if (mConnectedWindowBinder == binder) {
+        return OK;
+    }
+
+    sp<ANativeWindow> anw;
+    if (bufferProducer != NULL) {
+        anw = new Surface(bufferProducer, true /* controlledByApp */);
+        status_t err = nativeWindowConnect(anw.get(), "setVideoSurfaceTexture");
+
+        if (err != OK) {
+            ALOGE("setVideoSurfaceTexture failed: %d", err);
+            // Note that we must do the reset before disconnecting from the ANW.
+            // Otherwise queue/dequeue calls could be made on the disconnected
+            // ANW, which may result in errors.
+            reset();
+
+            Mutex::Autolock lock(mLock);
+            disconnectNativeWindow_l();
+
+            return err;
+        }
+    }
+
+    // Note that we must set the player's new GraphicBufferProducer before
+    // disconnecting the old one.  Otherwise queue/dequeue calls could be made
+    // on the disconnected ANW, which may result in errors.
+    status_t err = p->setVideoSurfaceTexture(bufferProducer);
+
+    mLock.lock();
+    disconnectNativeWindow_l();
+
+    if (err == OK) {
+        mConnectedWindow = anw;
+        mConnectedWindowBinder = binder;
+        mLock.unlock();
+    } else {
+        mLock.unlock();
+        status_t err = nativeWindowDisconnect(
+                anw.get(), "disconnectNativeWindow");
+
+        if (err != OK) {
+            ALOGW("nativeWindowDisconnect returned an error: %s (%d)",
+                    strerror(-err), err);
+        }
+    }
+
+    return err;
+}
+
+status_t MediaPlayer2Manager::Client::invoke(const Parcel& request,
+                                            Parcel *reply)
+{
+    sp<MediaPlayer2Base> p = getPlayer();
+    if (p == NULL) return UNKNOWN_ERROR;
+    return p->invoke(request, reply);
+}
+
+// This call doesn't need to access the native player.
+status_t MediaPlayer2Manager::Client::setMetadataFilter(const Parcel& filter)
+{
+    status_t status;
+    media::Metadata::Filter allow, drop;
+
+    if (unmarshallFilter(filter, &allow, &status) &&
+        unmarshallFilter(filter, &drop, &status)) {
+        Mutex::Autolock lock(mLock);
+
+        mMetadataAllow = allow;
+        mMetadataDrop = drop;
+    }
+    return status;
+}
+
+status_t MediaPlayer2Manager::Client::getMetadata(
+        bool update_only, bool /*apply_filter*/, Parcel *reply)
+{
+    sp<MediaPlayer2Base> player = getPlayer();
+    if (player == 0) return UNKNOWN_ERROR;
+
+    status_t status;
+    // Placeholder for the return code, updated by the caller.
+    reply->writeInt32(-1);
+
+    media::Metadata::Filter ids;
+
+    // We don't block notifications while we fetch the data. We clear
+    // mMetadataUpdated first so we don't lose notifications happening
+    // during the rest of this call.
+    {
+        Mutex::Autolock lock(mLock);
+        if (update_only) {
+            ids = mMetadataUpdated;
+        }
+        mMetadataUpdated.clear();
+    }
+
+    media::Metadata metadata(reply);
+
+    metadata.appendHeader();
+    status = player->getMetadata(ids, reply);
+
+    if (status != OK) {
+        metadata.resetParcel();
+        ALOGE("getMetadata failed %d", status);
+        return status;
+    }
+
+    // FIXME: Implement filtering on the result. Not critical since
+    // filtering takes place on the update notifications already. This
+    // would be when all the metadata are fetched and a filter is set.
+
+    // Everything is fine, update the metadata length.
+    metadata.updateLength();
+    return OK;
+}
+
+status_t MediaPlayer2Manager::Client::setBufferingSettings(
+        const BufferingSettings& buffering)
+{
+    ALOGV("[%d] setBufferingSettings{%s}",
+            mConnId, buffering.toString().string());
+    sp<MediaPlayer2Base> p = getPlayer();
+    if (p == 0) return UNKNOWN_ERROR;
+    return p->setBufferingSettings(buffering);
+}
+
+status_t MediaPlayer2Manager::Client::getBufferingSettings(
+        BufferingSettings* buffering /* nonnull */)
+{
+    sp<MediaPlayer2Base> p = getPlayer();
+    // TODO: create mPlayer on demand.
+    if (p == 0) return UNKNOWN_ERROR;
+    status_t ret = p->getBufferingSettings(buffering);
+    if (ret == NO_ERROR) {
+        ALOGV("[%d] getBufferingSettings{%s}",
+                mConnId, buffering->toString().string());
+    } else {
+        ALOGE("[%d] getBufferingSettings returned %d", mConnId, ret);
+    }
+    return ret;
+}
+
+status_t MediaPlayer2Manager::Client::prepareAsync()
+{
+    ALOGV("[%d] prepareAsync", mConnId);
+    sp<MediaPlayer2Base> p = getPlayer();
+    if (p == 0) return UNKNOWN_ERROR;
+    status_t ret = p->prepareAsync();
+#if CALLBACK_ANTAGONIZER
+    ALOGD("start Antagonizer");
+    if (ret == NO_ERROR) mAntagonizer->start();
+#endif
+    return ret;
+}
+
+status_t MediaPlayer2Manager::Client::start()
+{
+    ALOGV("[%d] start", mConnId);
+    sp<MediaPlayer2Base> p = getPlayer();
+    if (p == 0) return UNKNOWN_ERROR;
+    p->setLooping(mLoop);
+    return p->start();
+}
+
+status_t MediaPlayer2Manager::Client::stop()
+{
+    ALOGV("[%d] stop", mConnId);
+    sp<MediaPlayer2Base> p = getPlayer();
+    if (p == 0) return UNKNOWN_ERROR;
+    return p->stop();
+}
+
+status_t MediaPlayer2Manager::Client::pause()
+{
+    ALOGV("[%d] pause", mConnId);
+    sp<MediaPlayer2Base> p = getPlayer();
+    if (p == 0) return UNKNOWN_ERROR;
+    return p->pause();
+}
+
+status_t MediaPlayer2Manager::Client::isPlaying(bool* state)
+{
+    *state = false;
+    sp<MediaPlayer2Base> p = getPlayer();
+    if (p == 0) return UNKNOWN_ERROR;
+    *state = p->isPlaying();
+    ALOGV("[%d] isPlaying: %d", mConnId, *state);
+    return NO_ERROR;
+}
+
+status_t MediaPlayer2Manager::Client::setPlaybackSettings(const AudioPlaybackRate& rate)
+{
+    ALOGV("[%d] setPlaybackSettings(%f, %f, %d, %d)",
+            mConnId, rate.mSpeed, rate.mPitch, rate.mFallbackMode, rate.mStretchMode);
+    sp<MediaPlayer2Base> p = getPlayer();
+    if (p == 0) return UNKNOWN_ERROR;
+    return p->setPlaybackSettings(rate);
+}
+
+status_t MediaPlayer2Manager::Client::getPlaybackSettings(AudioPlaybackRate* rate /* nonnull */)
+{
+    sp<MediaPlayer2Base> p = getPlayer();
+    if (p == 0) return UNKNOWN_ERROR;
+    status_t ret = p->getPlaybackSettings(rate);
+    if (ret == NO_ERROR) {
+        ALOGV("[%d] getPlaybackSettings(%f, %f, %d, %d)",
+                mConnId, rate->mSpeed, rate->mPitch, rate->mFallbackMode, rate->mStretchMode);
+    } else {
+        ALOGV("[%d] getPlaybackSettings returned %d", mConnId, ret);
+    }
+    return ret;
+}
+
+status_t MediaPlayer2Manager::Client::setSyncSettings(
+        const AVSyncSettings& sync, float videoFpsHint)
+{
+    ALOGV("[%d] setSyncSettings(%u, %u, %f, %f)",
+            mConnId, sync.mSource, sync.mAudioAdjustMode, sync.mTolerance, videoFpsHint);
+    sp<MediaPlayer2Base> p = getPlayer();
+    if (p == 0) return UNKNOWN_ERROR;
+    return p->setSyncSettings(sync, videoFpsHint);
+}
+
+status_t MediaPlayer2Manager::Client::getSyncSettings(
+        AVSyncSettings* sync /* nonnull */, float* videoFps /* nonnull */)
+{
+    sp<MediaPlayer2Base> p = getPlayer();
+    if (p == 0) return UNKNOWN_ERROR;
+    status_t ret = p->getSyncSettings(sync, videoFps);
+    if (ret == NO_ERROR) {
+        ALOGV("[%d] getSyncSettings(%u, %u, %f, %f)",
+                mConnId, sync->mSource, sync->mAudioAdjustMode, sync->mTolerance, *videoFps);
+    } else {
+        ALOGV("[%d] getSyncSettings returned %d", mConnId, ret);
+    }
+    return ret;
+}
+
+status_t MediaPlayer2Manager::Client::getCurrentPosition(int *msec)
+{
+    ALOGV("getCurrentPosition");
+    sp<MediaPlayer2Base> p = getPlayer();
+    if (p == 0) return UNKNOWN_ERROR;
+    status_t ret = p->getCurrentPosition(msec);
+    if (ret == NO_ERROR) {
+        ALOGV("[%d] getCurrentPosition = %d", mConnId, *msec);
+    } else {
+        ALOGE("getCurrentPosition returned %d", ret);
+    }
+    return ret;
+}
+
+status_t MediaPlayer2Manager::Client::getDuration(int *msec)
+{
+    ALOGV("getDuration");
+    sp<MediaPlayer2Base> p = getPlayer();
+    if (p == 0) return UNKNOWN_ERROR;
+    status_t ret = p->getDuration(msec);
+    if (ret == NO_ERROR) {
+        ALOGV("[%d] getDuration = %d", mConnId, *msec);
+    } else {
+        ALOGE("getDuration returned %d", ret);
+    }
+    return ret;
+}
+
+status_t MediaPlayer2Manager::Client::setNextPlayer(const sp<MediaPlayer2Engine>& player) {
+    ALOGV("setNextPlayer");
+    Mutex::Autolock l(mLock);
+    sp<Client> c = static_cast<Client*>(player.get());
+    if (c != NULL && !gMediaPlayer2Manager.hasClient(c)) {
+      return BAD_VALUE;
+    }
+
+    mNextClient = c;
+
+    if (c != NULL) {
+        if (mAudioOutput != NULL) {
+            mAudioOutput->setNextOutput(c->mAudioOutput);
+        } else if ((mPlayer != NULL) && !mPlayer->hardwareOutput()) {
+            ALOGE("no current audio output");
+        }
+
+        if ((mPlayer != NULL) && (mNextClient->getPlayer() != NULL)) {
+            mPlayer->setNextPlayer(mNextClient->getPlayer());
+        }
+    }
+
+    return OK;
+}
+
+VolumeShaper::Status MediaPlayer2Manager::Client::applyVolumeShaper(
+        const sp<VolumeShaper::Configuration>& configuration,
+        const sp<VolumeShaper::Operation>& operation) {
+    // for hardware output, call player instead
+    ALOGV("Client::applyVolumeShaper(%p)", this);
+    sp<MediaPlayer2Base> p = getPlayer();
+    {
+        Mutex::Autolock l(mLock);
+        if (p != 0 && p->hardwareOutput()) {
+            // TODO: investigate internal implementation
+            return VolumeShaper::Status(INVALID_OPERATION);
+        }
+        if (mAudioOutput.get() != nullptr) {
+            return mAudioOutput->applyVolumeShaper(configuration, operation);
+        }
+    }
+    return VolumeShaper::Status(INVALID_OPERATION);
+}
+
+sp<VolumeShaper::State> MediaPlayer2Manager::Client::getVolumeShaperState(int id) {
+    // for hardware output, call player instead
+    ALOGV("Client::getVolumeShaperState(%p)", this);
+    sp<MediaPlayer2Base> p = getPlayer();
+    {
+        Mutex::Autolock l(mLock);
+        if (p != 0 && p->hardwareOutput()) {
+            // TODO: investigate internal implementation.
+            return nullptr;
+        }
+        if (mAudioOutput.get() != nullptr) {
+            return mAudioOutput->getVolumeShaperState(id);
+        }
+    }
+    return nullptr;
+}
+
+status_t MediaPlayer2Manager::Client::seekTo(int msec, MediaPlayer2SeekMode mode)
+{
+    ALOGV("[%d] seekTo(%d, %d)", mConnId, msec, mode);
+    sp<MediaPlayer2Base> p = getPlayer();
+    if (p == 0) return UNKNOWN_ERROR;
+    return p->seekTo(msec, mode);
+}
+
+status_t MediaPlayer2Manager::Client::reset()
+{
+    ALOGV("[%d] reset", mConnId);
+    mRetransmitEndpointValid = false;
+    sp<MediaPlayer2Base> p = getPlayer();
+    if (p == 0) return UNKNOWN_ERROR;
+    return p->reset();
+}
+
+status_t MediaPlayer2Manager::Client::notifyAt(int64_t mediaTimeUs)
+{
+    ALOGV("[%d] notifyAt(%lld)", mConnId, (long long)mediaTimeUs);
+    sp<MediaPlayer2Base> p = getPlayer();
+    if (p == 0) return UNKNOWN_ERROR;
+    return p->notifyAt(mediaTimeUs);
+}
+
+status_t MediaPlayer2Manager::Client::setAudioStreamType(audio_stream_type_t type)
+{
+    ALOGV("[%d] setAudioStreamType(%d)", mConnId, type);
+    // TODO: for hardware output, call player instead
+    Mutex::Autolock l(mLock);
+    if (mAudioOutput != 0) mAudioOutput->setAudioStreamType(type);
+    return NO_ERROR;
+}
+
+status_t MediaPlayer2Manager::Client::setAudioAttributes_l(const Parcel &parcel)
+{
+    if (mAudioAttributes != NULL) { free(mAudioAttributes); }
+    mAudioAttributes = (audio_attributes_t *) calloc(1, sizeof(audio_attributes_t));
+    if (mAudioAttributes == NULL) {
+        return NO_MEMORY;
+    }
+    unmarshallAudioAttributes(parcel, mAudioAttributes);
+
+    ALOGV("setAudioAttributes_l() usage=%d content=%d flags=0x%x tags=%s",
+            mAudioAttributes->usage, mAudioAttributes->content_type, mAudioAttributes->flags,
+            mAudioAttributes->tags);
+
+    if (mAudioOutput != 0) {
+        mAudioOutput->setAudioAttributes(mAudioAttributes);
+    }
+    return NO_ERROR;
+}
+
+status_t MediaPlayer2Manager::Client::setLooping(int loop)
+{
+    ALOGV("[%d] setLooping(%d)", mConnId, loop);
+    mLoop = loop;
+    sp<MediaPlayer2Base> p = getPlayer();
+    if (p != 0) return p->setLooping(loop);
+    return NO_ERROR;
+}
+
+status_t MediaPlayer2Manager::Client::setVolume(float leftVolume, float rightVolume)
+{
+    ALOGV("[%d] setVolume(%f, %f)", mConnId, leftVolume, rightVolume);
+
+    // for hardware output, call player instead
+    sp<MediaPlayer2Base> p = getPlayer();
+    {
+      Mutex::Autolock l(mLock);
+      if (p != 0 && p->hardwareOutput()) {
+          MediaPlayerHWInterface* hwp =
+                  reinterpret_cast<MediaPlayerHWInterface*>(p.get());
+          return hwp->setVolume(leftVolume, rightVolume);
+      } else {
+          if (mAudioOutput != 0) mAudioOutput->setVolume(leftVolume, rightVolume);
+          return NO_ERROR;
+      }
+    }
+
+    return NO_ERROR;
+}
+
+status_t MediaPlayer2Manager::Client::setAuxEffectSendLevel(float level)
+{
+    ALOGV("[%d] setAuxEffectSendLevel(%f)", mConnId, level);
+    Mutex::Autolock l(mLock);
+    if (mAudioOutput != 0) return mAudioOutput->setAuxEffectSendLevel(level);
+    return NO_ERROR;
+}
+
+status_t MediaPlayer2Manager::Client::attachAuxEffect(int effectId)
+{
+    ALOGV("[%d] attachAuxEffect(%d)", mConnId, effectId);
+    Mutex::Autolock l(mLock);
+    if (mAudioOutput != 0) return mAudioOutput->attachAuxEffect(effectId);
+    return NO_ERROR;
+}
+
+status_t MediaPlayer2Manager::Client::setParameter(int key, const Parcel &request) {
+    ALOGV("[%d] setParameter(%d)", mConnId, key);
+    switch (key) {
+    case MEDIA2_KEY_PARAMETER_AUDIO_ATTRIBUTES:
+    {
+        Mutex::Autolock l(mLock);
+        return setAudioAttributes_l(request);
+    }
+    default:
+        sp<MediaPlayer2Base> p = getPlayer();
+        if (p == 0) { return UNKNOWN_ERROR; }
+        return p->setParameter(key, request);
+    }
+}
+
+status_t MediaPlayer2Manager::Client::getParameter(int key, Parcel *reply) {
+    ALOGV("[%d] getParameter(%d)", mConnId, key);
+    sp<MediaPlayer2Base> p = getPlayer();
+    if (p == 0) return UNKNOWN_ERROR;
+    return p->getParameter(key, reply);
+}
+
+status_t MediaPlayer2Manager::Client::setRetransmitEndpoint(
+        const struct sockaddr_in* endpoint) {
+
+    if (NULL != endpoint) {
+        uint32_t a = ntohl(endpoint->sin_addr.s_addr);
+        uint16_t p = ntohs(endpoint->sin_port);
+        ALOGV("[%d] setRetransmitEndpoint(%u.%u.%u.%u:%hu)", mConnId,
+                (a >> 24), (a >> 16) & 0xFF, (a >> 8) & 0xFF, (a & 0xFF), p);
+    } else {
+        ALOGV("[%d] setRetransmitEndpoint = <none>", mConnId);
+    }
+
+    sp<MediaPlayer2Base> p = getPlayer();
+
+    // Right now, the only valid time to set a retransmit endpoint is before
+    // player selection has been made (since the presence or absence of a
+    // retransmit endpoint is going to determine which player is selected during
+    // setDataSource).
+    if (p != 0) return INVALID_OPERATION;
+
+    if (NULL != endpoint) {
+        Mutex::Autolock lock(mLock);
+        mRetransmitEndpoint = *endpoint;
+        mRetransmitEndpointValid = true;
+    } else {
+        Mutex::Autolock lock(mLock);
+        mRetransmitEndpointValid = false;
+    }
+
+    return NO_ERROR;
+}
+
+status_t MediaPlayer2Manager::Client::getRetransmitEndpoint(
+        struct sockaddr_in* endpoint)
+{
+    if (NULL == endpoint)
+        return BAD_VALUE;
+
+    sp<MediaPlayer2Base> p = getPlayer();
+
+    if (p != NULL)
+        return p->getRetransmitEndpoint(endpoint);
+
+    Mutex::Autolock lock(mLock);
+    if (!mRetransmitEndpointValid)
+        return NO_INIT;
+
+    *endpoint = mRetransmitEndpoint;
+
+    return NO_ERROR;
+}
+
+void MediaPlayer2Manager::Client::notify(
+        void* cookie, int msg, int ext1, int ext2, const Parcel *obj)
+{
+    Client* client = static_cast<Client*>(cookie);
+    if (client == NULL) {
+        return;
+    }
+
+    sp<MediaPlayer2EngineClient> c;
+    sp<Client> nextClient;
+    status_t errStartNext = NO_ERROR;
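+    // On playback completion, hand the audio output over to the next client
+    // (if one was set) and start it so the track switch is gapless.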
+    {
+        Mutex::Autolock l(client->mLock);
+        c = client->mClient;
+        if (msg == MEDIA2_PLAYBACK_COMPLETE && client->mNextClient != NULL) {
+            nextClient = client->mNextClient;
+
+            if (client->mAudioOutput != NULL)
+                client->mAudioOutput->switchToNextOutput();
+
+            errStartNext = nextClient->start();
+        }
+    }
+
+    if (nextClient != NULL) {
+        sp<MediaPlayer2EngineClient> nc;
+        {
+            Mutex::Autolock l(nextClient->mLock);
+            nc = nextClient->mClient;
+        }
+        if (nc != NULL) {
+            if (errStartNext == NO_ERROR) {
+                nc->notify(MEDIA2_INFO, MEDIA2_INFO_STARTED_AS_NEXT, 0, obj);
+            } else {
+                nc->notify(MEDIA2_ERROR, MEDIA2_ERROR_UNKNOWN, 0, obj);
+                ALOGE("gapless: start playback for next track failed, err(%d)", errStartNext);
+            }
+        }
+    }
+
+    if (MEDIA2_INFO == msg &&
+        MEDIA2_INFO_METADATA_UPDATE == ext1) {
+        const media::Metadata::Type metadata_type = ext2;
+
+        if (client->shouldDropMetadata(metadata_type)) {
+            return;
+        }
+
+        // Update the list of metadata that have changed. getMetadata
+        // also accesses mMetadataUpdated and clears it.
+        client->addNewMetadataUpdate(metadata_type);
+    }
+
+    if (c != NULL) {
+        ALOGV("[%d] notify (%p, %d, %d, %d)", client->mConnId, cookie, msg, ext1, ext2);
+        c->notify(msg, ext1, ext2, obj);
+    }
+}
+
+
+bool MediaPlayer2Manager::Client::shouldDropMetadata(media::Metadata::Type code) const
+{
+    Mutex::Autolock lock(mLock);
+
+    if (findMetadata(mMetadataDrop, code)) {
+        return true;
+    }
+
+    if (mMetadataAllow.isEmpty() || findMetadata(mMetadataAllow, code)) {
+        return false;
+    } else {
+        return true;
+    }
+}
+
+
+void MediaPlayer2Manager::Client::addNewMetadataUpdate(media::Metadata::Type metadata_type) {
+    Mutex::Autolock lock(mLock);
+    if (mMetadataUpdated.indexOf(metadata_type) < 0) {
+        mMetadataUpdated.add(metadata_type);
+    }
+}
+
+// Modular DRM
+status_t MediaPlayer2Manager::Client::prepareDrm(const uint8_t uuid[16],
+        const Vector<uint8_t>& drmSessionId)
+{
+    ALOGV("[%d] prepareDrm", mConnId);
+    sp<MediaPlayer2Base> p = getPlayer();
+    if (p == 0) return UNKNOWN_ERROR;
+
+    status_t ret = p->prepareDrm(uuid, drmSessionId);
+    ALOGV("prepareDrm ret: %d", ret);
+
+    return ret;
+}
+
+status_t MediaPlayer2Manager::Client::releaseDrm()
+{
+    ALOGV("[%d] releaseDrm", mConnId);
+    sp<MediaPlayer2Base> p = getPlayer();
+    if (p == 0) return UNKNOWN_ERROR;
+
+    status_t ret = p->releaseDrm();
+    ALOGV("releaseDrm ret: %d", ret);
+
+    return ret;
+}
+
+status_t MediaPlayer2Manager::Client::setOutputDevice(audio_port_handle_t deviceId)
+{
+    ALOGV("[%d] setOutputDevice", mConnId);
+    {
+        Mutex::Autolock l(mLock);
+        if (mAudioOutput.get() != nullptr) {
+            return mAudioOutput->setOutputDevice(deviceId);
+        }
+    }
+    return NO_INIT;
+}
+
+status_t MediaPlayer2Manager::Client::getRoutedDeviceId(audio_port_handle_t* deviceId)
+{
+    ALOGV("[%d] getRoutedDeviceId", mConnId);
+    {
+        Mutex::Autolock l(mLock);
+        if (mAudioOutput.get() != nullptr) {
+            return mAudioOutput->getRoutedDeviceId(deviceId);
+        }
+    }
+    return NO_INIT;
+}
+
+status_t MediaPlayer2Manager::Client::enableAudioDeviceCallback(bool enabled)
+{
+    ALOGV("[%d] enableAudioDeviceCallback, %d", mConnId, enabled);
+    {
+        Mutex::Autolock l(mLock);
+        if (mAudioOutput.get() != nullptr) {
+            return mAudioOutput->enableAudioDeviceCallback(enabled);
+        }
+    }
+    return NO_INIT;
+}
+
+#if CALLBACK_ANTAGONIZER
+const int Antagonizer::interval = 10000; // 10 msecs
+
+Antagonizer::Antagonizer(notify_callback_f cb, void* client) :
+    mExit(false), mActive(false), mClient(client), mCb(cb)
+{
+    createThread(callbackThread, this);
+}
+
+void Antagonizer::kill()
+{
+    Mutex::Autolock _l(mLock);
+    mActive = false;
+    mExit = true;
+    mCondition.wait(mLock);
+}
+
+int Antagonizer::callbackThread(void* user)
+{
+    ALOGD("Antagonizer started");
+    Antagonizer* p = reinterpret_cast<Antagonizer*>(user);
+    while (!p->mExit) {
+        if (p->mActive) {
+            ALOGV("send event");
+            p->mCb(p->mClient, 0, 0, 0);
+        }
+        usleep(interval);
+    }
+    Mutex::Autolock _l(p->mLock);
+    p->mCondition.signal();
+    ALOGD("Antagonizer stopped");
+    return 0;
+}
+#endif
+
+#undef LOG_TAG
+#define LOG_TAG "AudioSink"
+MediaPlayer2Manager::AudioOutput::AudioOutput(audio_session_t sessionId, uid_t uid, int pid,
+        const audio_attributes_t* attr, const sp<AudioSystem::AudioDeviceCallback>& deviceCallback)
+    : mCallback(NULL),
+      mCallbackCookie(NULL),
+      mCallbackData(NULL),
+      mStreamType(AUDIO_STREAM_MUSIC),
+      mLeftVolume(1.0),
+      mRightVolume(1.0),
+      mPlaybackRate(AUDIO_PLAYBACK_RATE_DEFAULT),
+      mSampleRateHz(0),
+      mMsecsPerFrame(0),
+      mFrameSize(0),
+      mSessionId(sessionId),
+      mUid(uid),
+      mPid(pid),
+      mSendLevel(0.0),
+      mAuxEffectId(0),
+      mFlags(AUDIO_OUTPUT_FLAG_NONE),
+      mVolumeHandler(new media::VolumeHandler()),
+      mSelectedDeviceId(AUDIO_PORT_HANDLE_NONE),
+      mRoutedDeviceId(AUDIO_PORT_HANDLE_NONE),
+      mDeviceCallbackEnabled(false),
+      mDeviceCallback(deviceCallback)
+{
+    ALOGV("AudioOutput(%d)", sessionId);
+    if (attr != NULL) {
+        mAttributes = (audio_attributes_t *) calloc(1, sizeof(audio_attributes_t));
+        if (mAttributes != NULL) {
+            memcpy(mAttributes, attr, sizeof(audio_attributes_t));
+            mStreamType = audio_attributes_to_stream_type(attr);
+        }
+    } else {
+        mAttributes = NULL;
+    }
+
+    setMinBufferCount();
+}
+
+MediaPlayer2Manager::AudioOutput::~AudioOutput()
+{
+    close();
+    free(mAttributes);
+    delete mCallbackData;
+}
+
+//static
+void MediaPlayer2Manager::AudioOutput::setMinBufferCount()
+{
+    char value[PROPERTY_VALUE_MAX];
+    if (property_get("ro.kernel.qemu", value, 0)) {
+        mIsOnEmulator = true;
+        mMinBufferCount = 12;  // to prevent systematic buffer underrun for emulator
+    }
+}
+
+// static
+bool MediaPlayer2Manager::AudioOutput::isOnEmulator()
+{
+    setMinBufferCount(); // benign race wrt other threads
+    return mIsOnEmulator;
+}
+
+// static
+int MediaPlayer2Manager::AudioOutput::getMinBufferCount()
+{
+    setMinBufferCount(); // benign race wrt other threads
+    return mMinBufferCount;
+}
+
+ssize_t MediaPlayer2Manager::AudioOutput::bufferSize() const
+{
+    Mutex::Autolock lock(mLock);
+    if (mTrack == 0) return NO_INIT;
+    return mTrack->frameCount() * mFrameSize;
+}
+
+ssize_t MediaPlayer2Manager::AudioOutput::frameCount() const
+{
+    Mutex::Autolock lock(mLock);
+    if (mTrack == 0) return NO_INIT;
+    return mTrack->frameCount();
+}
+
+ssize_t MediaPlayer2Manager::AudioOutput::channelCount() const
+{
+    Mutex::Autolock lock(mLock);
+    if (mTrack == 0) return NO_INIT;
+    return mTrack->channelCount();
+}
+
+ssize_t MediaPlayer2Manager::AudioOutput::frameSize() const
+{
+    Mutex::Autolock lock(mLock);
+    if (mTrack == 0) return NO_INIT;
+    return mFrameSize;
+}
+
+uint32_t MediaPlayer2Manager::AudioOutput::latency() const
+{
+    Mutex::Autolock lock(mLock);
+    if (mTrack == 0) return 0;
+    return mTrack->latency();
+}
+
+float MediaPlayer2Manager::AudioOutput::msecsPerFrame() const
+{
+    Mutex::Autolock lock(mLock);
+    return mMsecsPerFrame;
+}
+
+status_t MediaPlayer2Manager::AudioOutput::getPosition(uint32_t *position) const
+{
+    Mutex::Autolock lock(mLock);
+    if (mTrack == 0) return NO_INIT;
+    return mTrack->getPosition(position);
+}
+
+status_t MediaPlayer2Manager::AudioOutput::getTimestamp(AudioTimestamp &ts) const
+{
+    Mutex::Autolock lock(mLock);
+    if (mTrack == 0) return NO_INIT;
+    return mTrack->getTimestamp(ts);
+}
+
+// TODO: Remove unnecessary calls to getPlayedOutDurationUs()
+// as it acquires locks and may query the audio driver.
+//
+// Some calls could conceivably retrieve extrapolated data instead of
+// accessing getTimestamp() or getPosition() every time a data buffer with
+// a media time is received.
+//
+// Calculate duration of played samples if played at normal rate (i.e., 1.0).
+int64_t MediaPlayer2Manager::AudioOutput::getPlayedOutDurationUs(int64_t nowUs) const
+{
+    Mutex::Autolock lock(mLock);
+    if (mTrack == 0 || mSampleRateHz == 0) {
+        return 0;
+    }
+
+    uint32_t numFramesPlayed;
+    int64_t numFramesPlayedAtUs;
+    AudioTimestamp ts;
+
+    status_t res = mTrack->getTimestamp(ts);
+    if (res == OK) {                 // case 1: mixing audio tracks and offloaded tracks.
+        numFramesPlayed = ts.mPosition;
+        numFramesPlayedAtUs = ts.mTime.tv_sec * 1000000LL + ts.mTime.tv_nsec / 1000;
+        //ALOGD("getTimestamp: OK %d %lld", numFramesPlayed, (long long)numFramesPlayedAtUs);
+    } else if (res == WOULD_BLOCK) { // case 2: transitory state on start of a new track
+        numFramesPlayed = 0;
+        numFramesPlayedAtUs = nowUs;
+        //ALOGD("getTimestamp: WOULD_BLOCK %d %lld",
+        //        numFramesPlayed, (long long)numFramesPlayedAtUs);
+    } else {                         // case 3: transitory at new track or audio fast tracks.
+        res = mTrack->getPosition(&numFramesPlayed);
+        CHECK_EQ(res, (status_t)OK);
+        numFramesPlayedAtUs = nowUs;
+        numFramesPlayedAtUs += 1000LL * mTrack->latency() / 2; /* XXX */
+        //ALOGD("getPosition: %u %lld", numFramesPlayed, (long long)numFramesPlayedAtUs);
+    }
+
+    // CHECK_EQ(numFramesPlayed & (1 << 31), 0);  // can't be negative until 12.4 hrs, test
+    // TODO: remove the (int32_t) casting below as it may overflow at 12.4 hours.
+    int64_t durationUs = (int64_t)((int32_t)numFramesPlayed * 1000000LL / mSampleRateHz)
+            + nowUs - numFramesPlayedAtUs;
+    if (durationUs < 0) {
+        // Occurs when numFramesPlayed is very small and either:
+        // (1) In case 1, nowUs was computed before getTimestamp() was called, so
+        //     numFramesPlayedAtUs exceeds nowUs by more than the duration of numFramesPlayed.
+        // (2) In case 3, half of mTrack->latency() was added to numFramesPlayedAtUs,
+        //     pushing it past nowUs by more than the duration of numFramesPlayed.
+        //
+        // Both of these are transitory conditions.
+        ALOGV("getPlayedOutDurationUs: negative duration %lld set to zero", (long long)durationUs);
+        durationUs = 0;
+    }
+    ALOGV("getPlayedOutDurationUs(%lld) nowUs(%lld) frames(%u) framesAt(%lld)",
+            (long long)durationUs, (long long)nowUs,
+            numFramesPlayed, (long long)numFramesPlayedAtUs);
+    return durationUs;
+}
+
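+// Returns the number of frames the client has written into the track, taken
+// from the extended timestamp's LOCATION_CLIENT position.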
+status_t MediaPlayer2Manager::AudioOutput::getFramesWritten(uint32_t *frameswritten) const
+{
+    Mutex::Autolock lock(mLock);
+    if (mTrack == 0) return NO_INIT;
+    ExtendedTimestamp ets;
+    status_t status = mTrack->getTimestamp(&ets);
+    if (status == OK || status == WOULD_BLOCK) {
+        *frameswritten = (uint32_t)ets.mPosition[ExtendedTimestamp::LOCATION_CLIENT];
+    }
+    return status;
+}
+
+status_t MediaPlayer2Manager::AudioOutput::setParameters(const String8& keyValuePairs)
+{
+    Mutex::Autolock lock(mLock);
+    if (mTrack == 0) return NO_INIT;
+    return mTrack->setParameters(keyValuePairs);
+}
+
+String8 MediaPlayer2Manager::AudioOutput::getParameters(const String8& keys)
+{
+    Mutex::Autolock lock(mLock);
+    if (mTrack == 0) return String8::empty();
+    return mTrack->getParameters(keys);
+}
+
+void MediaPlayer2Manager::AudioOutput::setAudioAttributes(const audio_attributes_t * attributes) {
+    Mutex::Autolock lock(mLock);
+    if (attributes == NULL) {
+        free(mAttributes);
+        mAttributes = NULL;
+    } else {
+        if (mAttributes == NULL) {
+            mAttributes = (audio_attributes_t *) calloc(1, sizeof(audio_attributes_t));
+        }
+        memcpy(mAttributes, attributes, sizeof(audio_attributes_t));
+        mStreamType = audio_attributes_to_stream_type(attributes);
+    }
+}
+
+void MediaPlayer2Manager::AudioOutput::setAudioStreamType(audio_stream_type_t streamType)
+{
+    Mutex::Autolock lock(mLock);
+    // do not allow direct stream type modification if attributes have been set
+    if (mAttributes == NULL) {
+        mStreamType = streamType;
+    }
+}
+
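+// Drains and releases a track left over from a previous output that will not
+// be reused. Non-offloaded tracks are stopped and given time to drain;
+// offloaded tracks are left to AudioFlinger (see note below).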
+void MediaPlayer2Manager::AudioOutput::deleteRecycledTrack_l()
+{
+    ALOGV("deleteRecycledTrack_l");
+    if (mRecycledTrack != 0) {
+
+        if (mCallbackData != NULL) {
+            mCallbackData->setOutput(NULL);
+            mCallbackData->endTrackSwitch();
+        }
+
+        if ((mRecycledTrack->getFlags() & AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD) == 0) {
+            int32_t msec = 0;
+            if (!mRecycledTrack->stopped()) { // check if active
+                (void)mRecycledTrack->pendingDuration(&msec);
+            }
+            mRecycledTrack->stop(); // ensure full data drain
+            ALOGD("deleting recycled track, waiting for data drain (%d msec)", msec);
+            if (msec > 0) {
+                static const int32_t WAIT_LIMIT_MS = 3000;
+                if (msec > WAIT_LIMIT_MS) {
+                    msec = WAIT_LIMIT_MS;
+                }
+                usleep(msec * 1000LL);
+            }
+        }
+        // An offloaded track isn't flushed because the STREAM_END is reported
+        // slightly prematurely to allow time for the gapless track switch
+        // but this means that if we decide not to recycle the track there
+        // could be a small amount of residual data still playing. We leave
+        // AudioFlinger to drain the track.
+
+        mRecycledTrack.clear();
+        close_l();
+        delete mCallbackData;
+        mCallbackData = NULL;
+    }
+}
+
+void MediaPlayer2Manager::AudioOutput::close_l()
+{
+    mTrack.clear();
+}
+
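+// Opens the audio sink: computes a frame count for the AudioTrack buffer,
+// recycles the previous track when its format and flags match (for gapless
+// playback), otherwise creates a new AudioTrack and restores volume and
+// VolumeShaper state.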
+status_t MediaPlayer2Manager::AudioOutput::open(
+        uint32_t sampleRate, int channelCount, audio_channel_mask_t channelMask,
+        audio_format_t format, int bufferCount,
+        AudioCallback cb, void *cookie,
+        audio_output_flags_t flags,
+        const audio_offload_info_t *offloadInfo,
+        bool doNotReconnect,
+        uint32_t suggestedFrameCount)
+{
+    ALOGV("open(%u, %d, 0x%x, 0x%x, %d, %d 0x%x)", sampleRate, channelCount, channelMask,
+                format, bufferCount, mSessionId, flags);
+
+    // offloading is only supported in callback mode for now.
+    // offloadInfo must be present if offload flag is set
+    if (((flags & AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD) != 0) &&
+            ((cb == NULL) || (offloadInfo == NULL))) {
+        return BAD_VALUE;
+    }
+
+    // compute frame count for the AudioTrack internal buffer
+    size_t frameCount;
+    if ((flags & AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD) != 0) {
+        frameCount = 0; // AudioTrack will get frame count from AudioFlinger
+    } else {
+        // try to estimate the buffer processing fetch size from AudioFlinger.
+        // framesPerBuffer is approximate and generally correct, except when it's not :-).
+        uint32_t afSampleRate;
+        size_t afFrameCount;
+        if (AudioSystem::getOutputFrameCount(&afFrameCount, mStreamType) != NO_ERROR) {
+            return NO_INIT;
+        }
+        if (AudioSystem::getOutputSamplingRate(&afSampleRate, mStreamType) != NO_ERROR) {
+            return NO_INIT;
+        }
+        const size_t framesPerBuffer =
+                (unsigned long long)sampleRate * afFrameCount / afSampleRate;
+
+        if (bufferCount == 0) {
+            // use suggestedFrameCount
+            bufferCount = (suggestedFrameCount + framesPerBuffer - 1) / framesPerBuffer;
+        }
+        // Check argument bufferCount against the minimum buffer count
+        if (bufferCount != 0 && bufferCount < mMinBufferCount) {
+            ALOGV("bufferCount (%d) increased to %d", bufferCount, mMinBufferCount);
+            bufferCount = mMinBufferCount;
+        }
+        // if frameCount is 0, then AudioTrack will get frame count from AudioFlinger
+        // which will be the minimum size permitted.
+        frameCount = bufferCount * framesPerBuffer;
+    }
+
+    if (channelMask == CHANNEL_MASK_USE_CHANNEL_ORDER) {
+        channelMask = audio_channel_out_mask_from_count(channelCount);
+        if (0 == channelMask) {
+            ALOGE("open() error, can\'t derive mask for %d audio channels", channelCount);
+            return NO_INIT;
+        }
+    }
+
+    Mutex::Autolock lock(mLock);
+    mCallback = cb;
+    mCallbackCookie = cookie;
+
+    // Check whether we can recycle the track
+    bool reuse = false;
+    bool bothOffloaded = false;
+
+    if (mRecycledTrack != 0) {
+        // check whether we are switching between two offloaded tracks
+        bothOffloaded = (flags & mRecycledTrack->getFlags()
+                                & AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD) != 0;
+
+        // check if the existing track can be reused as-is, or if a new track needs to be created.
+        reuse = true;
+
+        if ((mCallbackData == NULL && mCallback != NULL) ||
+                (mCallbackData != NULL && mCallback == NULL)) {
+            // recycled track uses callbacks but the caller wants to use writes, or vice versa
+            ALOGV("can't chain callback and write");
+            reuse = false;
+        } else if ((mRecycledTrack->getSampleRate() != sampleRate) ||
+                (mRecycledTrack->channelCount() != (uint32_t)channelCount) ) {
+            ALOGV("samplerate, channelcount differ: %u/%u Hz, %u/%d ch",
+                  mRecycledTrack->getSampleRate(), sampleRate,
+                  mRecycledTrack->channelCount(), channelCount);
+            reuse = false;
+        } else if (flags != mFlags) {
+            ALOGV("output flags differ %08x/%08x", flags, mFlags);
+            reuse = false;
+        } else if (mRecycledTrack->format() != format) {
+            reuse = false;
+        }
+    } else {
+        ALOGV("no track available to recycle");
+    }
+
+    ALOGV_IF(bothOffloaded, "both tracks offloaded");
+
+    // If we can't recycle and both tracks are offloaded
+    // we must close the previous output before opening a new one
+    if (bothOffloaded && !reuse) {
+        ALOGV("both offloaded and not recycling");
+        deleteRecycledTrack_l();
+    }
+
+    sp<AudioTrack> t;
+    CallbackData *newcbd = NULL;
+
+    // We don't attempt to create a new track if we are recycling an
+    // offloaded track. But if we are recycling a non-offloaded track, or
+    // switching between a track that is offloaded and one that isn't, we
+    // create the new track in advance so that we can read additional stream info.
+
+    if (!(reuse && bothOffloaded)) {
+        ALOGV("creating new AudioTrack");
+
+        if (mCallback != NULL) {
+            newcbd = new CallbackData(this);
+            t = new AudioTrack(
+                    mStreamType,
+                    sampleRate,
+                    format,
+                    channelMask,
+                    frameCount,
+                    flags,
+                    CallbackWrapper,
+                    newcbd,
+                    0,  // notification frames
+                    mSessionId,
+                    AudioTrack::TRANSFER_CALLBACK,
+                    offloadInfo,
+                    mUid,
+                    mPid,
+                    mAttributes,
+                    doNotReconnect,
+                    1.0f,  // default value for maxRequiredSpeed
+                    mSelectedDeviceId);
+        } else {
+            // TODO: Due to buffer memory concerns, we use a max target playback speed
+            // based on mPlaybackRate at the time of open (instead of kMaxRequiredSpeed),
+            // also clamping the target speed to 1.0 <= targetSpeed <= kMaxRequiredSpeed.
+            const float targetSpeed =
+                    std::min(std::max(mPlaybackRate.mSpeed, 1.0f), kMaxRequiredSpeed);
+            ALOGW_IF(targetSpeed != mPlaybackRate.mSpeed,
+                    "track target speed:%f clamped from playback speed:%f",
+                    targetSpeed, mPlaybackRate.mSpeed);
+            t = new AudioTrack(
+                    mStreamType,
+                    sampleRate,
+                    format,
+                    channelMask,
+                    frameCount,
+                    flags,
+                    NULL, // callback
+                    NULL, // user data
+                    0, // notification frames
+                    mSessionId,
+                    AudioTrack::TRANSFER_DEFAULT,
+                    NULL, // offload info
+                    mUid,
+                    mPid,
+                    mAttributes,
+                    doNotReconnect,
+                    targetSpeed,
+                    mSelectedDeviceId);
+        }
+
+        if ((t == 0) || (t->initCheck() != NO_ERROR)) {
+            ALOGE("Unable to create audio track");
+            delete newcbd;
+            // t goes out of scope, so reference count drops to zero
+            return NO_INIT;
+        } else {
+            // successful AudioTrack initialization implies a legacy stream type was generated
+            // from the audio attributes
+            mStreamType = t->streamType();
+        }
+    }
+
+    if (reuse) {
+        CHECK(mRecycledTrack != NULL);
+
+        if (!bothOffloaded) {
+            if (mRecycledTrack->frameCount() != t->frameCount()) {
+                ALOGV("framecount differs: %zu/%zu frames",
+                      mRecycledTrack->frameCount(), t->frameCount());
+                reuse = false;
+            }
+        }
+
+        if (reuse) {
+            ALOGV("chaining to next output and recycling track");
+            close_l();
+            mTrack = mRecycledTrack;
+            mRecycledTrack.clear();
+            if (mCallbackData != NULL) {
+                mCallbackData->setOutput(this);
+            }
+            delete newcbd;
+            return updateTrack();
+        }
+    }
+
+    // we're not going to reuse the track, unblock and flush it
+    // this was done earlier if both tracks are offloaded
+    if (!bothOffloaded) {
+        deleteRecycledTrack_l();
+    }
+
+    CHECK((t != NULL) && ((mCallback == NULL) || (newcbd != NULL)));
+
+    mCallbackData = newcbd;
+    ALOGV("setVolume");
+    t->setVolume(mLeftVolume, mRightVolume);
+
+    // Restore VolumeShapers for the MediaPlayer2 in case the track was recreated
+    // due to an output sink error (e.g. offload to non-offload switch).
+    mVolumeHandler->forall([&t](const VolumeShaper &shaper) -> VolumeShaper::Status {
+        sp<VolumeShaper::Operation> operationToEnd =
+                new VolumeShaper::Operation(shaper.mOperation);
+        // TODO: Ideally we would restore to the exact xOffset position
+        // as returned by getVolumeShaperState(), but we don't have that
+        // information when restoring at the client unless we periodically poll
+        // the server or create shared memory state.
+        //
+        // For now, we simply advance to the end of the VolumeShaper effect
+        // if it has been started.
+        if (shaper.isStarted()) {
+            operationToEnd->setNormalizedTime(1.f);
+        }
+        return t->applyVolumeShaper(shaper.mConfiguration, operationToEnd);
+    });
+
+    mSampleRateHz = sampleRate;
+    mFlags = flags;
+    mMsecsPerFrame = 1E3f / (mPlaybackRate.mSpeed * sampleRate);
+    mFrameSize = t->frameSize();
+    mTrack = t;
+
+    return updateTrack();
+}
+
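+// Re-applies cached playback rate, aux-effect and routing settings to the
+// current track; rate and effects are skipped for direct/offloaded outputs.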
+status_t MediaPlayer2Manager::AudioOutput::updateTrack() {
+    if (mTrack == NULL) {
+        return NO_ERROR;
+    }
+
+    status_t res = NO_ERROR;
+    // Note some output devices may give us a direct track even though we don't specify it.
+    // Example: Line application b/17459982.
+    if ((mTrack->getFlags()
+            & (AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD | AUDIO_OUTPUT_FLAG_DIRECT)) == 0) {
+        res = mTrack->setPlaybackRate(mPlaybackRate);
+        if (res == NO_ERROR) {
+            mTrack->setAuxEffectSendLevel(mSendLevel);
+            res = mTrack->attachAuxEffect(mAuxEffectId);
+        }
+    }
+    mTrack->setOutputDevice(mSelectedDeviceId);
+    if (mDeviceCallbackEnabled) {
+        mTrack->addAudioDeviceCallback(mDeviceCallback.promote());
+    }
+    ALOGV("updateTrack() DONE status %d", res);
+    return res;
+}
+
+status_t MediaPlayer2Manager::AudioOutput::start()
+{
+    ALOGV("start");
+    Mutex::Autolock lock(mLock);
+    if (mCallbackData != NULL) {
+        mCallbackData->endTrackSwitch();
+    }
+    if (mTrack != 0) {
+        mTrack->setVolume(mLeftVolume, mRightVolume);
+        mTrack->setAuxEffectSendLevel(mSendLevel);
+        status_t status = mTrack->start();
+        if (status == NO_ERROR) {
+            mVolumeHandler->setStarted();
+        }
+        return status;
+    }
+    return NO_INIT;
+}
+
+void MediaPlayer2Manager::AudioOutput::setNextOutput(const sp<AudioOutput>& nextOutput) {
+    Mutex::Autolock lock(mLock);
+    mNextOutput = nextOutput;
+}
+
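+// Hands this sink's AudioTrack and callback data over to mNextOutput so that
+// the next player can recycle them for gapless playback.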
+void MediaPlayer2Manager::AudioOutput::switchToNextOutput() {
+    ALOGV("switchToNextOutput");
+
+    // Try to acquire the callback lock before moving track (without incurring deadlock).
+    const unsigned kMaxSwitchTries = 100;
+    Mutex::Autolock lock(mLock);
+    for (unsigned tries = 0;;) {
+        if (mTrack == 0) {
+            return;
+        }
+        if (mNextOutput != NULL && mNextOutput != this) {
+            if (mCallbackData != NULL) {
+                // two alternative approaches
+#if 1
+                CallbackData *callbackData = mCallbackData;
+                mLock.unlock();
+                // proper acquisition sequence
+                callbackData->lock();
+                mLock.lock();
+                // Caution: while mLock was dropped, someone may have deleted our
+                // callback or changed our target; unlikely, but re-check.
+                if (callbackData != mCallbackData || mNextOutput == NULL || mNextOutput == this) {
+                    // fatal if we are starved out.
+                    LOG_ALWAYS_FATAL_IF(++tries > kMaxSwitchTries,
+                            "switchToNextOutput() cannot obtain correct lock sequence");
+                    callbackData->unlock();
+                    continue;
+                }
+                callbackData->mSwitching = true; // begin track switch
+                callbackData->setOutput(NULL);
+#else
+                // tryBeginTrackSwitch() returns false if the callback has the lock.
+                if (!mCallbackData->tryBeginTrackSwitch()) {
+                    // fatal if we are starved out.
+                    LOG_ALWAYS_FATAL_IF(++tries > kMaxSwitchTries,
+                            "switchToNextOutput() cannot obtain callback lock");
+                    mLock.unlock();
+                    usleep(5 * 1000 /* usec */); // allow callback to use AudioOutput
+                    mLock.lock();
+                    continue;
+                }
+#endif
+            }
+
+            Mutex::Autolock nextLock(mNextOutput->mLock);
+
+            // If the next output track is not NULL, then it has been
+            // opened already for playback.
+            // This is possible even without the next player being started,
+            // for example, the next player could be prepared and seeked.
+            //
+            // We presume it isn't advisable to force the track over.
+            if (mNextOutput->mTrack == NULL) {
+                ALOGD("Recycling track for gapless playback");
+                delete mNextOutput->mCallbackData;
+                mNextOutput->mCallbackData = mCallbackData;
+                mNextOutput->mRecycledTrack = mTrack;
+                mNextOutput->mSampleRateHz = mSampleRateHz;
+                mNextOutput->mMsecsPerFrame = mMsecsPerFrame;
+                mNextOutput->mFlags = mFlags;
+                mNextOutput->mFrameSize = mFrameSize;
+                close_l();
+                mCallbackData = NULL;  // destruction handled by mNextOutput
+            } else {
+                ALOGW("Ignoring gapless playback because next player has already started");
+                // Remove the track in case its resources are needed by future players.
+                if (mCallbackData != NULL) {
+                    mCallbackData->endTrackSwitch();  // release lock for callbacks before close.
+                }
+                close_l();
+            }
+        }
+        break;
+    }
+}
+
+ssize_t MediaPlayer2Manager::AudioOutput::write(const void* buffer, size_t size, bool blocking)
+{
+    Mutex::Autolock lock(mLock);
+    LOG_ALWAYS_FATAL_IF(mCallback != NULL, "Don't call write if supplying a callback.");
+
+    //ALOGV("write(%p, %u)", buffer, size);
+    if (mTrack != 0) {
+        return mTrack->write(buffer, size, blocking);
+    }
+    return NO_INIT;
+}
+
+void MediaPlayer2Manager::AudioOutput::stop()
+{
+    ALOGV("stop");
+    Mutex::Autolock lock(mLock);
+    if (mTrack != 0) mTrack->stop();
+}
+
+void MediaPlayer2Manager::AudioOutput::flush()
+{
+    ALOGV("flush");
+    Mutex::Autolock lock(mLock);
+    if (mTrack != 0) mTrack->flush();
+}
+
+void MediaPlayer2Manager::AudioOutput::pause()
+{
+    ALOGV("pause");
+    Mutex::Autolock lock(mLock);
+    if (mTrack != 0) mTrack->pause();
+}
+
+void MediaPlayer2Manager::AudioOutput::close()
+{
+    ALOGV("close");
+    sp<AudioTrack> track;
+    {
+        Mutex::Autolock lock(mLock);
+        track = mTrack;
+        close_l(); // clears mTrack
+    }
+    // destruction of the track occurs outside of mutex.
+}
+
+void MediaPlayer2Manager::AudioOutput::setVolume(float left, float right)
+{
+    ALOGV("setVolume(%f, %f)", left, right);
+    Mutex::Autolock lock(mLock);
+    mLeftVolume = left;
+    mRightVolume = right;
+    if (mTrack != 0) {
+        mTrack->setVolume(left, right);
+    }
+}
+
+status_t MediaPlayer2Manager::AudioOutput::setPlaybackRate(const AudioPlaybackRate &rate)
+{
+    ALOGV("setPlaybackRate(%f %f %d %d)",
+                rate.mSpeed, rate.mPitch, rate.mFallbackMode, rate.mStretchMode);
+    Mutex::Autolock lock(mLock);
+    if (mTrack == 0) {
+        // remember rate so that we can set it when the track is opened
+        mPlaybackRate = rate;
+        return OK;
+    }
+    status_t res = mTrack->setPlaybackRate(rate);
+    if (res != NO_ERROR) {
+        return res;
+    }
+    // rate.mSpeed is always greater than 0 if setPlaybackRate succeeded
+    CHECK_GT(rate.mSpeed, 0.f);
+    mPlaybackRate = rate;
+    if (mSampleRateHz != 0) {
+        mMsecsPerFrame = 1E3f / (rate.mSpeed * mSampleRateHz);
+    }
+    return res;
+}
+
+status_t MediaPlayer2Manager::AudioOutput::getPlaybackRate(AudioPlaybackRate *rate)
+{
+    ALOGV("setPlaybackRate");
+    Mutex::Autolock lock(mLock);
+    if (mTrack == 0) {
+        return NO_INIT;
+    }
+    *rate = mTrack->getPlaybackRate();
+    return NO_ERROR;
+}
+
+status_t MediaPlayer2Manager::AudioOutput::setAuxEffectSendLevel(float level)
+{
+    ALOGV("setAuxEffectSendLevel(%f)", level);
+    Mutex::Autolock lock(mLock);
+    mSendLevel = level;
+    if (mTrack != 0) {
+        return mTrack->setAuxEffectSendLevel(level);
+    }
+    return NO_ERROR;
+}
+
+status_t MediaPlayer2Manager::AudioOutput::attachAuxEffect(int effectId)
+{
+    ALOGV("attachAuxEffect(%d)", effectId);
+    Mutex::Autolock lock(mLock);
+    mAuxEffectId = effectId;
+    if (mTrack != 0) {
+        return mTrack->attachAuxEffect(effectId);
+    }
+    return NO_ERROR;
+}
+
+status_t MediaPlayer2Manager::AudioOutput::setOutputDevice(audio_port_handle_t deviceId)
+{
+    ALOGV("setOutputDevice(%d)", deviceId);
+    Mutex::Autolock lock(mLock);
+    mSelectedDeviceId = deviceId;
+    if (mTrack != 0) {
+        return mTrack->setOutputDevice(deviceId);
+    }
+    return NO_ERROR;
+}
+
+status_t MediaPlayer2Manager::AudioOutput::getRoutedDeviceId(audio_port_handle_t* deviceId)
+{
+    ALOGV("getRoutedDeviceId");
+    Mutex::Autolock lock(mLock);
+    if (mTrack != 0) {
+        mRoutedDeviceId = mTrack->getRoutedDeviceId();
+    }
+    *deviceId = mRoutedDeviceId;
+    return NO_ERROR;
+}
+
+status_t MediaPlayer2Manager::AudioOutput::enableAudioDeviceCallback(bool enabled)
+{
+    ALOGV("enableAudioDeviceCallback, %d", enabled);
+    Mutex::Autolock lock(mLock);
+    mDeviceCallbackEnabled = enabled;
+    if (mTrack != 0) {
+        status_t status;
+        if (enabled) {
+            status = mTrack->addAudioDeviceCallback(mDeviceCallback.promote());
+        } else {
+            status = mTrack->removeAudioDeviceCallback(mDeviceCallback.promote());
+        }
+        return status;
+    }
+    return NO_ERROR;
+}
+
+VolumeShaper::Status MediaPlayer2Manager::AudioOutput::applyVolumeShaper(
+                const sp<VolumeShaper::Configuration>& configuration,
+                const sp<VolumeShaper::Operation>& operation)
+{
+    Mutex::Autolock lock(mLock);
+    ALOGV("AudioOutput::applyVolumeShaper");
+
+    mVolumeHandler->setIdIfNecessary(configuration);
+
+    VolumeShaper::Status status;
+    if (mTrack != 0) {
+        status = mTrack->applyVolumeShaper(configuration, operation);
+        if (status >= 0) {
+            (void)mVolumeHandler->applyVolumeShaper(configuration, operation);
+            if (mTrack->isPlaying()) { // match local AudioTrack to properly restore.
+                mVolumeHandler->setStarted();
+            }
+        }
+    } else {
+        // VolumeShapers are not affected when a track moves between players for
+        // gapless playback (setNextMediaPlayer).
+        // We forward VolumeShaper operations that do not change configuration
+        // to the new player so that unducking may occur as expected.
+        // Unducking is idempotent, so applying it back-to-back has the same effect.
+        if (configuration->getType() == VolumeShaper::Configuration::TYPE_ID
+                && mNextOutput != nullptr) {
+            ALOGV("applyVolumeShaper: Attempting to forward missed operation: %s %s",
+                    configuration->toString().c_str(), operation->toString().c_str());
+            Mutex::Autolock nextLock(mNextOutput->mLock);
+
+            // recycled track should be forwarded from this AudioSink by switchToNextOutput
+            sp<AudioTrack> track = mNextOutput->mRecycledTrack;
+            if (track != nullptr) {
+                ALOGD("Forward VolumeShaper operation to recycled track %p", track.get());
+                (void)track->applyVolumeShaper(configuration, operation);
+            } else {
+                // There is a small chance that the unduck occurs after the next
+                // player has already started, but before it is registered to receive
+                // the unduck command.
+                track = mNextOutput->mTrack;
+                if (track != nullptr) {
+                    ALOGD("Forward VolumeShaper operation to track %p", track.get());
+                    (void)track->applyVolumeShaper(configuration, operation);
+                }
+            }
+        }
+        status = mVolumeHandler->applyVolumeShaper(configuration, operation);
+    }
+    return status;
+}
+
+sp<VolumeShaper::State> MediaPlayer2Manager::AudioOutput::getVolumeShaperState(int id)
+{
+    Mutex::Autolock lock(mLock);
+    if (mTrack != 0) {
+        return mTrack->getVolumeShaperState(id);
+    } else {
+        return mVolumeHandler->getVolumeShaperState(id);
+    }
+}
+
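+// AudioTrack callback trampoline: forwards EVENT_MORE_DATA, STREAM_END,
+// NEW_IAUDIOTRACK and UNDERRUN events to the registered AudioCallback while
+// holding the CallbackData lock to guard against concurrent track switches.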
+// static
+void MediaPlayer2Manager::AudioOutput::CallbackWrapper(
+        int event, void *cookie, void *info) {
+    //ALOGV("callbackwrapper");
+    CallbackData *data = (CallbackData*)cookie;
+    // lock to ensure we aren't caught in the middle of a track switch.
+    data->lock();
+    AudioOutput *me = data->getOutput();
+    AudioTrack::Buffer *buffer = (AudioTrack::Buffer *)info;
+    if (me == NULL) {
+        // no output set, likely because the track was scheduled to be reused
+        // by another player, but the format turned out to be incompatible.
+        data->unlock();
+        if (buffer != NULL) {
+            buffer->size = 0;
+        }
+        return;
+    }
+
+    switch(event) {
+    case AudioTrack::EVENT_MORE_DATA: {
+        size_t actualSize = (*me->mCallback)(
+                me, buffer->raw, buffer->size, me->mCallbackCookie,
+                CB_EVENT_FILL_BUFFER);
+
+        // Log when no data is returned from the callback.
+        // (1) We may have no data (especially with network streaming sources).
+        // (2) We may have reached the EOS and the audio track is not stopped yet.
+        // Note that AwesomePlayer/AudioPlayer will only return zero size when it reaches the EOS.
+        // NuPlayer2Renderer will return zero when it doesn't have data (it doesn't block to fill).
+        //
+        // This is a benign busy-wait, with the next data request generated 10 ms or more later;
+        // nevertheless for power reasons, we don't want to see too many of these.
+
+        ALOGV_IF(actualSize == 0 && buffer->size > 0, "callbackwrapper: empty buffer returned");
+
+        buffer->size = actualSize;
+        } break;
+
+    case AudioTrack::EVENT_STREAM_END:
+        // currently only occurs for offloaded callbacks
+        ALOGV("callbackwrapper: deliver EVENT_STREAM_END");
+        (*me->mCallback)(me, NULL /* buffer */, 0 /* size */,
+                me->mCallbackCookie, CB_EVENT_STREAM_END);
+        break;
+
+    case AudioTrack::EVENT_NEW_IAUDIOTRACK:
+        ALOGV("callbackwrapper: deliver EVENT_TEAR_DOWN");
+        (*me->mCallback)(me, NULL /* buffer */, 0 /* size */,
+                me->mCallbackCookie, CB_EVENT_TEAR_DOWN);
+        break;
+
+    case AudioTrack::EVENT_UNDERRUN:
+        // This occurs when there is no data available, typically
+        // when there is a failure to supply data to the AudioTrack.  It can also
+        // occur in non-offloaded mode when the audio device comes out of standby.
+        //
+        // If an AudioTrack underruns it outputs silence. Since this happens suddenly
+        // it may sound like an audible pop or glitch.
+        //
+        // The underrun event is sent once per track underrun; the condition is reset
+        // when more data is sent to the AudioTrack.
+        ALOGD("callbackwrapper: EVENT_UNDERRUN (discarded)");
+        break;
+
+    default:
+        ALOGE("received unknown event type: %d inside CallbackWrapper !", event);
+    }
+
+    data->unlock();
+}
+
+audio_session_t MediaPlayer2Manager::AudioOutput::getSessionId() const
+{
+    Mutex::Autolock lock(mLock);
+    return mSessionId;
+}
+
+uint32_t MediaPlayer2Manager::AudioOutput::getSampleRate() const
+{
+    Mutex::Autolock lock(mLock);
+    if (mTrack == 0) return 0;
+    return mTrack->getSampleRate();
+}
+
+int64_t MediaPlayer2Manager::AudioOutput::getBufferDurationInUs() const
+{
+    Mutex::Autolock lock(mLock);
+    if (mTrack == 0) {
+        return 0;
+    }
+    int64_t duration;
+    if (mTrack->getBufferDurationInUs(&duration) != OK) {
+        return 0;
+    }
+    return duration;
+}
+
+} // namespace android
diff --git a/media/libmedia/MediaPlayer2Manager.h b/media/libmedia/MediaPlayer2Manager.h
new file mode 100644
index 0000000..b346f40
--- /dev/null
+++ b/media/libmedia/MediaPlayer2Manager.h
@@ -0,0 +1,412 @@
+/*
+**
+** Copyright 2017, The Android Open Source Project
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+**     http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+#ifndef ANDROID_MEDIAPLAYER2MANAGER_H
+#define ANDROID_MEDIAPLAYER2MANAGER_H
+
+#include <arpa/inet.h>
+
+#include <utils/threads.h>
+#include <utils/Errors.h>
+#include <utils/KeyedVector.h>
+#include <utils/String8.h>
+#include <utils/Vector.h>
+
+#include <media/MediaPlayer2Engine.h>
+#include <media/MediaPlayer2Interface.h>
+#include <media/Metadata.h>
+#include <media/stagefright/foundation/ABase.h>
+
+#include <system/audio.h>
+
+namespace android {
+
+struct AudioPlaybackRate;
+class AudioTrack;
+struct AVSyncSettings;
+class IDataSource;
+struct MediaHTTPService;
+class MediaPlayer2EngineClient;
+
+#define CALLBACK_ANTAGONIZER 0
+#if CALLBACK_ANTAGONIZER
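+// Debug-only helper (compiled when CALLBACK_ANTAGONIZER is set to 1) that
+// repeatedly fires the notify callback from its own thread to exercise clients.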
+class Antagonizer {
+public:
+    Antagonizer(notify_callback_f cb, void* client);
+    void start() { mActive = true; }
+    void stop() { mActive = false; }
+    void kill();
+private:
+    static const int interval;
+    Antagonizer();
+    static int callbackThread(void* cookie);
+    Mutex               mLock;
+    Condition           mCondition;
+    bool                mExit;
+    bool                mActive;
+    void*               mClient;
+    notify_callback_f   mCb;
+};
+#endif
+
+class MediaPlayer2Manager {
+    class Client;
+
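+    // AudioSink implementation backed by an AudioTrack. Its track can be
+    // handed to a "next" AudioOutput (setNextOutput/switchToNextOutput) to
+    // support gapless playback.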
+    class AudioOutput : public MediaPlayer2Base::AudioSink
+    {
+        class CallbackData;
+
+     public:
+                                AudioOutput(
+                                        audio_session_t sessionId,
+                                        uid_t uid,
+                                        int pid,
+                                        const audio_attributes_t * attr,
+                                        const sp<AudioSystem::AudioDeviceCallback>& deviceCallback);
+        virtual                 ~AudioOutput();
+
+        virtual bool            ready() const { return mTrack != 0; }
+        virtual ssize_t         bufferSize() const;
+        virtual ssize_t         frameCount() const;
+        virtual ssize_t         channelCount() const;
+        virtual ssize_t         frameSize() const;
+        virtual uint32_t        latency() const;
+        virtual float           msecsPerFrame() const;
+        virtual status_t        getPosition(uint32_t *position) const;
+        virtual status_t        getTimestamp(AudioTimestamp &ts) const;
+        virtual int64_t         getPlayedOutDurationUs(int64_t nowUs) const;
+        virtual status_t        getFramesWritten(uint32_t *frameswritten) const;
+        virtual audio_session_t getSessionId() const;
+        virtual uint32_t        getSampleRate() const;
+        virtual int64_t         getBufferDurationInUs() const;
+
+        virtual status_t        open(
+                uint32_t sampleRate, int channelCount, audio_channel_mask_t channelMask,
+                audio_format_t format, int bufferCount,
+                AudioCallback cb, void *cookie,
+                audio_output_flags_t flags = AUDIO_OUTPUT_FLAG_NONE,
+                const audio_offload_info_t *offloadInfo = NULL,
+                bool doNotReconnect = false,
+                uint32_t suggestedFrameCount = 0);
+
+        virtual status_t        start();
+        virtual ssize_t         write(const void* buffer, size_t size, bool blocking = true);
+        virtual void            stop();
+        virtual void            flush();
+        virtual void            pause();
+        virtual void            close();
+                void            setAudioStreamType(audio_stream_type_t streamType);
+        virtual audio_stream_type_t getAudioStreamType() const { return mStreamType; }
+                void            setAudioAttributes(const audio_attributes_t * attributes);
+
+                void            setVolume(float left, float right);
+        virtual status_t        setPlaybackRate(const AudioPlaybackRate& rate);
+        virtual status_t        getPlaybackRate(AudioPlaybackRate* rate /* nonnull */);
+
+                status_t        setAuxEffectSendLevel(float level);
+                status_t        attachAuxEffect(int effectId);
+        virtual status_t        dump(int fd, const Vector<String16>& args) const;
+
+        static bool             isOnEmulator();
+        static int              getMinBufferCount();
+                void            setNextOutput(const sp<AudioOutput>& nextOutput);
+                void            switchToNextOutput();
+        virtual bool            needsTrailingPadding() { return mNextOutput == NULL; }
+        virtual status_t        setParameters(const String8& keyValuePairs);
+        virtual String8         getParameters(const String8& keys);
+
+        virtual media::VolumeShaper::Status applyVolumeShaper(
+                                        const sp<media::VolumeShaper::Configuration>& configuration,
+                                        const sp<media::VolumeShaper::Operation>& operation) override;
+        virtual sp<media::VolumeShaper::State> getVolumeShaperState(int id) override;
+
+        // AudioRouting
+        virtual status_t        setOutputDevice(audio_port_handle_t deviceId);
+        virtual status_t        getRoutedDeviceId(audio_port_handle_t* deviceId);
+        virtual status_t        enableAudioDeviceCallback(bool enabled);
+
+    private:
+        static void             setMinBufferCount();
+        static void             CallbackWrapper(
+                int event, void *me, void *info);
+               void             deleteRecycledTrack_l();
+               void             close_l();
+           status_t             updateTrack();
+
+        sp<AudioTrack>          mTrack;
+        sp<AudioTrack>          mRecycledTrack;
+        sp<AudioOutput>         mNextOutput;
+        AudioCallback           mCallback;
+        void *                  mCallbackCookie;
+        CallbackData *          mCallbackData;
+        audio_stream_type_t     mStreamType;
+        audio_attributes_t *    mAttributes;
+        float                   mLeftVolume;
+        float                   mRightVolume;
+        AudioPlaybackRate       mPlaybackRate;
+        uint32_t                mSampleRateHz; // sample rate of the content, as set in open()
+        float                   mMsecsPerFrame;
+        size_t                  mFrameSize;
+        audio_session_t         mSessionId;
+        uid_t                   mUid;
+        int                     mPid;
+        float                   mSendLevel;
+        int                     mAuxEffectId;
+        audio_output_flags_t    mFlags;
+        sp<media::VolumeHandler>       mVolumeHandler;
+        audio_port_handle_t     mSelectedDeviceId;
+        audio_port_handle_t     mRoutedDeviceId;
+        bool                    mDeviceCallbackEnabled;
+        wp<AudioSystem::AudioDeviceCallback>        mDeviceCallback;
+        mutable Mutex           mLock;
+
+        // static variables below not protected by mutex
+        static bool             mIsOnEmulator;
+        static int              mMinBufferCount;  // 12 for emulator; otherwise 4
+
+        // CallbackData is what is passed to the AudioTrack as the "user" data.
+        // We need to be able to target this to a different Output on the fly,
+        // so we can't use the Output itself for this.
+        class CallbackData {
+            friend AudioOutput;
+        public:
+            explicit CallbackData(AudioOutput *cookie) {
+                mData = cookie;
+                mSwitching = false;
+            }
+            AudioOutput *   getOutput() const { return mData; }
+            void            setOutput(AudioOutput* newcookie) { mData = newcookie; }
+            // lock/unlock are used by the callback before accessing the payload of this object
+            void            lock() const { mLock.lock(); }
+            void            unlock() const { mLock.unlock(); }
+
+            // tryBeginTrackSwitch/endTrackSwitch are used when the CallbackData is handed over
+            // to the next sink.
+
+            // tryBeginTrackSwitch() returns true only if it obtains the lock.
+            bool            tryBeginTrackSwitch() {
+                LOG_ALWAYS_FATAL_IF(mSwitching, "tryBeginTrackSwitch() already called");
+                if (mLock.tryLock() != OK) {
+                    return false;
+                }
+                mSwitching = true;
+                return true;
+            }
+            void            endTrackSwitch() {
+                if (mSwitching) {
+                    mLock.unlock();
+                }
+                mSwitching = false;
+            }
+        private:
+            AudioOutput *   mData;
+            mutable Mutex   mLock; // a recursive mutex might make this unnecessary.
+            bool            mSwitching;
+            DISALLOW_EVIL_CONSTRUCTORS(CallbackData);
+        };
+
+    }; // AudioOutput
+
+
+public:
+    MediaPlayer2Manager();
+    virtual ~MediaPlayer2Manager();
+
+    static MediaPlayer2Manager& get();
+
+    // MediaPlayer2Manager interface
+    virtual sp<MediaPlayer2Engine> create(const sp<MediaPlayer2EngineClient>& client,
+                                          audio_session_t audioSessionId);
+
+    virtual status_t            dump(int fd, const Vector<String16>& args);
+
+            void                removeClient(const wp<Client>& client);
+            bool                hasClient(wp<Client> client);
+
+private:
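+    // One Client is created per MediaPlayer2Engine connection; it owns the
+    // underlying MediaPlayer2Base player and its AudioOutput sink.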
+    class Client : public MediaPlayer2Engine {
+        // MediaPlayer2Engine interface
+        virtual void            disconnect();
+        virtual status_t        setVideoSurfaceTexture(
+                                        const sp<IGraphicBufferProducer>& bufferProducer);
+        virtual status_t        setBufferingSettings(const BufferingSettings& buffering) override;
+        virtual status_t        getBufferingSettings(
+                                        BufferingSettings* buffering /* nonnull */) override;
+        virtual status_t        prepareAsync();
+        virtual status_t        start();
+        virtual status_t        stop();
+        virtual status_t        pause();
+        virtual status_t        isPlaying(bool* state);
+        virtual status_t        setPlaybackSettings(const AudioPlaybackRate& rate);
+        virtual status_t        getPlaybackSettings(AudioPlaybackRate* rate /* nonnull */);
+        virtual status_t        setSyncSettings(const AVSyncSettings& rate, float videoFpsHint);
+        virtual status_t        getSyncSettings(AVSyncSettings* rate /* nonnull */,
+                                                float* videoFps /* nonnull */);
+        virtual status_t        seekTo(
+                int msec,
+                MediaPlayer2SeekMode mode = MediaPlayer2SeekMode::SEEK_PREVIOUS_SYNC);
+        virtual status_t        getCurrentPosition(int* msec);
+        virtual status_t        getDuration(int* msec);
+        virtual status_t        reset();
+        virtual status_t        notifyAt(int64_t mediaTimeUs);
+        virtual status_t        setAudioStreamType(audio_stream_type_t type);
+        virtual status_t        setLooping(int loop);
+        virtual status_t        setVolume(float leftVolume, float rightVolume);
+        virtual status_t        invoke(const Parcel& request, Parcel *reply);
+        virtual status_t        setMetadataFilter(const Parcel& filter);
+        virtual status_t        getMetadata(bool update_only,
+                                            bool apply_filter,
+                                            Parcel *reply);
+        virtual status_t        setAuxEffectSendLevel(float level);
+        virtual status_t        attachAuxEffect(int effectId);
+        virtual status_t        setParameter(int key, const Parcel &request);
+        virtual status_t        getParameter(int key, Parcel *reply);
+        virtual status_t        setRetransmitEndpoint(const struct sockaddr_in* endpoint);
+        virtual status_t        getRetransmitEndpoint(struct sockaddr_in* endpoint);
+        virtual status_t        setNextPlayer(const sp<MediaPlayer2Engine>& player);
+
+        virtual media::VolumeShaper::Status applyVolumeShaper(
+                                        const sp<media::VolumeShaper::Configuration>& configuration,
+                                        const sp<media::VolumeShaper::Operation>& operation) override;
+        virtual sp<media::VolumeShaper::State> getVolumeShaperState(int id) override;
+
+        sp<MediaPlayer2Base>     createPlayer(player2_type playerType);
+
+        virtual status_t        setDataSource(
+                        const sp<MediaHTTPService> &httpService,
+                        const char *url,
+                        const KeyedVector<String8, String8> *headers);
+
+        virtual status_t        setDataSource(int fd, int64_t offset, int64_t length);
+
+        virtual status_t        setDataSource(const sp<IStreamSource> &source);
+        virtual status_t        setDataSource(const sp<IDataSource> &source);
+
+
+        sp<MediaPlayer2Base>     setDataSource_pre(player2_type playerType);
+        status_t                setDataSource_post(const sp<MediaPlayer2Base>& p,
+                                                   status_t status);
+
+        static  void            notify(void* cookie, int msg,
+                                       int ext1, int ext2, const Parcel *obj);
+
+                pid_t           pid() const { return mPid; }
+        virtual status_t        dump(int fd, const Vector<String16>& args);
+
+                audio_session_t getAudioSessionId() { return mAudioSessionId; }
+        // Modular DRM
+        virtual status_t prepareDrm(const uint8_t uuid[16], const Vector<uint8_t>& drmSessionId);
+        virtual status_t releaseDrm();
+        // AudioRouting
+        virtual status_t setOutputDevice(audio_port_handle_t deviceId);
+        virtual status_t getRoutedDeviceId(audio_port_handle_t* deviceId);
+        virtual status_t enableAudioDeviceCallback(bool enabled);
+
+    private:
+        class AudioDeviceUpdatedNotifier: public AudioSystem::AudioDeviceCallback
+        {
+        public:
+            AudioDeviceUpdatedNotifier(const sp<MediaPlayer2Base>& listener) {
+                mListener = listener;
+            }
+            ~AudioDeviceUpdatedNotifier() {}
+
+            virtual void onAudioDeviceUpdate(audio_io_handle_t audioIo,
+                                             audio_port_handle_t deviceId);
+
+        private:
+            wp<MediaPlayer2Base> mListener;
+        };
+
+        friend class MediaPlayer2Manager;
+                                Client(pid_t pid,
+                                       int32_t connId,
+                                       const sp<MediaPlayer2EngineClient>& client,
+                                       audio_session_t audioSessionId,
+                                       uid_t uid);
+                                Client();
+        virtual                 ~Client();
+
+                void            deletePlayer();
+
+        sp<MediaPlayer2Base>     getPlayer() const { Mutex::Autolock lock(mLock); return mPlayer; }
+
+
+
+        // @param type Of the metadata to be tested.
+        // @return true if the metadata should be dropped according to
+        //              the filters.
+        bool shouldDropMetadata(media::Metadata::Type type) const;
+
+        // Add a new element to the set of metadata updated. Noop if
+        // the element exists already.
+        // @param type Of the metadata to be recorded.
+        void addNewMetadataUpdate(media::Metadata::Type type);
+
+        // Disconnect from the currently connected ANativeWindow.
+        void disconnectNativeWindow_l();
+
+        status_t setAudioAttributes_l(const Parcel &request);
+
+        mutable     Mutex                        mLock;
+                    sp<MediaPlayer2Base>         mPlayer;
+                    sp<MediaPlayer2EngineClient> mClient;
+                    sp<AudioOutput>              mAudioOutput;
+                    pid_t                        mPid;
+                    status_t                     mStatus;
+                    bool                         mLoop;
+                    int32_t                      mConnId;
+                    audio_session_t              mAudioSessionId;
+                    audio_attributes_t *         mAudioAttributes;
+                    uid_t                        mUid;
+                    sp<ANativeWindow>            mConnectedWindow;
+                    sp<IBinder>                  mConnectedWindowBinder;
+                    struct sockaddr_in           mRetransmitEndpoint;
+                    bool                         mRetransmitEndpointValid;
+                    sp<Client>                   mNextClient;
+
+        // Metadata filters.
+        media::Metadata::Filter mMetadataAllow;  // protected by mLock
+        media::Metadata::Filter mMetadataDrop;  // protected by mLock
+
+        // Metadata updated. For each MEDIA_INFO_METADATA_UPDATE
+        // notification we try to update mMetadataUpdated which is a
+        // set: no duplicate.
+        // getMetadata clears this set.
+        media::Metadata::Filter mMetadataUpdated;  // protected by mLock
+
+        sp<AudioDeviceUpdatedNotifier> mAudioDeviceUpdatedListener;
+#if CALLBACK_ANTAGONIZER
+                    Antagonizer*                mAntagonizer;
+#endif
+    }; // Client
+
+// ----------------------------------------------------------------------------
+
+    pid_t mPid;
+    uid_t mUid;
+
+    mutable Mutex mLock;
+    SortedVector< wp<Client> > mClients;
+    int32_t mNextConnId;
+};
+
+// ----------------------------------------------------------------------------
+
+}; // namespace android
+
+#endif // ANDROID_MEDIAPLAYER2MANAGER_H
diff --git a/media/libmedia/TestPlayerStub.cpp b/media/libmedia/TestPlayerStub.cpp
new file mode 100644
index 0000000..3548793
--- /dev/null
+++ b/media/libmedia/TestPlayerStub.cpp
@@ -0,0 +1,198 @@
+/*
+ * Copyright 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "TestPlayerStub"
+#include "utils/Log.h"
+
+#include "TestPlayerStub.h"
+
+#include <dlfcn.h>  // for dlopen/dlclose
+#include <stdlib.h>
+#include <string.h>
+#include <cutils/properties.h>
+#include <utils/Errors.h>  // for status_t
+
+#include "media/MediaPlayer2Interface.h"
+
+
+namespace {
+using android::status_t;
+using android::MediaPlayer2Base;
+
+const char *kTestUrlScheme = "test:";
+const char *kUrlParam = "url=";
+
+const char *kBuildTypePropName = "ro.build.type";
+const char *kEngBuild = "eng";
+const char *kTestBuild = "test";
+
+// @return true if the current build is 'eng' or 'test'.
+bool isTestBuild()
+{
+    char prop[PROPERTY_VALUE_MAX] = { '\0', };
+
+    property_get(kBuildTypePropName, prop, "\0");
+    return strcmp(prop, kEngBuild) == 0 || strcmp(prop, kTestBuild) == 0;
+}
+
+// @return true if the url scheme is 'test:'
+bool isTestUrl(const char *url)
+{
+    return url && strncmp(url, kTestUrlScheme, strlen(kTestUrlScheme)) == 0;
+}
+
+}  // anonymous namespace
+
+namespace android {
+
+TestPlayerStub::TestPlayerStub()
+    :mUrl(NULL), mFilename(NULL), mContentUrl(NULL),
+     mHandle(NULL), mNewPlayer(NULL), mDeletePlayer(NULL),
+     mPlayer(NULL) { }
+
+TestPlayerStub::~TestPlayerStub()
+{
+    resetInternal();
+}
+
+status_t TestPlayerStub::initCheck()
+{
+    return isTestBuild() ? OK : INVALID_OPERATION;
+}
+
+// Parse mUrl to get:
+// * The library to be dlopened.
+// * The url to be passed to the real setDataSource impl.
+//
+// mUrl is expected to be in following format:
+//
+// test:<name of the .so>?url=<url for setDataSource>
+//
+// The value of the url parameter is treated as a string (no
+// unescaping of illegal characters).
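+//
+// For example (illustrative values only), parsing
+//   test:invoke_mock_media_player.so?url=http://example.com
+// leaves mFilename = "invoke_mock_media_player.so" and
+// mContentUrl = "http://example.com".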
+status_t TestPlayerStub::parseUrl()
+{
+    if (strlen(mUrl) < strlen(kTestUrlScheme)) {
+        resetInternal();
+        return BAD_VALUE;
+    }
+
+    char *i = mUrl + strlen(kTestUrlScheme);
+
+    mFilename = i;
+
+    while (*i != '\0' && *i != '?') {
+        ++i;
+    }
+
+    if (*i == '\0' || strncmp(i + 1, kUrlParam, strlen(kUrlParam)) != 0) {
+        resetInternal();
+        return BAD_VALUE;
+    }
+    *i = '\0';  // replace '?' with '\0' to nul-terminate mFilename
+
+    mContentUrl = i + 1 + strlen(kUrlParam);
+    return OK;
+}
+
+// Load the dynamic library.
+// Create the test player.
+// Call setDataSource on the test player with the url in param.
+status_t TestPlayerStub::setDataSource(
+        const sp<MediaHTTPService> &httpService,
+        const char *url,
+        const KeyedVector<String8, String8> *headers) {
+    if (!isTestUrl(url) || NULL != mHandle) {
+        return INVALID_OPERATION;
+    }
+
+    mUrl = strdup(url);
+
+    status_t status = parseUrl();
+
+    if (OK != status) {
+        resetInternal();
+        return status;
+    }
+
+    ::dlerror();  // Clears any pending error.
+
+    // Load the test player named in the url. dlopen will fail if the lib
+    // is not there. The shared libraries live under /system/lib.
+    // None of the entry points should be NULL.
+    mHandle = ::dlopen(mFilename, RTLD_NOW | RTLD_GLOBAL);
+    if (!mHandle) {
+        ALOGE("dlopen failed: %s", ::dlerror());
+        resetInternal();
+        return UNKNOWN_ERROR;
+    }
+
+    // Load the 2 entry points to create and delete instances.
+    const char *err;
+    mNewPlayer = reinterpret_cast<NEW_PLAYER>(dlsym(mHandle,
+                                                    "newPlayer"));
+    err = ::dlerror();
+    if (err || mNewPlayer == NULL) {
+        // If err is NULL, the string "<null>" is inserted in the logs, which
+        // means mNewPlayer itself was NULL.
+        ALOGE("dlsym for newPlayer failed %s", err);
+        resetInternal();
+        return UNKNOWN_ERROR;
+    }
+
+    mDeletePlayer = reinterpret_cast<DELETE_PLAYER>(dlsym(mHandle,
+                                                          "deletePlayer"));
+    err = ::dlerror();
+    if (err || mDeletePlayer == NULL) {
+        ALOGE("dlsym for deletePlayer failed %s", err);
+        resetInternal();
+        return UNKNOWN_ERROR;
+    }
+
+    mPlayer = (*mNewPlayer)();
+    return mPlayer->setDataSource(httpService, mContentUrl, headers);
+}
+
+// Internal cleanup.
+status_t TestPlayerStub::resetInternal()
+{
+    if (mUrl) {
+        free(mUrl);
+        mUrl = NULL;
+    }
+    mFilename = NULL;
+    mContentUrl = NULL;
+
+    if (mPlayer) {
+        ALOG_ASSERT(mDeletePlayer != NULL, "mDeletePlayer is null");
+        (*mDeletePlayer)(mPlayer);
+        mPlayer = NULL;
+    }
+
+    if (mHandle) {
+        ::dlclose(mHandle);
+        mHandle = NULL;
+    }
+    return OK;
+}
+
+/* static */ bool TestPlayerStub::canBeUsed(const char *url)
+{
+    return isTestBuild() && isTestUrl(url);
+}
+
+}  // namespace android
diff --git a/media/libmedia/TestPlayerStub.h b/media/libmedia/TestPlayerStub.h
new file mode 100644
index 0000000..1530ae1
--- /dev/null
+++ b/media/libmedia/TestPlayerStub.h
@@ -0,0 +1,133 @@
+/*
+ * Copyright 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_FRAMEWORKS_BASE_MEDIA_LIBMEDIA_TESTPLAYERSTUB_H__
+#define ANDROID_FRAMEWORKS_BASE_MEDIA_LIBMEDIA_TESTPLAYERSTUB_H__
+
+#include <media/MediaPlayer2Interface.h>
+#include <utils/Errors.h>
+
+namespace android {
+class MediaPlayer2Base;  // in media/MediaPlayer2Interface.h
+
+// Wrapper around a test media player that gets dynamically loaded.
+//
+// The URL passed to setDataSource has this format:
+//
+//   test:<name of the .so>?url=<url for the real setDataSource impl.>
+//
+// e.g:
+//   test:invoke_test_media_player.so?url=http://youtube.com/
+//   test:invoke_test_media_player.so?url=speedtest
+//
+// TestPlayerStub::setDataSource loads the library named in the test url.
+// Two entry points with C linkage are expected: one to create the test
+// player and one to destroy it.
+//
+// extern "C" android::MediaPlayer2Base* newPlayer();
+// extern "C" android::status_t deletePlayer(android::MediaPlayer2Base *p);
+//
+// Once the test player has been loaded, its setDataSource
+// implementation is called with the value of the 'url' parameter.
+//
+// Typical usage in a Java test:
+// ============================
+//
+//  MediaPlayer2 p = new MediaPlayer2();
+//  p.setDataSource("test:invoke_mock_media_player.so?url=http://youtube.com");
+//  p.prepare();
+//  ...
+//  p.release();
+
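+// A minimal test player library (illustrative sketch only, not shipped with
+// the framework) could export the two entry points like this, assuming a
+// hypothetical EchoPlayer class derived from MediaPlayer2Interface:
+//
+//   extern "C" android::MediaPlayer2Base* newPlayer() {
+//       return new EchoPlayer();
+//   }
+//   extern "C" android::status_t deletePlayer(android::MediaPlayer2Base* player) {
+//       delete player;
+//       return android::OK;
+//   }
+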
+class TestPlayerStub : public MediaPlayer2Interface {
+  public:
+    typedef MediaPlayer2Base* (*NEW_PLAYER)();
+    typedef status_t (*DELETE_PLAYER)(MediaPlayer2Base *);
+
+    TestPlayerStub();
+    virtual ~TestPlayerStub();
+
+    // Called right after the constructor. Check if the current build
+    // allows test players.
+    virtual status_t initCheck();
+
+    // @param url Should be a test url. See class comment.
+    virtual status_t setDataSource(
+            const sp<MediaHTTPService> &httpService,
+            const char* url,
+            const KeyedVector<String8, String8> *headers);
+
+    // Test player for a file descriptor source is not supported.
+    virtual status_t setDataSource(int, int64_t, int64_t)  {
+        return INVALID_OPERATION;
+    }
+
+
+    // All the methods below wrap the mPlayer instance.
+    virtual status_t setVideoSurfaceTexture(
+            const android::sp<android::IGraphicBufferProducer>& st)  {
+        return mPlayer->setVideoSurfaceTexture(st);
+    }
+    virtual status_t prepare() {return mPlayer->prepare();}
+    virtual status_t prepareAsync()  {return mPlayer->prepareAsync();}
+    virtual status_t start()  {return mPlayer->start();}
+    virtual status_t stop()  {return mPlayer->stop();}
+    virtual status_t pause()  {return mPlayer->pause();}
+    virtual bool isPlaying() {return mPlayer->isPlaying();}
+    virtual status_t seekTo(
+            int msec,
+            MediaPlayer2SeekMode mode = MediaPlayer2SeekMode::SEEK_PREVIOUS_SYNC) {
+        return mPlayer->seekTo(msec, mode);
+    }
+    virtual status_t getCurrentPosition(int *p)  {
+        return mPlayer->getCurrentPosition(p);
+    }
+    virtual status_t getDuration(int *d)  {return mPlayer->getDuration(d);}
+    virtual status_t reset() {return mPlayer->reset();}
+    virtual status_t setLooping(int b)  {return mPlayer->setLooping(b);}
+    virtual player2_type playerType() {return mPlayer->playerType();}
+    virtual status_t invoke(const android::Parcel& in, android::Parcel *out) {
+        return mPlayer->invoke(in, out);
+    }
+    virtual status_t setParameter(int key, const Parcel &request) {
+        return mPlayer->setParameter(key, request);
+    }
+    virtual status_t getParameter(int key, Parcel *reply) {
+        return mPlayer->getParameter(key, reply);
+    }
+
+
+    // @return true if the current build is 'eng' or 'test' and the
+    //              url's scheme is 'test:'
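+    // Illustrative only: a player factory could gate the test player on this
+    // check, e.g.
+    //   if (TestPlayerStub::canBeUsed(url)) { /* create PLAYER2_TEST_PLAYER */ }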
+    static bool canBeUsed(const char *url);
+
+  private:
+    // Release the player, dlclose the library.
+    status_t resetInternal();
+    status_t parseUrl();
+
+    char *mUrl;                // test:foo.so?url=http://bar
+    char *mFilename;           // foo.so
+    char *mContentUrl;         // http://bar
+    void *mHandle;             // returned by dlopen
+    NEW_PLAYER    mNewPlayer;
+    DELETE_PLAYER mDeletePlayer;
+    MediaPlayer2Base *mPlayer; // wrapped player
+};
+
+}  // namespace android
+
+#endif  // ANDROID_FRAMEWORKS_BASE_MEDIA_LIBMEDIA_TESTPLAYERSTUB_H__
diff --git a/media/libmedia/include/media/MediaPlayer2Engine.h b/media/libmedia/include/media/MediaPlayer2Engine.h
new file mode 100644
index 0000000..11d087b
--- /dev/null
+++ b/media/libmedia/include/media/MediaPlayer2Engine.h
@@ -0,0 +1,140 @@
+/*
+ * Copyright 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_MEDIAPLAYER2ENGINE_H
+#define ANDROID_MEDIAPLAYER2ENGINE_H
+
+#include <utils/RefBase.h>
+#include <binder/Parcel.h>
+#include <utils/KeyedVector.h>
+#include <system/audio.h>
+
+#include <media/MediaSource.h>
+#include <media/VolumeShaper.h>
+
+// Fwd decl to make sure everyone agrees that the scope of struct sockaddr_in is
+// global, and not in android::
+struct sockaddr_in;
+
+namespace android {
+
+class Parcel;
+class Surface;
+class IDataSource;
+struct IStreamSource;
+class IGraphicBufferProducer;
+struct MediaHTTPService;
+struct AudioPlaybackRate;
+struct AVSyncSettings;
+struct BufferingSettings;
+
+typedef MediaSource::ReadOptions::SeekMode MediaPlayer2SeekMode;
+
+class MediaPlayer2Engine: public RefBase
+{
+public:
+    virtual void            disconnect() = 0;
+
+    virtual status_t        setDataSource(
+            const sp<MediaHTTPService> &httpService,
+            const char *url,
+            const KeyedVector<String8, String8>* headers) = 0;
+
+    virtual status_t        setDataSource(int fd, int64_t offset, int64_t length) = 0;
+    virtual status_t        setDataSource(const sp<IStreamSource>& source) = 0;
+    virtual status_t        setDataSource(const sp<IDataSource>& source) = 0;
+    virtual status_t        setVideoSurfaceTexture(
+                                    const sp<IGraphicBufferProducer>& bufferProducer) = 0;
+    virtual status_t        getBufferingSettings(
+                                    BufferingSettings* buffering /* nonnull */) = 0;
+    virtual status_t        setBufferingSettings(const BufferingSettings& buffering) = 0;
+    virtual status_t        prepareAsync() = 0;
+    virtual status_t        start() = 0;
+    virtual status_t        stop() = 0;
+    virtual status_t        pause() = 0;
+    virtual status_t        isPlaying(bool* state) = 0;
+    virtual status_t        setPlaybackSettings(const AudioPlaybackRate& rate) = 0;
+    virtual status_t        getPlaybackSettings(AudioPlaybackRate* rate /* nonnull */) = 0;
+    virtual status_t        setSyncSettings(const AVSyncSettings& sync, float videoFpsHint) = 0;
+    virtual status_t        getSyncSettings(AVSyncSettings* sync /* nonnull */,
+                                            float* videoFps /* nonnull */) = 0;
+    virtual status_t        seekTo(
+            int msec,
+            MediaPlayer2SeekMode mode = MediaPlayer2SeekMode::SEEK_PREVIOUS_SYNC) = 0;
+    virtual status_t        getCurrentPosition(int* msec) = 0;
+    virtual status_t        getDuration(int* msec) = 0;
+    virtual status_t        notifyAt(int64_t mediaTimeUs) = 0;
+    virtual status_t        reset() = 0;
+    virtual status_t        setAudioStreamType(audio_stream_type_t type) = 0;
+    virtual status_t        setLooping(int loop) = 0;
+    virtual status_t        setVolume(float leftVolume, float rightVolume) = 0;
+    virtual status_t        setAuxEffectSendLevel(float level) = 0;
+    virtual status_t        attachAuxEffect(int effectId) = 0;
+    virtual status_t        setParameter(int key, const Parcel& request) = 0;
+    virtual status_t        getParameter(int key, Parcel* reply) = 0;
+    virtual status_t        setRetransmitEndpoint(const struct sockaddr_in* endpoint) = 0;
+    virtual status_t        getRetransmitEndpoint(struct sockaddr_in* endpoint) = 0;
+    virtual status_t        setNextPlayer(const sp<MediaPlayer2Engine>& next) = 0;
+
+    virtual media::VolumeShaper::Status applyVolumeShaper(
+                                    const sp<media::VolumeShaper::Configuration>& configuration,
+                                    const sp<media::VolumeShaper::Operation>& operation) = 0;
+    virtual sp<media::VolumeShaper::State> getVolumeShaperState(int id) = 0;
+
+    // Modular DRM
+    virtual status_t        prepareDrm(const uint8_t uuid[16],
+                                    const Vector<uint8_t>& drmSessionId) = 0;
+    virtual status_t        releaseDrm() = 0;
+
+    // Invoke a generic method on the player by using opaque parcels
+    // for the request and reply.
+    // @param request Parcel that must start with the media player
+    // interface token.
+    // @param[out] reply Parcel to hold the reply data. Cannot be null.
+    // @return OK if the invocation was made successfully.
+    virtual status_t        invoke(const Parcel& request, Parcel *reply) = 0;
+
+    // Set a new metadata filter.
+    // @param filter A set of allow and drop rules serialized in a Parcel.
+    // @return OK if the invocation was made successfully.
+    virtual status_t        setMetadataFilter(const Parcel& filter) = 0;
+
+    // Retrieve a set of metadata.
+    // @param update_only Include only the metadata that have changed
+    //                    since the last invocation of getMetadata.
+    //                    The set is built using the unfiltered
+    //                    notifications the native player sent to the
+    //                    MediaPlayer2Manager during that period of
+    //                    time. If false, all the metadata are considered.
+    // @param apply_filter If true, once the metadata set has been built based
+    //                     on the value update_only, the current filter is
+    //                     applied.
+    // @param[out] metadata On exit contains a set (possibly empty) of metadata.
+    //                      Valid only if the call returned OK.
+    // @return OK if the invocation was made successfully.
+    virtual status_t        getMetadata(bool update_only,
+                                        bool apply_filter,
+                                        Parcel *metadata) = 0;
+
+    // AudioRouting
+    virtual status_t        setOutputDevice(audio_port_handle_t deviceId) = 0;
+    virtual status_t        getRoutedDeviceId(audio_port_handle_t *deviceId) = 0;
+    virtual status_t        enableAudioDeviceCallback(bool enabled) = 0;
+};
+
+}; // namespace android
+
+#endif // ANDROID_MEDIAPLAYER2ENGINE_H
diff --git a/media/libmedia/include/media/MediaPlayer2EngineClient.h b/media/libmedia/include/media/MediaPlayer2EngineClient.h
new file mode 100644
index 0000000..22df095
--- /dev/null
+++ b/media/libmedia/include/media/MediaPlayer2EngineClient.h
@@ -0,0 +1,33 @@
+/*
+ * Copyright 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_MEDIAPLAYER2ENGINECLIENT_H
+#define ANDROID_MEDIAPLAYER2ENGINECLIENT_H
+
+#include <utils/RefBase.h>
+#include <binder/Parcel.h>
+
+namespace android {
+
+class MediaPlayer2EngineClient: public RefBase
+{
+public:
+    virtual void notify(int msg, int ext1, int ext2, const Parcel *obj) = 0;
+};
+
+}; // namespace android
+
+#endif // ANDROID_MEDIAPLAYER2ENGINECLIENT_H
diff --git a/media/libmedia/include/media/MediaPlayer2Interface.h b/media/libmedia/include/media/MediaPlayer2Interface.h
new file mode 100644
index 0000000..bd1146d
--- /dev/null
+++ b/media/libmedia/include/media/MediaPlayer2Interface.h
@@ -0,0 +1,339 @@
+/*
+ * Copyright 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_MEDIAPLAYER2INTERFACE_H
+#define ANDROID_MEDIAPLAYER2INTERFACE_H
+
+#ifdef __cplusplus
+
+#include <sys/types.h>
+#include <utils/Errors.h>
+#include <utils/KeyedVector.h>
+#include <utils/String8.h>
+#include <utils/RefBase.h>
+
+#include <media/mediaplayer2.h>
+#include <media/AudioResamplerPublic.h>
+#include <media/AudioSystem.h>
+#include <media/AudioTimestamp.h>
+#include <media/AVSyncSettings.h>
+#include <media/BufferingSettings.h>
+#include <media/Metadata.h>
+
+// Fwd decl to make sure everyone agrees that the scope of struct sockaddr_in is
+// global, and not in android::
+struct sockaddr_in;
+
+namespace android {
+
+class DataSource;
+struct MediaHTTPService;
+class Parcel;
+class Surface;
+class IGraphicBufferProducer;
+
+template<typename T> class SortedVector;
+
+enum player2_type {
+    PLAYER2_STAGEFRIGHT_PLAYER = 3,
+    PLAYER2_NU_PLAYER2 = 4,
+    // Test players are available only in the 'test' and 'eng' builds.
+    // The shared library with the test player is passed as an
+    // argument to the 'test:' url in the setDataSource call.
+    PLAYER2_TEST_PLAYER = 5,
+};
+
+
+#define DEFAULT_AUDIOSINK_BUFFERCOUNT 4
+#define DEFAULT_AUDIOSINK_BUFFERSIZE 1200
+#define DEFAULT_AUDIOSINK_SAMPLERATE 44100
+
+// when the channel mask isn't known, use the channel count to derive a mask in AudioSink::open()
+#define CHANNEL_MASK_USE_CHANNEL_ORDER 0
+
+// duration below which we do not allow deep audio buffering
+#define AUDIO_SINK_MIN_DEEP_BUFFER_DURATION_US 5000000
+
+// callback mechanism for passing messages to MediaPlayer2 object
+typedef void (*notify_callback_f)(void* cookie,
+        int msg, int ext1, int ext2, const Parcel *obj);
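+// For example (illustrative only), the owner of a player implementation
+// typically wires the callback up once, e.g. the manager's Client passing its
+// static trampoline:
+//   player->setNotifyCallback(cookie, MediaPlayer2Manager::Client::notify);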
+
+// abstract base class - use MediaPlayer2Interface
+class MediaPlayer2Base : public RefBase
+{
+public:
+    // AudioSink: abstraction layer for audio output
+    class AudioSink : public RefBase {
+    public:
+        enum cb_event_t {
+            CB_EVENT_FILL_BUFFER,   // Request to write more data to buffer.
+            CB_EVENT_STREAM_END,    // Sent after all the buffers queued in AF and HW are played
+                                    // back (after stop is called)
+            CB_EVENT_TEAR_DOWN      // The AudioTrack was invalidated due to use case change:
+                                    // Need to re-evaluate offloading options
+        };
+
+        // Callback returns the number of bytes actually written to the buffer.
+        typedef size_t (*AudioCallback)(
+                AudioSink *audioSink, void *buffer, size_t size, void *cookie,
+                        cb_event_t event);
+
+        virtual             ~AudioSink() {}
+        virtual bool        ready() const = 0; // audio output is open and ready
+        virtual ssize_t     bufferSize() const = 0;
+        virtual ssize_t     frameCount() const = 0;
+        virtual ssize_t     channelCount() const = 0;
+        virtual ssize_t     frameSize() const = 0;
+        virtual uint32_t    latency() const = 0;
+        virtual float       msecsPerFrame() const = 0;
+        virtual status_t    getPosition(uint32_t *position) const = 0;
+        virtual status_t    getTimestamp(AudioTimestamp &ts) const = 0;
+        virtual int64_t     getPlayedOutDurationUs(int64_t nowUs) const = 0;
+        virtual status_t    getFramesWritten(uint32_t *frameswritten) const = 0;
+        virtual audio_session_t getSessionId() const = 0;
+        virtual audio_stream_type_t getAudioStreamType() const = 0;
+        virtual uint32_t    getSampleRate() const = 0;
+        virtual int64_t     getBufferDurationInUs() const = 0;
+
+        // If no callback is specified, use the "write" API below to submit
+        // audio data.
+        virtual status_t    open(
+                uint32_t sampleRate, int channelCount, audio_channel_mask_t channelMask,
+                audio_format_t format=AUDIO_FORMAT_PCM_16_BIT,
+                int bufferCount=DEFAULT_AUDIOSINK_BUFFERCOUNT,
+                AudioCallback cb = NULL,
+                void *cookie = NULL,
+                audio_output_flags_t flags = AUDIO_OUTPUT_FLAG_NONE,
+                const audio_offload_info_t *offloadInfo = NULL,
+                bool doNotReconnect = false,
+                uint32_t suggestedFrameCount = 0) = 0;
+
+        virtual status_t    start() = 0;
+
+        /* Input parameter |size| is in byte units stored in |buffer|.
+         * Data is copied over and actual number of bytes written (>= 0)
+         * is returned, or no data is copied and a negative status code
+         * is returned (even when |blocking| is true).
+         * When |blocking| is false, AudioSink will immediately return after
+         * part of or full |buffer| is copied over.
+         * When |blocking| is true, AudioSink will wait to copy the entire
+         * buffer, unless an error occurs or the copy operation is
+         * prematurely stopped.
+         */
+        virtual ssize_t     write(const void* buffer, size_t size, bool blocking = true) = 0;
+
+        virtual void        stop() = 0;
+        virtual void        flush() = 0;
+        virtual void        pause() = 0;
+        virtual void        close() = 0;
+
+        virtual status_t    setPlaybackRate(const AudioPlaybackRate& rate) = 0;
+        virtual status_t    getPlaybackRate(AudioPlaybackRate* rate /* nonnull */) = 0;
+        virtual bool        needsTrailingPadding() { return true; }
+
+        virtual status_t    setParameters(const String8& /* keyValuePairs */) { return NO_ERROR; }
+        virtual String8     getParameters(const String8& /* keys */) { return String8::empty(); }
+
+        virtual media::VolumeShaper::Status applyVolumeShaper(
+                                    const sp<media::VolumeShaper::Configuration>& configuration,
+                                    const sp<media::VolumeShaper::Operation>& operation);
+        virtual sp<media::VolumeShaper::State> getVolumeShaperState(int id);
+
+        // AudioRouting
+        virtual status_t    setOutputDevice(audio_port_handle_t deviceId);
+        virtual status_t    getRoutedDeviceId(audio_port_handle_t* deviceId);
+        virtual status_t    enableAudioDeviceCallback(bool enabled);
+    };
+
+                        MediaPlayer2Base() : mCookie(0), mNotify(0) {}
+    virtual             ~MediaPlayer2Base() {}
+    virtual status_t    initCheck() = 0;
+    virtual bool        hardwareOutput() = 0;
+
+    virtual status_t    setUID(uid_t /* uid */) {
+        return INVALID_OPERATION;
+    }
+
+    virtual status_t    setDataSource(
+            const sp<MediaHTTPService> &httpService,
+            const char *url,
+            const KeyedVector<String8, String8> *headers = NULL) = 0;
+
+    virtual status_t    setDataSource(int fd, int64_t offset, int64_t length) = 0;
+
+    virtual status_t    setDataSource(const sp<IStreamSource>& /* source */) {
+        return INVALID_OPERATION;
+    }
+
+    virtual status_t    setDataSource(const sp<DataSource>& /* source */) {
+        return INVALID_OPERATION;
+    }
+
+    // pass the buffered IGraphicBufferProducer to the player implementation
+    virtual status_t    setVideoSurfaceTexture(
+                                const sp<IGraphicBufferProducer>& bufferProducer) = 0;
+
+    virtual status_t    getBufferingSettings(
+                                BufferingSettings* buffering /* nonnull */) {
+        *buffering = BufferingSettings();
+        return OK;
+    }
+    virtual status_t    setBufferingSettings(const BufferingSettings& /* buffering */) {
+        return OK;
+    }
+
+    virtual status_t    prepare() = 0;
+    virtual status_t    prepareAsync() = 0;
+    virtual status_t    start() = 0;
+    virtual status_t    stop() = 0;
+    virtual status_t    pause() = 0;
+    virtual bool        isPlaying() = 0;
+    virtual status_t    setPlaybackSettings(const AudioPlaybackRate& rate) {
+        // by default, players only support setting rate to the default
+        if (!isAudioPlaybackRateEqual(rate, AUDIO_PLAYBACK_RATE_DEFAULT)) {
+            return BAD_VALUE;
+        }
+        return OK;
+    }
+    virtual status_t    getPlaybackSettings(AudioPlaybackRate* rate /* nonnull */) {
+        *rate = AUDIO_PLAYBACK_RATE_DEFAULT;
+        return OK;
+    }
+    virtual status_t    setSyncSettings(const AVSyncSettings& sync, float /* videoFps */) {
+        // By default, players only support setting sync source to default; all other sync
+        // settings are ignored. There is no requirement for getters to return set values.
+        if (sync.mSource != AVSYNC_SOURCE_DEFAULT) {
+            return BAD_VALUE;
+        }
+        return OK;
+    }
+    virtual status_t    getSyncSettings(
+                                AVSyncSettings* sync /* nonnull */, float* videoFps /* nonnull */) {
+        *sync = AVSyncSettings();
+        *videoFps = -1.f;
+        return OK;
+    }
+    virtual status_t    seekTo(
+            int msec, MediaPlayer2SeekMode mode = MediaPlayer2SeekMode::SEEK_PREVIOUS_SYNC) = 0;
+    virtual status_t    getCurrentPosition(int *msec) = 0;
+    virtual status_t    getDuration(int *msec) = 0;
+    virtual status_t    reset() = 0;
+    virtual status_t    notifyAt(int64_t /* mediaTimeUs */) {
+        return INVALID_OPERATION;
+    }
+    virtual status_t    setLooping(int loop) = 0;
+    virtual player2_type playerType() = 0;
+    virtual status_t    setParameter(int key, const Parcel &request) = 0;
+    virtual status_t    getParameter(int key, Parcel *reply) = 0;
+
+    // default no-op implementation of optional extensions
+    virtual status_t setRetransmitEndpoint(const struct sockaddr_in* /* endpoint */) {
+        return INVALID_OPERATION;
+    }
+    virtual status_t getRetransmitEndpoint(struct sockaddr_in* /* endpoint */) {
+        return INVALID_OPERATION;
+    }
+    virtual status_t setNextPlayer(const sp<MediaPlayer2Base>& /* next */) {
+        return OK;
+    }
+
+    // Invoke a generic method on the player by using opaque parcels
+    // for the request and reply.
+    //
+    // @param request Parcel that is positioned at the start of the
+    //                data sent by the java layer.
+    // @param[out] reply Parcel to hold the reply data. Cannot be null.
+    // @return OK if the call was successful.
+    virtual status_t    invoke(const Parcel& request, Parcel *reply) = 0;
+
+    // The Client in the MediaPlayer2Manager calls this method on
+    // the native player to retrieve all or a subset of metadata.
+    //
+    // @param ids Sorted list of metadata IDs to be fetched. If empty, all
+    //            the known metadata should be returned.
+    // @param[inout] records Parcel where the player appends its metadata.
+    // @return OK if the call was successful.
+    virtual status_t    getMetadata(const media::Metadata::Filter& /* ids */,
+                                    Parcel* /* records */) {
+        return INVALID_OPERATION;
+    };
+
+    void        setNotifyCallback(
+            void* cookie, notify_callback_f notifyFunc) {
+        Mutex::Autolock autoLock(mNotifyLock);
+        mCookie = cookie; mNotify = notifyFunc;
+    }
+
+    void        sendEvent(int msg, int ext1=0, int ext2=0,
+                          const Parcel *obj=NULL) {
+        notify_callback_f notifyCB;
+        void* cookie;
+        {
+            Mutex::Autolock autoLock(mNotifyLock);
+            notifyCB = mNotify;
+            cookie = mCookie;
+        }
+
+        if (notifyCB) notifyCB(cookie, msg, ext1, ext2, obj);
+    }
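+    // For example (illustrative only), a player implementation could report
+    // the completion of an asynchronous seek with
+    //   sendEvent(MEDIA2_SEEK_COMPLETE);
+    // or the start of internal buffering with
+    //   sendEvent(MEDIA2_INFO, MEDIA2_INFO_BUFFERING_START);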
+
+    virtual status_t dump(int /* fd */, const Vector<String16>& /* args */) const {
+        return INVALID_OPERATION;
+    }
+
+    // Modular DRM
+    virtual status_t prepareDrm(const uint8_t /* uuid */[16], const Vector<uint8_t>& /* drmSessionId */) {
+        return INVALID_OPERATION;
+    }
+    virtual status_t releaseDrm() {
+        return INVALID_OPERATION;
+    }
+
+private:
+    friend class MediaPlayer2Manager;
+
+    Mutex               mNotifyLock;
+    void*               mCookie;
+    notify_callback_f   mNotify;
+};
+
+// Implement this class for media players that use the AudioFlinger software mixer
+class MediaPlayer2Interface : public MediaPlayer2Base
+{
+public:
+    virtual             ~MediaPlayer2Interface() { }
+    virtual bool        hardwareOutput() { return false; }
+    virtual void        setAudioSink(const sp<AudioSink>& audioSink) { mAudioSink = audioSink; }
+protected:
+    sp<AudioSink>       mAudioSink;
+};
+
+// Implement this class for media players that output audio directly to hardware
+class MediaPlayer2HWInterface : public MediaPlayer2Base
+{
+public:
+    virtual             ~MediaPlayer2HWInterface() {}
+    virtual bool        hardwareOutput() { return true; }
+    virtual status_t    setVolume(float leftVolume, float rightVolume) = 0;
+    virtual status_t    setAudioStreamType(audio_stream_type_t streamType) = 0;
+};
+
+}; // namespace android
+
+#endif // __cplusplus
+
+
+#endif // ANDROID_MEDIAPLAYER2INTERFACE_H
diff --git a/media/libmedia/include/media/mediaplayer.h b/media/libmedia/include/media/mediaplayer.h
index dc3b3aa..c4dbf42 100644
--- a/media/libmedia/include/media/mediaplayer.h
+++ b/media/libmedia/include/media/mediaplayer.h
@@ -17,6 +17,8 @@
 #ifndef ANDROID_MEDIAPLAYER_H
 #define ANDROID_MEDIAPLAYER_H
 
+#include <media/mediaplayer_common.h>
+
 #include <arpa/inet.h>
 
 #include <binder/IMemory.h>
@@ -188,16 +190,6 @@
     INVOKE_ID_GET_SELECTED_TRACK = 7
 };
 
-// Keep MEDIA_TRACK_TYPE_* in sync with MediaPlayer.java.
-enum media_track_type {
-    MEDIA_TRACK_TYPE_UNKNOWN = 0,
-    MEDIA_TRACK_TYPE_VIDEO = 1,
-    MEDIA_TRACK_TYPE_AUDIO = 2,
-    MEDIA_TRACK_TYPE_TIMEDTEXT = 3,
-    MEDIA_TRACK_TYPE_SUBTITLE = 4,
-    MEDIA_TRACK_TYPE_METADATA = 5,
-};
-
 // ----------------------------------------------------------------------------
 // ref-counted object for callbacks
 class MediaPlayerListener: virtual public RefBase
diff --git a/media/libmedia/include/media/mediaplayer2.h b/media/libmedia/include/media/mediaplayer2.h
new file mode 100644
index 0000000..2358a48
--- /dev/null
+++ b/media/libmedia/include/media/mediaplayer2.h
@@ -0,0 +1,310 @@
+/*
+ * Copyright 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_MEDIAPLAYER2_H
+#define ANDROID_MEDIAPLAYER2_H
+
+#include <media/mediaplayer_common.h>
+
+#include <arpa/inet.h>
+
+#include <media/AudioResamplerPublic.h>
+#include <media/BufferingSettings.h>
+#include <media/MediaPlayer2EngineClient.h>
+#include <media/MediaPlayer2Engine.h>
+
+#include <utils/Condition.h>
+#include <utils/KeyedVector.h>
+#include <utils/String8.h>
+#include <utils/ThreadDefs.h>
+
+struct ANativeWindow;
+
+namespace android {
+
+struct AVSyncSettings;
+class IGraphicBufferProducer;
+class Surface;
+
+enum media2_event_type {
+    MEDIA2_NOP               = 0, // interface test message
+    MEDIA2_PREPARED          = 1,
+    MEDIA2_PLAYBACK_COMPLETE = 2,
+    MEDIA2_BUFFERING_UPDATE  = 3,
+    MEDIA2_SEEK_COMPLETE     = 4,
+    MEDIA2_SET_VIDEO_SIZE    = 5,
+    MEDIA2_STARTED           = 6,
+    MEDIA2_PAUSED            = 7,
+    MEDIA2_STOPPED           = 8,
+    MEDIA2_SKIPPED           = 9,
+    MEDIA2_NOTIFY_TIME       = 98,
+    MEDIA2_TIMED_TEXT        = 99,
+    MEDIA2_ERROR             = 100,
+    MEDIA2_INFO              = 200,
+    MEDIA2_SUBTITLE_DATA     = 201,
+    MEDIA2_META_DATA         = 202,
+    MEDIA2_DRM_INFO          = 210,
+    MEDIA2_AUDIO_ROUTING_CHANGED = 10000,
+};
+
+// Generic error codes for the media player framework.  Errors are fatal;
+// playback must abort.
+//
+// Errors are communicated back to the client using the
+// MediaPlayer2Listener::notify method defined below.
+// In this situation, 'notify' is invoked with the following:
+//   'msg' is set to MEDIA2_ERROR.
+//   'ext1' should be a value from the enum media2_error_type.
+//   'ext2' contains an implementation dependent error code to provide
+//          more details. Should default to 0 when not used.
+//
+// The codes are distributed as follows:
+//   0xx: Reserved
+//   1xx: Android Player errors. Something went wrong inside the MediaPlayer2.
+//   2xx: Media errors (e.g Codec not supported). There is a problem with the
+//        media itself.
+//   3xx: Runtime errors. Some extraordinary condition arose making the playback
+//        impossible.
+//
+enum media2_error_type {
+    // 0xx
+    MEDIA2_ERROR_UNKNOWN = 1,
+    // 1xx
+    MEDIA2_ERROR_SERVER_DIED = 100,
+    // 2xx
+    MEDIA2_ERROR_NOT_VALID_FOR_PROGRESSIVE_PLAYBACK = 200,
+    // 3xx
+};
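+
+// For example (illustrative only), the death of the player engine would be
+// delivered to the listener as:
+//   notify(MEDIA2_ERROR, MEDIA2_ERROR_SERVER_DIED, 0 /* ext2 */, NULL /* obj */);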
+
+
+// Info and warning codes for the media player framework.  These are non-fatal;
+// playback continues, but there might be some user-visible issues.
+//
+// Info and warning messages are communicated back to the client using the
+// MediaPlayer2Listener::notify method defined below.  In this situation,
+// 'notify' is invoked with the following:
+//   'msg' is set to MEDIA2_INFO.
+//   'ext1' should be a value from the enum media2_info_type.
+//   'ext2' contains an implementation dependent info code to provide
+//          more details. Should default to 0 when not used.
+//
+// The codes are distributed as follows:
+//   0xx: Reserved
+//   7xx: Android Player info/warning (e.g. player lagging behind).
+//   8xx: Media info/warning (e.g. media badly interleaved).
+//
+enum media2_info_type {
+    // 0xx
+    MEDIA2_INFO_UNKNOWN = 1,
+    // The player was started because it was used as the next player for another
+    // player, which just completed playback
+    MEDIA2_INFO_STARTED_AS_NEXT = 2,
+    // The player just pushed the very first video frame for rendering
+    MEDIA2_INFO_RENDERING_START = 3,
+    // 7xx
+    // The video is too complex for the decoder: it can't decode frames fast
+    // enough. Possibly only the audio plays fine at this stage.
+    MEDIA2_INFO_VIDEO_TRACK_LAGGING = 700,
+    // MediaPlayer2 is temporarily pausing playback internally in order to
+    // buffer more data.
+    MEDIA2_INFO_BUFFERING_START = 701,
+    // MediaPlayer2 is resuming playback after filling buffers.
+    MEDIA2_INFO_BUFFERING_END = 702,
+    // Bandwidth in recent past
+    MEDIA2_INFO_NETWORK_BANDWIDTH = 703,
+
+    // 8xx
+    // Bad interleaving means that the media has been improperly interleaved or not
+    // interleaved at all, e.g. it has all the video samples first and then all the
+    // audio ones. Video plays, but a lot of disk seeking may be happening.
+    MEDIA2_INFO_BAD_INTERLEAVING = 800,
+    // The media is not seekable (e.g. live stream).
+    MEDIA2_INFO_NOT_SEEKABLE = 801,
+    // New media metadata is available.
+    MEDIA2_INFO_METADATA_UPDATE = 802,
+    // Audio can not be played.
+    MEDIA2_INFO_PLAY_AUDIO_ERROR = 804,
+    // Video can not be played.
+    MEDIA2_INFO_PLAY_VIDEO_ERROR = 805,
+
+    // 9xx
+    MEDIA2_INFO_TIMED_TEXT_ERROR = 900,
+};
+
+
+
+enum media_player2_states {
+    MEDIA_PLAYER2_STATE_ERROR        = 0,
+    MEDIA_PLAYER2_IDLE               = 1 << 0,
+    MEDIA_PLAYER2_INITIALIZED        = 1 << 1,
+    MEDIA_PLAYER2_PREPARING          = 1 << 2,
+    MEDIA_PLAYER2_PREPARED           = 1 << 3,
+    MEDIA_PLAYER2_STARTED            = 1 << 4,
+    MEDIA_PLAYER2_PAUSED             = 1 << 5,
+    MEDIA_PLAYER2_STOPPED            = 1 << 6,
+    MEDIA_PLAYER2_PLAYBACK_COMPLETE  = 1 << 7
+};
+
+// Keep KEY_PARAMETER_* in sync with MediaPlayer2.java.
+// The same enum space is used for both set and get, in case there are future keys that
+// can be both set and get.  But as of now, all parameters are either set only or get only.
+enum media2_parameter_keys {
+    // Streaming/buffering parameters
+    MEDIA2_KEY_PARAMETER_CACHE_STAT_COLLECT_FREQ_MS = 1100,            // set only
+
+    // Return a Parcel containing a single int, which is the channel count of the
+    // audio track, or zero for error (e.g. no audio track) or unknown.
+    MEDIA2_KEY_PARAMETER_AUDIO_CHANNEL_COUNT = 1200,                   // get only
+
+    // Playback rate expressed in permille (1000 is normal speed), saved as int32_t, with negative
+    // values used for rewinding or reverse playback.
+    MEDIA2_KEY_PARAMETER_PLAYBACK_RATE_PERMILLE = 1300,                // set only
+
+    // Set a Parcel containing the value of a parceled Java AudioAttributes instance
+    MEDIA2_KEY_PARAMETER_AUDIO_ATTRIBUTES = 1400                       // set only
+};
+
+// Keep INVOKE_ID_* in sync with MediaPlayer2.java.
+enum media_player2_invoke_ids {
+    MEDIA_PLAYER2_INVOKE_ID_GET_TRACK_INFO = 1,
+    MEDIA_PLAYER2_INVOKE_ID_ADD_EXTERNAL_SOURCE = 2,
+    MEDIA_PLAYER2_INVOKE_ID_ADD_EXTERNAL_SOURCE_FD = 3,
+    MEDIA_PLAYER2_INVOKE_ID_SELECT_TRACK = 4,
+    MEDIA_PLAYER2_INVOKE_ID_UNSELECT_TRACK = 5,
+    MEDIA_PLAYER2_INVOKE_ID_SET_VIDEO_SCALING_MODE = 6,
+    MEDIA_PLAYER2_INVOKE_ID_GET_SELECTED_TRACK = 7
+};
+
+// ----------------------------------------------------------------------------
+// ref-counted object for callbacks
+class MediaPlayer2Listener: virtual public RefBase
+{
+public:
+    virtual void notify(int msg, int ext1, int ext2, const Parcel *obj) = 0;
+};
+
+struct MediaHTTPService;
+
+class MediaPlayer2 : public MediaPlayer2EngineClient
+{
+public:
+    MediaPlayer2();
+    ~MediaPlayer2();
+            void            disconnect();
+
+            status_t        setDataSource(
+                    const sp<MediaHTTPService> &httpService,
+                    const char *url,
+                    const KeyedVector<String8, String8> *headers);
+
+            status_t        setDataSource(int fd, int64_t offset, int64_t length);
+            status_t        setDataSource(const sp<IDataSource> &source);
+            status_t        setVideoSurfaceTexture(
+                                    const sp<IGraphicBufferProducer>& bufferProducer);
+            status_t        setListener(const sp<MediaPlayer2Listener>& listener);
+            status_t        getBufferingSettings(BufferingSettings* buffering /* nonnull */);
+            status_t        setBufferingSettings(const BufferingSettings& buffering);
+            status_t        prepare();
+            status_t        prepareAsync();
+            status_t        start();
+            status_t        stop();
+            status_t        pause();
+            bool            isPlaying();
+            status_t        setPlaybackSettings(const AudioPlaybackRate& rate);
+            status_t        getPlaybackSettings(AudioPlaybackRate* rate /* nonnull */);
+            status_t        setSyncSettings(const AVSyncSettings& sync, float videoFpsHint);
+            status_t        getSyncSettings(
+                                    AVSyncSettings* sync /* nonnull */,
+                                    float* videoFps /* nonnull */);
+            status_t        getVideoWidth(int *w);
+            status_t        getVideoHeight(int *h);
+            status_t        seekTo(
+                    int msec,
+                    MediaPlayer2SeekMode mode = MediaPlayer2SeekMode::SEEK_PREVIOUS_SYNC);
+            status_t        notifyAt(int64_t mediaTimeUs);
+            status_t        getCurrentPosition(int *msec);
+            status_t        getDuration(int *msec);
+            status_t        reset();
+            status_t        setAudioStreamType(audio_stream_type_t type);
+            status_t        getAudioStreamType(audio_stream_type_t *type);
+            status_t        setLooping(int loop);
+            bool            isLooping();
+            status_t        setVolume(float leftVolume, float rightVolume);
+            void            notify(int msg, int ext1, int ext2, const Parcel *obj = NULL);
+            status_t        invoke(const Parcel& request, Parcel *reply);
+            status_t        setMetadataFilter(const Parcel& filter);
+            status_t        getMetadata(bool update_only, bool apply_filter, Parcel *metadata);
+            status_t        setAudioSessionId(audio_session_t sessionId);
+            audio_session_t getAudioSessionId();
+            status_t        setAuxEffectSendLevel(float level);
+            status_t        attachAuxEffect(int effectId);
+            status_t        setParameter(int key, const Parcel& request);
+            status_t        getParameter(int key, Parcel* reply);
+            status_t        setRetransmitEndpoint(const char* addrString, uint16_t port);
+            status_t        setNextMediaPlayer(const sp<MediaPlayer2>& player);
+
+            media::VolumeShaper::Status applyVolumeShaper(
+                                    const sp<media::VolumeShaper::Configuration>& configuration,
+                                    const sp<media::VolumeShaper::Operation>& operation);
+            sp<media::VolumeShaper::State> getVolumeShaperState(int id);
+            // Modular DRM
+            status_t        prepareDrm(const uint8_t uuid[16], const Vector<uint8_t>& drmSessionId);
+            status_t        releaseDrm();
+            // AudioRouting
+            status_t        setOutputDevice(audio_port_handle_t deviceId);
+            audio_port_handle_t getRoutedDeviceId();
+            status_t        enableAudioDeviceCallback(bool enabled);
+
+private:
+            void            clear_l();
+            status_t        seekTo_l(int msec, MediaPlayer2SeekMode mode);
+            status_t        prepareAsync_l();
+            status_t        getDuration_l(int *msec);
+            status_t        attachNewPlayer(const sp<MediaPlayer2Engine>& player);
+            status_t        reset_l();
+            status_t        doSetRetransmitEndpoint(const sp<MediaPlayer2Engine>& player);
+            status_t        checkStateForKeySet_l(int key);
+
+    sp<MediaPlayer2Engine>      mPlayer;
+    thread_id_t                 mLockThreadId;
+    Mutex                       mLock;
+    Mutex                       mNotifyLock;
+    Condition                   mSignal;
+    sp<MediaPlayer2Listener>    mListener;
+    void*                       mCookie;
+    media_player2_states        mCurrentState;
+    int                         mCurrentPosition;
+    MediaPlayer2SeekMode        mCurrentSeekMode;
+    int                         mSeekPosition;
+    MediaPlayer2SeekMode        mSeekMode;
+    bool                        mPrepareSync;
+    status_t                    mPrepareStatus;
+    audio_stream_type_t         mStreamType;
+    Parcel*                     mAudioAttributesParcel;
+    bool                        mLoop;
+    float                       mLeftVolume;
+    float                       mRightVolume;
+    int                         mVideoWidth;
+    int                         mVideoHeight;
+    audio_session_t             mAudioSessionId;
+    float                       mSendLevel;
+    struct sockaddr_in          mRetransmitEndpoint;
+    bool                        mRetransmitEndpointValid;
+};
+
+}; // namespace android
+
+#endif // ANDROID_MEDIAPLAYER2_H
diff --git a/media/libmedia/include/media/mediaplayer_common.h b/media/libmedia/include/media/mediaplayer_common.h
new file mode 100644
index 0000000..d5a0135
--- /dev/null
+++ b/media/libmedia/include/media/mediaplayer_common.h
@@ -0,0 +1,34 @@
+/*
+ * Copyright 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_MEDIAPLAYER_COMMON_H
+#define ANDROID_MEDIAPLAYER_COMMON_H
+
+namespace android {
+
+// Keep MEDIA_TRACK_TYPE_* in sync with MediaPlayer.java.
+enum media_track_type {
+    MEDIA_TRACK_TYPE_UNKNOWN = 0,
+    MEDIA_TRACK_TYPE_VIDEO = 1,
+    MEDIA_TRACK_TYPE_AUDIO = 2,
+    MEDIA_TRACK_TYPE_TIMEDTEXT = 3,
+    MEDIA_TRACK_TYPE_SUBTITLE = 4,
+    MEDIA_TRACK_TYPE_METADATA = 5,
+};
+
+}; // namespace android
+
+#endif // ANDROID_MEDIAPLAYER_COMMON_H
diff --git a/media/libmedia/mediaplayer2.cpp b/media/libmedia/mediaplayer2.cpp
new file mode 100644
index 0000000..40d38fa
--- /dev/null
+++ b/media/libmedia/mediaplayer2.cpp
@@ -0,0 +1,1100 @@
+/*
+**
+** Copyright 2017, The Android Open Source Project
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+**     http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "MediaPlayer2Native"
+
+#include <fcntl.h>
+#include <inttypes.h>
+#include <sys/stat.h>
+#include <sys/types.h>
+#include <unistd.h>
+
+#include <utils/Log.h>
+
+#include <binder/IServiceManager.h>
+#include <binder/IPCThreadState.h>
+
+#include <gui/Surface.h>
+
+#include <media/mediaplayer2.h>
+#include <media/AudioResamplerPublic.h>
+#include <media/AudioSystem.h>
+#include <media/AVSyncSettings.h>
+#include <media/IDataSource.h>
+#include <media/MediaAnalyticsItem.h>
+
+#include <binder/MemoryBase.h>
+
+#include <utils/KeyedVector.h>
+#include <utils/String8.h>
+
+#include <system/audio.h>
+#include <system/window.h>
+
+#include "MediaPlayer2Manager.h"
+
+namespace android {
+
+using media::VolumeShaper;
+
+MediaPlayer2::MediaPlayer2()
+{
+    ALOGV("constructor");
+    mListener = NULL;
+    mCookie = NULL;
+    mStreamType = AUDIO_STREAM_MUSIC;
+    mAudioAttributesParcel = NULL;
+    mCurrentPosition = -1;
+    mCurrentSeekMode = MediaPlayer2SeekMode::SEEK_PREVIOUS_SYNC;
+    mSeekPosition = -1;
+    mSeekMode = MediaPlayer2SeekMode::SEEK_PREVIOUS_SYNC;
+    mCurrentState = MEDIA_PLAYER2_IDLE;
+    mPrepareSync = false;
+    mPrepareStatus = NO_ERROR;
+    mLoop = false;
+    mLeftVolume = mRightVolume = 1.0;
+    mVideoWidth = mVideoHeight = 0;
+    mLockThreadId = 0;
+    mAudioSessionId = (audio_session_t) AudioSystem::newAudioUniqueId(AUDIO_UNIQUE_ID_USE_SESSION);
+    AudioSystem::acquireAudioSessionId(mAudioSessionId, -1);
+    mSendLevel = 0;
+    mRetransmitEndpointValid = false;
+}
+
+MediaPlayer2::~MediaPlayer2()
+{
+    ALOGV("destructor");
+    if (mAudioAttributesParcel != NULL) {
+        delete mAudioAttributesParcel;
+        mAudioAttributesParcel = NULL;
+    }
+    AudioSystem::releaseAudioSessionId(mAudioSessionId, -1);
+    disconnect();
+    IPCThreadState::self()->flushCommands();
+}
+
+void MediaPlayer2::disconnect()
+{
+    ALOGV("disconnect");
+    sp<MediaPlayer2Engine> p;
+    {
+        Mutex::Autolock _l(mLock);
+        p = mPlayer;
+        mPlayer.clear();
+    }
+
+    if (p != 0) {
+        p->disconnect();
+    }
+}
+
+// always call with lock held
+void MediaPlayer2::clear_l()
+{
+    mCurrentPosition = -1;
+    mCurrentSeekMode = MediaPlayer2SeekMode::SEEK_PREVIOUS_SYNC;
+    mSeekPosition = -1;
+    mSeekMode = MediaPlayer2SeekMode::SEEK_PREVIOUS_SYNC;
+    mVideoWidth = mVideoHeight = 0;
+    mRetransmitEndpointValid = false;
+}
+
+status_t MediaPlayer2::setListener(const sp<MediaPlayer2Listener>& listener)
+{
+    ALOGV("setListener");
+    Mutex::Autolock _l(mLock);
+    mListener = listener;
+    return NO_ERROR;
+}
+
+
+status_t MediaPlayer2::attachNewPlayer(const sp<MediaPlayer2Engine>& player)
+{
+    status_t err = UNKNOWN_ERROR;
+    sp<MediaPlayer2Engine> p;
+    { // scope for the lock
+        Mutex::Autolock _l(mLock);
+
+        if ( !( (mCurrentState & MEDIA_PLAYER2_IDLE) ||
+                (mCurrentState == MEDIA_PLAYER2_STATE_ERROR ) ) ) {
+            ALOGE("attachNewPlayer called in state %d", mCurrentState);
+            return INVALID_OPERATION;
+        }
+
+        clear_l();
+        p = mPlayer;
+        mPlayer = player;
+        if (player != 0) {
+            mCurrentState = MEDIA_PLAYER2_INITIALIZED;
+            err = NO_ERROR;
+        } else {
+            ALOGE("Unable to create media player");
+        }
+    }
+
+    if (p != 0) {
+        p->disconnect();
+    }
+
+    return err;
+}
+
+status_t MediaPlayer2::setDataSource(
+        const sp<MediaHTTPService> &httpService,
+        const char *url, const KeyedVector<String8, String8> *headers)
+{
+    ALOGV("setDataSource(%s)", url);
+    status_t err = BAD_VALUE;
+    if (url != NULL) {
+        sp<MediaPlayer2Engine> player(MediaPlayer2Manager::get().create(this, mAudioSessionId));
+        if ((NO_ERROR != doSetRetransmitEndpoint(player)) ||
+            (NO_ERROR != player->setDataSource(httpService, url, headers))) {
+            player.clear();
+        }
+        err = attachNewPlayer(player);
+    }
+    return err;
+}
+
+status_t MediaPlayer2::setDataSource(int fd, int64_t offset, int64_t length)
+{
+    ALOGV("setDataSource(%d, %" PRId64 ", %" PRId64 ")", fd, offset, length);
+    status_t err = UNKNOWN_ERROR;
+    sp<MediaPlayer2Engine> player(MediaPlayer2Manager::get().create(this, mAudioSessionId));
+    if ((NO_ERROR != doSetRetransmitEndpoint(player)) ||
+        (NO_ERROR != player->setDataSource(fd, offset, length))) {
+        player.clear();
+    }
+    err = attachNewPlayer(player);
+    return err;
+}
+
+status_t MediaPlayer2::setDataSource(const sp<IDataSource> &source)
+{
+    ALOGV("setDataSource(IDataSource)");
+    status_t err = UNKNOWN_ERROR;
+    sp<MediaPlayer2Engine> player(MediaPlayer2Manager::get().create(this, mAudioSessionId));
+    if ((NO_ERROR != doSetRetransmitEndpoint(player)) ||
+        (NO_ERROR != player->setDataSource(source))) {
+        player.clear();
+    }
+    err = attachNewPlayer(player);
+    return err;
+}
+
+status_t MediaPlayer2::invoke(const Parcel& request, Parcel *reply)
+{
+    Mutex::Autolock _l(mLock);
+    const bool hasBeenInitialized =
+            (mCurrentState != MEDIA_PLAYER2_STATE_ERROR) &&
+            ((mCurrentState & MEDIA_PLAYER2_IDLE) != MEDIA_PLAYER2_IDLE);
+    if ((mPlayer != NULL) && hasBeenInitialized) {
+        ALOGV("invoke %zu", request.dataSize());
+        return mPlayer->invoke(request, reply);
+    }
+    ALOGE("invoke failed: wrong state %X, mPlayer(%p)", mCurrentState, mPlayer.get());
+    return INVALID_OPERATION;
+}
+
+status_t MediaPlayer2::setMetadataFilter(const Parcel& filter)
+{
+    ALOGD("setMetadataFilter");
+    Mutex::Autolock lock(mLock);
+    if (mPlayer == NULL) {
+        return NO_INIT;
+    }
+    return mPlayer->setMetadataFilter(filter);
+}
+
+status_t MediaPlayer2::getMetadata(bool update_only, bool apply_filter, Parcel *metadata)
+{
+    ALOGD("getMetadata");
+    Mutex::Autolock lock(mLock);
+    if (mPlayer == NULL) {
+        return NO_INIT;
+    }
+    return mPlayer->getMetadata(update_only, apply_filter, metadata);
+}
+
+status_t MediaPlayer2::setVideoSurfaceTexture(
+        const sp<IGraphicBufferProducer>& bufferProducer)
+{
+    ALOGV("setVideoSurfaceTexture");
+    Mutex::Autolock _l(mLock);
+    if (mPlayer == 0) return NO_INIT;
+    return mPlayer->setVideoSurfaceTexture(bufferProducer);
+}
+
+status_t MediaPlayer2::getBufferingSettings(BufferingSettings* buffering /* nonnull */)
+{
+    ALOGV("getBufferingSettings");
+
+    Mutex::Autolock _l(mLock);
+    if (mPlayer == 0) {
+        return NO_INIT;
+    }
+    return mPlayer->getBufferingSettings(buffering);
+}
+
+status_t MediaPlayer2::setBufferingSettings(const BufferingSettings& buffering)
+{
+    ALOGV("setBufferingSettings");
+
+    Mutex::Autolock _l(mLock);
+    if (mPlayer == 0) {
+        return NO_INIT;
+    }
+    return mPlayer->setBufferingSettings(buffering);
+}
+
+// must call with lock held
+status_t MediaPlayer2::prepareAsync_l()
+{
+    if ( (mPlayer != 0) && ( mCurrentState & (MEDIA_PLAYER2_INITIALIZED | MEDIA_PLAYER2_STOPPED) ) ) {
+        if (mAudioAttributesParcel != NULL) {
+            mPlayer->setParameter(MEDIA2_KEY_PARAMETER_AUDIO_ATTRIBUTES, *mAudioAttributesParcel);
+        } else {
+            mPlayer->setAudioStreamType(mStreamType);
+        }
+        mCurrentState = MEDIA_PLAYER2_PREPARING;
+        return mPlayer->prepareAsync();
+    }
+    ALOGE("prepareAsync called in state %d, mPlayer(%p)", mCurrentState, mPlayer.get());
+    return INVALID_OPERATION;
+}
+
+// TODO: In case of error, prepareAsync provides the caller with 2 error codes,
+// one defined in the Android framework and one provided by the implementation
+// that generated the error. The sync version of prepare returns only 1 error
+// code.
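+// Synchronous prepare: kicks off prepareAsync_l() and then blocks on mSignal
+// until notify() reports MEDIA2_PREPARED or MEDIA2_ERROR from another thread.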
+status_t MediaPlayer2::prepare()
+{
+    ALOGV("prepare");
+    Mutex::Autolock _l(mLock);
+    mLockThreadId = getThreadId();
+    if (mPrepareSync) {
+        mLockThreadId = 0;
+        return -EALREADY;
+    }
+    mPrepareSync = true;
+    status_t ret = prepareAsync_l();
+    if (ret != NO_ERROR) {
+        mLockThreadId = 0;
+        return ret;
+    }
+
+    if (mPrepareSync) {
+        mSignal.wait(mLock);  // wait for prepare done
+        mPrepareSync = false;
+    }
+    ALOGV("prepare complete - status=%d", mPrepareStatus);
+    mLockThreadId = 0;
+    return mPrepareStatus;
+}
+
+status_t MediaPlayer2::prepareAsync()
+{
+    ALOGV("prepareAsync");
+    Mutex::Autolock _l(mLock);
+    return prepareAsync_l();
+}
+
+status_t MediaPlayer2::start()
+{
+    ALOGV("start");
+
+    status_t ret = NO_ERROR;
+    Mutex::Autolock _l(mLock);
+
+    mLockThreadId = getThreadId();
+
+    if (mCurrentState & MEDIA_PLAYER2_STARTED) {
+        ret = NO_ERROR;
+    } else if ( (mPlayer != 0) && ( mCurrentState & ( MEDIA_PLAYER2_PREPARED |
+                    MEDIA_PLAYER2_PLAYBACK_COMPLETE | MEDIA_PLAYER2_PAUSED ) ) ) {
+        mPlayer->setLooping(mLoop);
+        mPlayer->setVolume(mLeftVolume, mRightVolume);
+        mPlayer->setAuxEffectSendLevel(mSendLevel);
+        mCurrentState = MEDIA_PLAYER2_STARTED;
+        ret = mPlayer->start();
+        if (ret != NO_ERROR) {
+            mCurrentState = MEDIA_PLAYER2_STATE_ERROR;
+        } else {
+            if (mCurrentState == MEDIA_PLAYER2_PLAYBACK_COMPLETE) {
+                ALOGV("playback completed immediately following start()");
+            }
+        }
+    } else {
+        ALOGE("start called in state %d, mPlayer(%p)", mCurrentState, mPlayer.get());
+        ret = INVALID_OPERATION;
+    }
+
+    mLockThreadId = 0;
+
+    return ret;
+}
+
+status_t MediaPlayer2::stop()
+{
+    ALOGV("stop");
+    Mutex::Autolock _l(mLock);
+    if (mCurrentState & MEDIA_PLAYER2_STOPPED) return NO_ERROR;
+    if ( (mPlayer != 0) && ( mCurrentState & ( MEDIA_PLAYER2_STARTED | MEDIA_PLAYER2_PREPARED |
+                    MEDIA_PLAYER2_PAUSED | MEDIA_PLAYER2_PLAYBACK_COMPLETE ) ) ) {
+        status_t ret = mPlayer->stop();
+        if (ret != NO_ERROR) {
+            mCurrentState = MEDIA_PLAYER2_STATE_ERROR;
+        } else {
+            mCurrentState = MEDIA_PLAYER2_STOPPED;
+        }
+        return ret;
+    }
+    ALOGE("stop called in state %d, mPlayer(%p)", mCurrentState, mPlayer.get());
+    return INVALID_OPERATION;
+}
+
+status_t MediaPlayer2::pause()
+{
+    ALOGV("pause");
+    Mutex::Autolock _l(mLock);
+    if (mCurrentState & (MEDIA_PLAYER2_PAUSED|MEDIA_PLAYER2_PLAYBACK_COMPLETE))
+        return NO_ERROR;
+    if ((mPlayer != 0) && (mCurrentState & MEDIA_PLAYER2_STARTED)) {
+        status_t ret = mPlayer->pause();
+        if (ret != NO_ERROR) {
+            mCurrentState = MEDIA_PLAYER2_STATE_ERROR;
+        } else {
+            mCurrentState = MEDIA_PLAYER2_PAUSED;
+        }
+        return ret;
+    }
+    ALOGE("pause called in state %d, mPlayer(%p)", mCurrentState, mPlayer.get());
+    return INVALID_OPERATION;
+}
+
+bool MediaPlayer2::isPlaying()
+{
+    Mutex::Autolock _l(mLock);
+    if (mPlayer != 0) {
+        bool temp = false;
+        mPlayer->isPlaying(&temp);
+        ALOGV("isPlaying: %d", temp);
+        if ((mCurrentState & MEDIA_PLAYER2_STARTED) && !temp) {
+            ALOGE("internal/external state mismatch corrected");
+            mCurrentState = MEDIA_PLAYER2_PAUSED;
+        } else if ((mCurrentState & MEDIA_PLAYER2_PAUSED) && temp) {
+            ALOGE("internal/external state mismatch corrected");
+            mCurrentState = MEDIA_PLAYER2_STARTED;
+        }
+        return temp;
+    }
+    ALOGV("isPlaying: no active player");
+    return false;
+}
+
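+// Playback-rate control doubles as pause/resume: a zero speed on a started
+// player moves the state to PAUSED, while a non-zero speed from the prepared,
+// paused, or playback-complete states moves it to STARTED.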
+status_t MediaPlayer2::setPlaybackSettings(const AudioPlaybackRate& rate)
+{
+    ALOGV("setPlaybackSettings: %f %f %d %d",
+            rate.mSpeed, rate.mPitch, rate.mFallbackMode, rate.mStretchMode);
+    // Negative speed and pitch do not make sense. Further validation will
+    // be done by the respective media players.
+    if (rate.mSpeed < 0.f || rate.mPitch < 0.f) {
+        return BAD_VALUE;
+    }
+    Mutex::Autolock _l(mLock);
+    if (mPlayer == 0 || (mCurrentState & MEDIA_PLAYER2_STOPPED)) {
+        return INVALID_OPERATION;
+    }
+
+    if (rate.mSpeed != 0.f && !(mCurrentState & MEDIA_PLAYER2_STARTED)
+            && (mCurrentState & (MEDIA_PLAYER2_PREPARED | MEDIA_PLAYER2_PAUSED
+                    | MEDIA_PLAYER2_PLAYBACK_COMPLETE))) {
+        mPlayer->setLooping(mLoop);
+        mPlayer->setVolume(mLeftVolume, mRightVolume);
+        mPlayer->setAuxEffectSendLevel(mSendLevel);
+    }
+
+    status_t err = mPlayer->setPlaybackSettings(rate);
+    if (err == OK) {
+        if (rate.mSpeed == 0.f && mCurrentState == MEDIA_PLAYER2_STARTED) {
+            mCurrentState = MEDIA_PLAYER2_PAUSED;
+        } else if (rate.mSpeed != 0.f
+                && (mCurrentState & (MEDIA_PLAYER2_PREPARED | MEDIA_PLAYER2_PAUSED
+                    | MEDIA_PLAYER2_PLAYBACK_COMPLETE))) {
+            mCurrentState = MEDIA_PLAYER2_STARTED;
+        }
+    }
+    return err;
+}
+
+status_t MediaPlayer2::getPlaybackSettings(AudioPlaybackRate* rate /* nonnull */)
+{
+    Mutex::Autolock _l(mLock);
+    if (mPlayer == 0) return INVALID_OPERATION;
+    return mPlayer->getPlaybackSettings(rate);
+}
+
+status_t MediaPlayer2::setSyncSettings(const AVSyncSettings& sync, float videoFpsHint)
+{
+    ALOGV("setSyncSettings: %u %u %f %f",
+            sync.mSource, sync.mAudioAdjustMode, sync.mTolerance, videoFpsHint);
+    Mutex::Autolock _l(mLock);
+    if (mPlayer == 0) return INVALID_OPERATION;
+    return mPlayer->setSyncSettings(sync, videoFpsHint);
+}
+
+status_t MediaPlayer2::getSyncSettings(
+        AVSyncSettings* sync /* nonnull */, float* videoFps /* nonnull */)
+{
+    Mutex::Autolock _l(mLock);
+    if (mPlayer == 0) return INVALID_OPERATION;
+    return mPlayer->getSyncSettings(sync, videoFps);
+}
+
+status_t MediaPlayer2::getVideoWidth(int *w)
+{
+    ALOGV("getVideoWidth");
+    Mutex::Autolock _l(mLock);
+    if (mPlayer == 0) return INVALID_OPERATION;
+    *w = mVideoWidth;
+    return NO_ERROR;
+}
+
+status_t MediaPlayer2::getVideoHeight(int *h)
+{
+    ALOGV("getVideoHeight");
+    Mutex::Autolock _l(mLock);
+    if (mPlayer == 0) return INVALID_OPERATION;
+    *h = mVideoHeight;
+    return NO_ERROR;
+}
+
+status_t MediaPlayer2::getCurrentPosition(int *msec)
+{
+    ALOGV("getCurrentPosition");
+    Mutex::Autolock _l(mLock);
+    if (mPlayer != 0) {
+        if (mCurrentPosition >= 0) {
+            ALOGV("Using cached seek position: %d", mCurrentPosition);
+            *msec = mCurrentPosition;
+            return NO_ERROR;
+        }
+        return mPlayer->getCurrentPosition(msec);
+    }
+    return INVALID_OPERATION;
+}
+
+status_t MediaPlayer2::getDuration_l(int *msec)
+{
+    ALOGV("getDuration_l");
+    bool isValidState = (mCurrentState & (MEDIA_PLAYER2_PREPARED | MEDIA_PLAYER2_STARTED |
+            MEDIA_PLAYER2_PAUSED | MEDIA_PLAYER2_STOPPED | MEDIA_PLAYER2_PLAYBACK_COMPLETE));
+    if (mPlayer != 0 && isValidState) {
+        int durationMs;
+        status_t ret = mPlayer->getDuration(&durationMs);
+
+        if (ret != OK) {
+            // Do not enter error state just because no duration was available.
+            durationMs = -1;
+            ret = OK;
+        }
+
+        if (msec) {
+            *msec = durationMs;
+        }
+        return ret;
+    }
+    ALOGE("Attempt to call getDuration in wrong state: mPlayer=%p, mCurrentState=%u",
+            mPlayer.get(), mCurrentState);
+    return INVALID_OPERATION;
+}
+
+status_t MediaPlayer2::getDuration(int *msec)
+{
+    Mutex::Autolock _l(mLock);
+    return getDuration_l(msec);
+}
+
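+// Internal seek helper. Clamps the target to [0, duration], caches it, and
+// forwards at most one seek to the engine at a time; later requests are
+// replayed when MEDIA2_SEEK_COMPLETE arrives in notify().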
+status_t MediaPlayer2::seekTo_l(int msec, MediaPlayer2SeekMode mode)
+{
+    ALOGV("seekTo (%d, %d)", msec, mode);
+    if ((mPlayer != 0) && ( mCurrentState & ( MEDIA_PLAYER2_STARTED | MEDIA_PLAYER2_PREPARED |
+            MEDIA_PLAYER2_PAUSED |  MEDIA_PLAYER2_PLAYBACK_COMPLETE) ) ) {
+        if ( msec < 0 ) {
+            ALOGW("Attempt to seek to invalid position: %d", msec);
+            msec = 0;
+        }
+
+        int durationMs;
+        status_t err = mPlayer->getDuration(&durationMs);
+
+        if (err != OK) {
+            ALOGW("Stream has no duration and is therefore not seekable.");
+            return err;
+        }
+
+        if (msec > durationMs) {
+            ALOGW("Attempt to seek to past end of file: request = %d, "
+                  "durationMs = %d",
+                  msec,
+                  durationMs);
+
+            msec = durationMs;
+        }
+
+        // cache the requested seek position and mode
+        mCurrentPosition = msec;
+        mCurrentSeekMode = mode;
+        if (mSeekPosition < 0) {
+            mSeekPosition = msec;
+            mSeekMode = mode;
+            return mPlayer->seekTo(msec, mode);
+        } else {
+            ALOGV("Seek in progress - queue up seekTo[%d, %d]", msec, mode);
+            return NO_ERROR;
+        }
+    }
+    ALOGE("Attempt to perform seekTo in wrong state: mPlayer=%p, mCurrentState=%u", mPlayer.get(),
+            mCurrentState);
+    return INVALID_OPERATION;
+}
+
+status_t MediaPlayer2::seekTo(int msec, MediaPlayer2SeekMode mode)
+{
+    mLockThreadId = getThreadId();
+    Mutex::Autolock _l(mLock);
+    status_t result = seekTo_l(msec, mode);
+    mLockThreadId = 0;
+
+    return result;
+}
+
+status_t MediaPlayer2::notifyAt(int64_t mediaTimeUs)
+{
+    Mutex::Autolock _l(mLock);
+    if (mPlayer != 0) {
+        return mPlayer->notifyAt(mediaTimeUs);
+    }
+    return INVALID_OPERATION;
+}
+
+status_t MediaPlayer2::reset_l()
+{
+    mLoop = false;
+    if (mCurrentState == MEDIA_PLAYER2_IDLE) return NO_ERROR;
+    mPrepareSync = false;
+    if (mPlayer != 0) {
+        status_t ret = mPlayer->reset();
+        if (ret != NO_ERROR) {
+            ALOGE("reset() failed with return code (%d)", ret);
+            mCurrentState = MEDIA_PLAYER2_STATE_ERROR;
+        } else {
+            mPlayer->disconnect();
+            mCurrentState = MEDIA_PLAYER2_IDLE;
+        }
+        // setDataSource has to be called again to create a
+        // new mediaplayer.
+        mPlayer = 0;
+        return ret;
+    }
+    clear_l();
+    return NO_ERROR;
+}
+
+status_t MediaPlayer2::doSetRetransmitEndpoint(const sp<MediaPlayer2Engine>& player) {
+    Mutex::Autolock _l(mLock);
+
+    if (player == NULL) {
+        return UNKNOWN_ERROR;
+    }
+
+    if (mRetransmitEndpointValid) {
+        return player->setRetransmitEndpoint(&mRetransmitEndpoint);
+    }
+
+    return OK;
+}
+
+status_t MediaPlayer2::reset()
+{
+    ALOGV("reset");
+    mLockThreadId = getThreadId();
+    Mutex::Autolock _l(mLock);
+    status_t result = reset_l();
+    mLockThreadId = 0;
+
+    return result;
+}
+
+status_t MediaPlayer2::setAudioStreamType(audio_stream_type_t type)
+{
+    ALOGV("MediaPlayer2::setAudioStreamType");
+    Mutex::Autolock _l(mLock);
+    if (mStreamType == type) return NO_ERROR;
+    if (mCurrentState & ( MEDIA_PLAYER2_PREPARED | MEDIA_PLAYER2_STARTED |
+                MEDIA_PLAYER2_PAUSED | MEDIA_PLAYER2_PLAYBACK_COMPLETE ) ) {
+        // Can't change the stream type after prepare
+        ALOGE("setAudioStream called in state %d", mCurrentState);
+        return INVALID_OPERATION;
+    }
+    // cache
+    mStreamType = type;
+    return OK;
+}
+
+status_t MediaPlayer2::getAudioStreamType(audio_stream_type_t *type)
+{
+    ALOGV("getAudioStreamType");
+    Mutex::Autolock _l(mLock);
+    *type = mStreamType;
+    return OK;
+}
+
+status_t MediaPlayer2::setLooping(int loop)
+{
+    ALOGV("MediaPlayer2::setLooping");
+    Mutex::Autolock _l(mLock);
+    mLoop = (loop != 0);
+    if (mPlayer != 0) {
+        return mPlayer->setLooping(loop);
+    }
+    return OK;
+}
+
+bool MediaPlayer2::isLooping() {
+    ALOGV("isLooping");
+    Mutex::Autolock _l(mLock);
+    if (mPlayer != 0) {
+        return mLoop;
+    }
+    ALOGV("isLooping: no active player");
+    return false;
+}
+
+status_t MediaPlayer2::setVolume(float leftVolume, float rightVolume)
+{
+    ALOGV("MediaPlayer2::setVolume(%f, %f)", leftVolume, rightVolume);
+    Mutex::Autolock _l(mLock);
+    mLeftVolume = leftVolume;
+    mRightVolume = rightVolume;
+    if (mPlayer != 0) {
+        return mPlayer->setVolume(leftVolume, rightVolume);
+    }
+    return OK;
+}
+
+status_t MediaPlayer2::setAudioSessionId(audio_session_t sessionId)
+{
+    ALOGV("MediaPlayer2::setAudioSessionId(%d)", sessionId);
+    Mutex::Autolock _l(mLock);
+    if (!(mCurrentState & MEDIA_PLAYER2_IDLE)) {
+        ALOGE("setAudioSessionId called in state %d", mCurrentState);
+        return INVALID_OPERATION;
+    }
+    if (sessionId < 0) {
+        return BAD_VALUE;
+    }
+    if (sessionId != mAudioSessionId) {
+        AudioSystem::acquireAudioSessionId(sessionId, -1);
+        AudioSystem::releaseAudioSessionId(mAudioSessionId, -1);
+        mAudioSessionId = sessionId;
+    }
+    return NO_ERROR;
+}
+
+audio_session_t MediaPlayer2::getAudioSessionId()
+{
+    Mutex::Autolock _l(mLock);
+    return mAudioSessionId;
+}
+
+status_t MediaPlayer2::setAuxEffectSendLevel(float level)
+{
+    ALOGV("MediaPlayer2::setAuxEffectSendLevel(%f)", level);
+    Mutex::Autolock _l(mLock);
+    mSendLevel = level;
+    if (mPlayer != 0) {
+        return mPlayer->setAuxEffectSendLevel(level);
+    }
+    return OK;
+}
+
+status_t MediaPlayer2::attachAuxEffect(int effectId)
+{
+    ALOGV("MediaPlayer2::attachAuxEffect(%d)", effectId);
+    Mutex::Autolock _l(mLock);
+    if (mPlayer == 0 ||
+        (mCurrentState & MEDIA_PLAYER2_IDLE) ||
+        (mCurrentState == MEDIA_PLAYER2_STATE_ERROR )) {
+        ALOGE("attachAuxEffect called in state %d, mPlayer(%p)", mCurrentState, mPlayer.get());
+        return INVALID_OPERATION;
+    }
+
+    return mPlayer->attachAuxEffect(effectId);
+}
+
+// always call with lock held
+status_t MediaPlayer2::checkStateForKeySet_l(int key)
+{
+    switch(key) {
+    case MEDIA2_KEY_PARAMETER_AUDIO_ATTRIBUTES:
+        if (mCurrentState & ( MEDIA_PLAYER2_PREPARED | MEDIA_PLAYER2_STARTED |
+                MEDIA_PLAYER2_PAUSED | MEDIA_PLAYER2_PLAYBACK_COMPLETE) ) {
+            // Can't change the audio attributes after prepare
+            ALOGE("trying to set audio attributes called in state %d", mCurrentState);
+            return INVALID_OPERATION;
+        }
+        break;
+    default:
+        // parameter doesn't require player state check
+        break;
+    }
+    return OK;
+}
+
+status_t MediaPlayer2::setParameter(int key, const Parcel& request)
+{
+    ALOGV("MediaPlayer2::setParameter(%d)", key);
+    status_t status = INVALID_OPERATION;
+    Mutex::Autolock _l(mLock);
+    if (checkStateForKeySet_l(key) != OK) {
+        return status;
+    }
+    switch (key) {
+    case MEDIA2_KEY_PARAMETER_AUDIO_ATTRIBUTES:
+        // save the marshalled audio attributes
+        if (mAudioAttributesParcel != NULL) { delete mAudioAttributesParcel; }
+        mAudioAttributesParcel = new Parcel();
+        mAudioAttributesParcel->appendFrom(&request, 0, request.dataSize());
+        status = OK;
+        break;
+    default:
+        ALOGV_IF(mPlayer == NULL, "setParameter: no active player");
+        break;
+    }
+
+    if (mPlayer != NULL) {
+        status = mPlayer->setParameter(key, request);
+    }
+    return status;
+}
+
+status_t MediaPlayer2::getParameter(int key, Parcel *reply)
+{
+    ALOGV("MediaPlayer2::getParameter(%d)", key);
+    Mutex::Autolock _l(mLock);
+    if (mPlayer != NULL) {
+        status_t status = mPlayer->getParameter(key, reply);
+        if (status != OK) {
+            ALOGD("getParameter returns %d", status);
+        }
+        return status;
+    }
+    ALOGV("getParameter: no active player");
+    return INVALID_OPERATION;
+}
+
+status_t MediaPlayer2::setRetransmitEndpoint(const char* addrString,
+                                            uint16_t port) {
+    ALOGV("MediaPlayer2::setRetransmitEndpoint(%s:%hu)",
+            addrString ? addrString : "(null)", port);
+
+    Mutex::Autolock _l(mLock);
+    if ((mPlayer != NULL) || (mCurrentState != MEDIA_PLAYER2_IDLE))
+        return INVALID_OPERATION;
+
+    if (NULL == addrString) {
+        mRetransmitEndpointValid = false;
+        return OK;
+    }
+
+    struct in_addr saddr;
+    if (!inet_aton(addrString, &saddr)) {
+        return BAD_VALUE;
+    }
+
+    memset(&mRetransmitEndpoint, 0, sizeof(mRetransmitEndpoint));
+    mRetransmitEndpoint.sin_family = AF_INET;
+    mRetransmitEndpoint.sin_addr   = saddr;
+    mRetransmitEndpoint.sin_port   = htons(port);
+    mRetransmitEndpointValid       = true;
+
+    return OK;
+}
+
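+// Engine callback entry point. Updates the client-side state machine and then
+// forwards the event to the registered MediaPlayer2Listener with mLock
+// released, so the listener can call back into this object without deadlocking.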
+void MediaPlayer2::notify(int msg, int ext1, int ext2, const Parcel *obj)
+{
+    ALOGV("message received msg=%d, ext1=%d, ext2=%d", msg, ext1, ext2);
+    bool send = true;
+    bool locked = false;
+
+    // TODO: In the future, we might be on the same thread if the app is
+    // running in the same process as the media server. In that case,
+    // this will deadlock.
+    //
+    // The threadId hack below works around this for the case of prepare,
+    // seekTo, start, and reset within the same process.
+    // FIXME: This is a hack, and it is not even applied consistently across
+    // all use cases; it needs to be revisited.
+    if (mLockThreadId != getThreadId()) {
+        mLock.lock();
+        locked = true;
+    }
+
+    // Allows calls from JNI in idle state to notify errors
+    if (!(msg == MEDIA2_ERROR && mCurrentState == MEDIA_PLAYER2_IDLE) && mPlayer == 0) {
+        ALOGV("notify(%d, %d, %d) callback on disconnected mediaplayer", msg, ext1, ext2);
+        if (locked) mLock.unlock();   // release the lock when done.
+        return;
+    }
+
+    switch (msg) {
+    case MEDIA2_NOP: // interface test message
+        break;
+    case MEDIA2_PREPARED:
+        ALOGV("MediaPlayer2::notify() prepared");
+        mCurrentState = MEDIA_PLAYER2_PREPARED;
+        if (mPrepareSync) {
+            ALOGV("signal application thread");
+            mPrepareSync = false;
+            mPrepareStatus = NO_ERROR;
+            mSignal.signal();
+        }
+        break;
+    case MEDIA2_DRM_INFO:
+        ALOGV("MediaPlayer2::notify() MEDIA2_DRM_INFO(%d, %d, %d, %p)", msg, ext1, ext2, obj);
+        break;
+    case MEDIA2_PLAYBACK_COMPLETE:
+        ALOGV("playback complete");
+        if (mCurrentState == MEDIA_PLAYER2_IDLE) {
+            ALOGE("playback complete in idle state");
+        }
+        if (!mLoop) {
+            mCurrentState = MEDIA_PLAYER2_PLAYBACK_COMPLETE;
+        }
+        break;
+    case MEDIA2_ERROR:
+        // Always log errors.
+        // ext1: Media framework error code.
+        // ext2: Implementation dependent error code.
+        ALOGE("error (%d, %d)", ext1, ext2);
+        mCurrentState = MEDIA_PLAYER2_STATE_ERROR;
+        if (mPrepareSync) {
+            ALOGV("signal application thread");
+            mPrepareSync = false;
+            mPrepareStatus = ext1;
+            mSignal.signal();
+            send = false;
+        }
+        break;
+    case MEDIA2_INFO:
+        // ext1: Media framework info/warning code.
+        // ext2: Implementation dependent info/warning code.
+        if (ext1 != MEDIA2_INFO_VIDEO_TRACK_LAGGING) {
+            ALOGW("info/warning (%d, %d)", ext1, ext2);
+        }
+        break;
+    case MEDIA2_SEEK_COMPLETE:
+        ALOGV("Received seek complete");
+        if (mSeekPosition != mCurrentPosition || (mSeekMode != mCurrentSeekMode)) {
+            ALOGV("Executing queued seekTo(%d, %d)", mCurrentPosition, mCurrentSeekMode);
+            mSeekPosition = -1;
+            mSeekMode = MediaPlayer2SeekMode::SEEK_PREVIOUS_SYNC;
+            seekTo_l(mCurrentPosition, mCurrentSeekMode);
+        } else {
+            ALOGV("All seeks complete - return to regularly scheduled program");
+            mCurrentPosition = mSeekPosition = -1;
+            mCurrentSeekMode = mSeekMode = MediaPlayer2SeekMode::SEEK_PREVIOUS_SYNC;
+        }
+        break;
+    case MEDIA2_BUFFERING_UPDATE:
+        ALOGV("buffering %d", ext1);
+        break;
+    case MEDIA2_SET_VIDEO_SIZE:
+        ALOGV("New video size %d x %d", ext1, ext2);
+        mVideoWidth = ext1;
+        mVideoHeight = ext2;
+        break;
+    case MEDIA2_NOTIFY_TIME:
+        ALOGV("Received notify time message");
+        break;
+    case MEDIA2_TIMED_TEXT:
+        ALOGV("Received timed text message");
+        break;
+    case MEDIA2_SUBTITLE_DATA:
+        ALOGV("Received subtitle data message");
+        break;
+    case MEDIA2_META_DATA:
+        ALOGV("Received timed metadata message");
+        break;
+    default:
+        ALOGV("unrecognized message: (%d, %d, %d)", msg, ext1, ext2);
+        break;
+    }
+
+    sp<MediaPlayer2Listener> listener = mListener;
+    if (locked) mLock.unlock();
+
+    // this prevents re-entrant calls into client code
+    if ((listener != 0) && send) {
+        Mutex::Autolock _l(mNotifyLock);
+        ALOGV("callback application");
+        listener->notify(msg, ext1, ext2, obj);
+        ALOGV("back from callback");
+    }
+}
+
+status_t MediaPlayer2::setNextMediaPlayer(const sp<MediaPlayer2>& next) {
+    Mutex::Autolock _l(mLock);
+    if (mPlayer == NULL) {
+        return NO_INIT;
+    }
+
+    if (next != NULL && !(next->mCurrentState &
+            (MEDIA_PLAYER2_PREPARED | MEDIA_PLAYER2_PAUSED | MEDIA_PLAYER2_PLAYBACK_COMPLETE))) {
+        ALOGE("next player is not prepared");
+        return INVALID_OPERATION;
+    }
+
+    return mPlayer->setNextPlayer(next == NULL ? NULL : next->mPlayer);
+}
+
+VolumeShaper::Status MediaPlayer2::applyVolumeShaper(
+        const sp<VolumeShaper::Configuration>& configuration,
+        const sp<VolumeShaper::Operation>& operation)
+{
+    Mutex::Autolock _l(mLock);
+    if (mPlayer == nullptr) {
+        return VolumeShaper::Status(NO_INIT);
+    }
+    VolumeShaper::Status status = mPlayer->applyVolumeShaper(configuration, operation);
+    return status;
+}
+
+sp<VolumeShaper::State> MediaPlayer2::getVolumeShaperState(int id)
+{
+    Mutex::Autolock _l(mLock);
+    if (mPlayer == nullptr) {
+        return nullptr;
+    }
+    return mPlayer->getVolumeShaperState(id);
+}
+
+// Modular DRM
+status_t MediaPlayer2::prepareDrm(const uint8_t uuid[16], const Vector<uint8_t>& drmSessionId)
+{
+    // TODO change to ALOGV
+    ALOGD("prepareDrm: uuid: %p  drmSessionId: %p(%zu)", uuid,
+            drmSessionId.array(), drmSessionId.size());
+    Mutex::Autolock _l(mLock);
+    if (mPlayer == NULL) {
+        return NO_INIT;
+    }
+
+    // Only allowed in the player's preparing/prepared state.
+    // We get here only if MEDIA2_DRM_INFO has already arrived (e.g., prepare is half-way through or
+    // completed), so the state change to "prepared" might not have happened yet (e.g., buffering).
+    // Still, we can allow prepareDrm for the use case of being called in OnDrmInfoListener.
+    if (!(mCurrentState & (MEDIA_PLAYER2_PREPARING | MEDIA_PLAYER2_PREPARED))) {
+        ALOGE("prepareDrm is called in the wrong state (%d).", mCurrentState);
+        return INVALID_OPERATION;
+    }
+
+    if (drmSessionId.isEmpty()) {
+        ALOGE("prepareDrm: Unexpected. Can't proceed with crypto. Empty drmSessionId.");
+        return INVALID_OPERATION;
+    }
+
+    // Passing down to mediaserver mainly for creating the crypto
+    status_t status = mPlayer->prepareDrm(uuid, drmSessionId);
+    ALOGE_IF(status != OK, "prepareDrm: Failed at mediaserver with ret: %d", status);
+
+    // TODO change to ALOGV
+    ALOGD("prepareDrm: mediaserver::prepareDrm ret=%d", status);
+
+    return status;
+}
+
+status_t MediaPlayer2::releaseDrm()
+{
+    Mutex::Autolock _l(mLock);
+    if (mPlayer == NULL) {
+        return NO_INIT;
+    }
+
+    // Not allowing releaseDrm in an active/resumable state
+    if (mCurrentState & (MEDIA_PLAYER2_STARTED |
+                         MEDIA_PLAYER2_PAUSED |
+                         MEDIA_PLAYER2_PLAYBACK_COMPLETE |
+                         MEDIA_PLAYER2_STATE_ERROR)) {
+        ALOGE("releaseDrm Unexpected state %d. Can only be called in stopped/idle.", mCurrentState);
+        return INVALID_OPERATION;
+    }
+
+    status_t status = mPlayer->releaseDrm();
+    // TODO change to ALOGV
+    ALOGD("releaseDrm: mediaserver::releaseDrm ret: %d", status);
+    if (status != OK) {
+        ALOGE("releaseDrm: Failed at mediaserver with ret: %d", status);
+        // Overriding to OK so the client proceeds with its own cleanup.
+        // The client can't do more cleanup; mediaserver releases its crypto at the end of the session anyway.
+        status = OK;
+    }
+
+    return status;
+}
+
+status_t MediaPlayer2::setOutputDevice(audio_port_handle_t deviceId)
+{
+    Mutex::Autolock _l(mLock);
+    if (mPlayer == NULL) {
+        ALOGV("setOutputDevice: player not init");
+        return NO_INIT;
+    }
+    return mPlayer->setOutputDevice(deviceId);
+}
+
+audio_port_handle_t MediaPlayer2::getRoutedDeviceId()
+{
+    Mutex::Autolock _l(mLock);
+    if (mPlayer == NULL) {
+        ALOGV("getRoutedDeviceId: player not init");
+        return AUDIO_PORT_HANDLE_NONE;
+    }
+    audio_port_handle_t deviceId;
+    status_t status = mPlayer->getRoutedDeviceId(&deviceId);
+    if (status != NO_ERROR) {
+        return AUDIO_PORT_HANDLE_NONE;
+    }
+    return deviceId;
+}
+
+status_t MediaPlayer2::enableAudioDeviceCallback(bool enabled)
+{
+    Mutex::Autolock _l(mLock);
+    if (mPlayer == NULL) {
+        ALOGV("addAudioDeviceCallback: player not init");
+        return NO_INIT;
+    }
+    return mPlayer->enableAudioDeviceCallback(enabled);
+}
+
+} // namespace android
diff --git a/media/libmedia/nuplayer2/Android.bp b/media/libmedia/nuplayer2/Android.bp
new file mode 100644
index 0000000..2a1fc59
--- /dev/null
+++ b/media/libmedia/nuplayer2/Android.bp
@@ -0,0 +1,68 @@
+cc_library_static {
+    name: "libstagefright_nuplayer2",
+
+    srcs: [
+        "JWakeLock.cpp",
+        "GenericSource.cpp",
+        "HTTPLiveSource.cpp",
+        "NdkWrapper.cpp",
+        "NuPlayer2.cpp",
+        "NuPlayer2CCDecoder.cpp",
+        "NuPlayer2Decoder.cpp",
+        "NuPlayer2DecoderBase.cpp",
+        "NuPlayer2DecoderPassThrough.cpp",
+        "NuPlayer2Driver.cpp",
+        "NuPlayer2Drm.cpp",
+        "NuPlayer2Renderer.cpp",
+        "NuPlayer2StreamListener.cpp",
+        "RTSPSource.cpp",
+        "StreamingSource.cpp",
+    ],
+
+    header_libs: [
+        "media_plugin_headers",
+    ],
+
+    include_dirs: [
+        "frameworks/av/media/libstagefright",
+        "frameworks/av/media/libstagefright/httplive",
+        "frameworks/av/media/libstagefright/include",
+        "frameworks/av/media/libstagefright/mpeg2ts",
+        "frameworks/av/media/libstagefright/rtsp",
+        "frameworks/av/media/libstagefright/timedtext",
+    ],
+
+    cflags: [
+        "-Werror",
+        "-Wall",
+    ],
+
+    product_variables: {
+        debuggable: {
+            cflags: [
+                "-DENABLE_STAGEFRIGHT_EXPERIMENTS",
+            ],
+        }
+    },
+
+    shared_libs: [
+        "libbinder",
+        "libui",
+        "libgui",
+        "libmedia",
+        "libmediadrm",
+        "libmediandk",
+        "libpowermanager",
+    ],
+
+    name: "libstagefright_nuplayer2",
+
+    tags: ["eng"],
+
+    sanitize: {
+        cfi: true,
+        diag: {
+            cfi: true,
+        },
+    },
+
+}
diff --git a/media/libmedia/nuplayer2/GenericSource.cpp b/media/libmedia/nuplayer2/GenericSource.cpp
new file mode 100644
index 0000000..900c78b
--- /dev/null
+++ b/media/libmedia/nuplayer2/GenericSource.cpp
@@ -0,0 +1,1697 @@
+/*
+ * Copyright 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "GenericSource"
+
+#include "GenericSource.h"
+#include "NdkWrapper.h"
+#include "NuPlayer2Drm.h"
+
+#include "AnotherPacketSource.h"
+#include <binder/IServiceManager.h>
+#include <cutils/properties.h>
+#include <media/DataSource.h>
+#include <media/IMediaExtractorService.h>
+#include <media/MediaHTTPService.h>
+#include <media/MediaExtractor.h>
+#include <media/MediaSource.h>
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/DataSourceFactory.h>
+#include <media/stagefright/FileSource.h>
+#include <media/stagefright/InterfaceUtils.h>
+#include <media/stagefright/MediaBuffer.h>
+#include <media/stagefright/MediaClock.h>
+#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/MediaExtractorFactory.h>
+#include <media/stagefright/MetaData.h>
+#include <media/stagefright/Utils.h>
+#include "../../libstagefright/include/NuCachedSource2.h"
+#include "../../libstagefright/include/HTTPBase.h"
+
+namespace android {
+
+static const int kInitialMarkMs        = 5000;  // 5secs
+
+//static const int kPausePlaybackMarkMs  = 2000;  // 2secs
+static const int kResumePlaybackMarkMs = 15000;  // 15secs
+
+NuPlayer2::GenericSource::GenericSource(
+        const sp<AMessage> &notify,
+        bool uidValid,
+        uid_t uid,
+        const sp<MediaClock> &mediaClock)
+    : Source(notify),
+      mAudioTimeUs(0),
+      mAudioLastDequeueTimeUs(0),
+      mVideoTimeUs(0),
+      mVideoLastDequeueTimeUs(0),
+      mPrevBufferPercentage(-1),
+      mPollBufferingGeneration(0),
+      mSentPauseOnBuffering(false),
+      mAudioDataGeneration(0),
+      mVideoDataGeneration(0),
+      mFetchSubtitleDataGeneration(0),
+      mFetchTimedTextDataGeneration(0),
+      mDurationUs(-1ll),
+      mAudioIsVorbis(false),
+      mIsSecure(false),
+      mIsStreaming(false),
+      mUIDValid(uidValid),
+      mUID(uid),
+      mMediaClock(mediaClock),
+      mFd(-1),
+      mBitrate(-1ll),
+      mPendingReadBufferTypes(0) {
+    ALOGV("GenericSource");
+    CHECK(mediaClock != NULL);
+
+    mBufferingSettings.mInitialMarkMs = kInitialMarkMs;
+    mBufferingSettings.mResumePlaybackMarkMs = kResumePlaybackMarkMs;
+    resetDataSource();
+}
+
+void NuPlayer2::GenericSource::resetDataSource() {
+    ALOGV("resetDataSource");
+
+    mHTTPService.clear();
+    mHttpSource.clear();
+    mDisconnected = false;
+    mUri.clear();
+    mUriHeaders.clear();
+    if (mFd >= 0) {
+        close(mFd);
+        mFd = -1;
+    }
+    mOffset = 0;
+    mLength = 0;
+    mStarted = false;
+    mPreparing = false;
+
+    mIsDrmProtected = false;
+    mIsDrmReleased = false;
+    mIsSecure = false;
+    mMimes.clear();
+}
+
+status_t NuPlayer2::GenericSource::setDataSource(
+        const sp<MediaHTTPService> &httpService,
+        const char *url,
+        const KeyedVector<String8, String8> *headers) {
+    Mutex::Autolock _l(mLock);
+    ALOGV("setDataSource url: %s", url);
+
+    resetDataSource();
+
+    mHTTPService = httpService;
+    mUri = url;
+
+    if (headers) {
+        mUriHeaders = *headers;
+    }
+
+    // delay data source creation to prepareAsync() to avoid blocking
+    // the calling thread in setDataSource for any significant time.
+    return OK;
+}
+
+status_t NuPlayer2::GenericSource::setDataSource(
+        int fd, int64_t offset, int64_t length) {
+    Mutex::Autolock _l(mLock);
+    ALOGV("setDataSource %d/%lld/%lld", fd, (long long)offset, (long long)length);
+
+    resetDataSource();
+
+    mFd = dup(fd);
+    mOffset = offset;
+    mLength = length;
+
+    // delay data source creation to prepareAsync() to avoid blocking
+    // the calling thread in setDataSource for any significant time.
+    return OK;
+}
+
+status_t NuPlayer2::GenericSource::setDataSource(const sp<DataSource>& source) {
+    Mutex::Autolock _l(mLock);
+    ALOGV("setDataSource (source: %p)", source.get());
+
+    resetDataSource();
+    mDataSource = source;
+    return OK;
+}
+
+sp<MetaData> NuPlayer2::GenericSource::getFileFormatMeta() const {
+    Mutex::Autolock _l(mLock);
+    return mFileMeta;
+}
+
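+// Builds the extractor from mDataSource and selects the first audio and first
+// video track. mLock is dropped around MediaExtractorFactory::Create() because
+// extractor creation can block on an unreliable data source.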
+status_t NuPlayer2::GenericSource::initFromDataSource() {
+    sp<IMediaExtractor> extractor;
+    CHECK(mDataSource != NULL);
+    sp<DataSource> dataSource = mDataSource;
+
+    mLock.unlock();
+    // This might take a long time if the data source is not reliable.
+    extractor = MediaExtractorFactory::Create(dataSource, NULL);
+
+    if (extractor == NULL) {
+        ALOGE("initFromDataSource, cannot create extractor!");
+        return UNKNOWN_ERROR;
+    }
+
+    sp<MetaData> fileMeta = extractor->getMetaData();
+
+    size_t numtracks = extractor->countTracks();
+    if (numtracks == 0) {
+        ALOGE("initFromDataSource, source has no track!");
+        return UNKNOWN_ERROR;
+    }
+
+    mLock.lock();
+    mFileMeta = fileMeta;
+    if (mFileMeta != NULL) {
+        int64_t duration;
+        if (mFileMeta->findInt64(kKeyDuration, &duration)) {
+            mDurationUs = duration;
+        }
+    }
+
+    int32_t totalBitrate = 0;
+
+    mMimes.clear();
+
+    for (size_t i = 0; i < numtracks; ++i) {
+        sp<IMediaSource> track = extractor->getTrack(i);
+        if (track == NULL) {
+            continue;
+        }
+
+        sp<MetaData> meta = extractor->getTrackMetaData(i);
+        if (meta == NULL) {
+            ALOGE("no metadata for track %zu", i);
+            return UNKNOWN_ERROR;
+        }
+
+        const char *mime;
+        CHECK(meta->findCString(kKeyMIMEType, &mime));
+
+        ALOGV("initFromDataSource track[%zu]: %s", i, mime);
+
+        // Do the string compare immediately with "mime"; we can't assume
+        // "mime" stays valid after another extractor operation, since some
+        // extractors might modify meta during getTrack() and make it invalid.
+        if (!strncasecmp(mime, "audio/", 6)) {
+            if (mAudioTrack.mSource == NULL) {
+                mAudioTrack.mIndex = i;
+                mAudioTrack.mSource = track;
+                mAudioTrack.mPackets =
+                    new AnotherPacketSource(mAudioTrack.mSource->getFormat());
+
+                if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_VORBIS)) {
+                    mAudioIsVorbis = true;
+                } else {
+                    mAudioIsVorbis = false;
+                }
+
+                mMimes.add(String8(mime));
+            }
+        } else if (!strncasecmp(mime, "video/", 6)) {
+            if (mVideoTrack.mSource == NULL) {
+                mVideoTrack.mIndex = i;
+                mVideoTrack.mSource = track;
+                mVideoTrack.mPackets =
+                    new AnotherPacketSource(mVideoTrack.mSource->getFormat());
+
+                // video always at the beginning
+                mMimes.insertAt(String8(mime), 0);
+            }
+        }
+
+        mSources.push(track);
+        int64_t durationUs;
+        if (meta->findInt64(kKeyDuration, &durationUs)) {
+            if (durationUs > mDurationUs) {
+                mDurationUs = durationUs;
+            }
+        }
+
+        int32_t bitrate;
+        if (totalBitrate >= 0 && meta->findInt32(kKeyBitRate, &bitrate)) {
+            totalBitrate += bitrate;
+        } else {
+            totalBitrate = -1;
+        }
+    }
+
+    ALOGV("initFromDataSource mSources.size(): %zu  mIsSecure: %d  mime[0]: %s", mSources.size(),
+            mIsSecure, (mMimes.isEmpty() ? "NONE" : mMimes[0].string()));
+
+    if (mSources.size() == 0) {
+        ALOGE("b/23705695");
+        return UNKNOWN_ERROR;
+    }
+
+    // Modular DRM: The return value doesn't affect source initialization.
+    (void)checkDrmInfo();
+
+    mBitrate = totalBitrate;
+
+    return OK;
+}
+
+status_t NuPlayer2::GenericSource::getBufferingSettings(
+        BufferingSettings* buffering /* nonnull */) {
+    {
+        Mutex::Autolock _l(mLock);
+        *buffering = mBufferingSettings;
+    }
+
+    ALOGV("getBufferingSettings{%s}", buffering->toString().string());
+    return OK;
+}
+
+status_t NuPlayer2::GenericSource::setBufferingSettings(const BufferingSettings& buffering) {
+    ALOGV("setBufferingSettings{%s}", buffering.toString().string());
+
+    Mutex::Autolock _l(mLock);
+    mBufferingSettings = buffering;
+    return OK;
+}
+
+status_t NuPlayer2::GenericSource::startSources() {
+    // Start the selected A/V tracks now before we start buffering.
+    // Widevine sources might re-initialize crypto when starting; if we delayed
+    // this to start(), all data buffered during prepare would be wasted.
+    // (We don't actually start reading until start().)
+    //
+    // TODO: this logic may no longer be relevant after the removal of widevine
+    // support
+    if (mAudioTrack.mSource != NULL && mAudioTrack.mSource->start() != OK) {
+        ALOGE("failed to start audio track!");
+        return UNKNOWN_ERROR;
+    }
+
+    if (mVideoTrack.mSource != NULL && mVideoTrack.mSource->start() != OK) {
+        ALOGE("failed to start video track!");
+        return UNKNOWN_ERROR;
+    }
+
+    return OK;
+}
+
+int64_t NuPlayer2::GenericSource::getLastReadPosition() {
+    if (mAudioTrack.mSource != NULL) {
+        return mAudioTimeUs;
+    } else if (mVideoTrack.mSource != NULL) {
+        return mVideoTimeUs;
+    } else {
+        return 0;
+    }
+}
+
+status_t NuPlayer2::GenericSource::setBuffers(
+        bool audio, Vector<MediaBuffer *> &buffers) {
+    Mutex::Autolock _l(mLock);
+    if (mIsSecure && !audio && mVideoTrack.mSource != NULL) {
+        return mVideoTrack.mSource->setBuffers(buffers);
+    }
+    return INVALID_OPERATION;
+}
+
+bool NuPlayer2::GenericSource::isStreaming() const {
+    Mutex::Autolock _l(mLock);
+    return mIsStreaming;
+}
+
+NuPlayer2::GenericSource::~GenericSource() {
+    ALOGV("~GenericSource");
+    if (mLooper != NULL) {
+        mLooper->unregisterHandler(id());
+        mLooper->stop();
+    }
+    resetDataSource();
+}
+
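+// Starts asynchronous preparation on the source's own ALooper; the actual work
+// happens in onPrepareAsync() when kWhatPrepareAsync is handled.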
+void NuPlayer2::GenericSource::prepareAsync() {
+    Mutex::Autolock _l(mLock);
+    ALOGV("prepareAsync: (looper: %d)", (mLooper != NULL));
+
+    if (mLooper == NULL) {
+        mLooper = new ALooper;
+        mLooper->setName("generic");
+        mLooper->start();
+
+        mLooper->registerHandler(this);
+    }
+
+    sp<AMessage> msg = new AMessage(kWhatPrepareAsync, this);
+    msg->post();
+}
+
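+// Runs on the looper thread: creates the data source (HTTP, remote extractor
+// IDataSource, or local FileSource), initializes the extractor, and reports
+// the result via notifyPrepared()/notifyPreparedAndCleanup().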
+void NuPlayer2::GenericSource::onPrepareAsync() {
+    ALOGV("onPrepareAsync: mDataSource: %d", (mDataSource != NULL));
+
+    // delayed data source creation
+    if (mDataSource == NULL) {
+        // Set to false first; if the extractor comes back as secure,
+        // set it to true then.
+        mIsSecure = false;
+
+        if (!mUri.empty()) {
+            const char* uri = mUri.c_str();
+            String8 contentType;
+
+            if (!strncasecmp("http://", uri, 7) || !strncasecmp("https://", uri, 8)) {
+                mHttpSource = DataSourceFactory::CreateMediaHTTP(mHTTPService);
+                if (mHttpSource == NULL) {
+                    ALOGE("Failed to create http source!");
+                    notifyPreparedAndCleanup(UNKNOWN_ERROR);
+                    return;
+                }
+            }
+
+            mLock.unlock();
+            // This might take a long time if the connection has issues.
+            sp<DataSource> dataSource = DataSourceFactory::CreateFromURI(
+                   mHTTPService, uri, &mUriHeaders, &contentType,
+                   static_cast<HTTPBase *>(mHttpSource.get()));
+            mLock.lock();
+            if (!mDisconnected) {
+                mDataSource = dataSource;
+            }
+        } else {
+            if (property_get_bool("media.stagefright.extractremote", true) &&
+                    !FileSource::requiresDrm(mFd, mOffset, mLength, nullptr /* mime */)) {
+                sp<IBinder> binder =
+                        defaultServiceManager()->getService(String16("media.extractor"));
+                if (binder != nullptr) {
+                    ALOGD("FileSource remote");
+                    sp<IMediaExtractorService> mediaExService(
+                            interface_cast<IMediaExtractorService>(binder));
+                    sp<IDataSource> source =
+                            mediaExService->makeIDataSource(mFd, mOffset, mLength);
+                    ALOGV("IDataSource(FileSource): %p %d %lld %lld",
+                            source.get(), mFd, (long long)mOffset, (long long)mLength);
+                    if (source.get() != nullptr) {
+                        mDataSource = CreateDataSourceFromIDataSource(source);
+                        if (mDataSource != nullptr) {
+                            // Close the local file descriptor as it is not needed anymore.
+                            close(mFd);
+                            mFd = -1;
+                        }
+                    } else {
+                        ALOGW("extractor service cannot make data source");
+                    }
+                } else {
+                    ALOGW("extractor service not running");
+                }
+            }
+            if (mDataSource == nullptr) {
+                ALOGD("FileSource local");
+                mDataSource = new FileSource(mFd, mOffset, mLength);
+            }
+            // TODO: close should always be done on mFd (see the lines following
+            // CreateDataSourceFromIDataSource above), and the FileSource
+            // constructor should dup the mFd argument as needed.
+            mFd = -1;
+        }
+
+        if (mDataSource == NULL) {
+            ALOGE("Failed to create data source!");
+            notifyPreparedAndCleanup(UNKNOWN_ERROR);
+            return;
+        }
+    }
+
+    if (mDataSource->flags() & DataSource::kIsCachingDataSource) {
+        mCachedSource = static_cast<NuCachedSource2 *>(mDataSource.get());
+    }
+
+    // For cached streaming cases, we need to wait for enough
+    // buffering before reporting prepared.
+    mIsStreaming = (mCachedSource != NULL);
+
+    // init extractor from data source
+    status_t err = initFromDataSource();
+
+    if (err != OK) {
+        ALOGE("Failed to init from data source!");
+        notifyPreparedAndCleanup(err);
+        return;
+    }
+
+    if (mVideoTrack.mSource != NULL) {
+        sp<MetaData> meta = getFormatMeta_l(false /* audio */);
+        sp<AMessage> msg = new AMessage;
+        err = convertMetaDataToMessage(meta, &msg);
+        if (err != OK) {
+            notifyPreparedAndCleanup(err);
+            return;
+        }
+        notifyVideoSizeChanged(msg);
+    }
+
+    notifyFlagsChanged(
+            // FLAG_SECURE will be known if/when prepareDrm is called by the app
+            // FLAG_PROTECTED will be known if/when prepareDrm is called by the app
+            FLAG_CAN_PAUSE |
+            FLAG_CAN_SEEK_BACKWARD |
+            FLAG_CAN_SEEK_FORWARD |
+            FLAG_CAN_SEEK);
+
+    finishPrepareAsync();
+
+    ALOGV("onPrepareAsync: Done");
+}
+
+void NuPlayer2::GenericSource::finishPrepareAsync() {
+    ALOGV("finishPrepareAsync");
+
+    status_t err = startSources();
+    if (err != OK) {
+        ALOGE("Failed to init start data source!");
+        notifyPreparedAndCleanup(err);
+        return;
+    }
+
+    if (mIsStreaming) {
+        mCachedSource->resumeFetchingIfNecessary();
+        mPreparing = true;
+        schedulePollBuffering();
+    } else {
+        notifyPrepared();
+    }
+
+    if (mAudioTrack.mSource != NULL) {
+        postReadBuffer(MEDIA_TRACK_TYPE_AUDIO);
+    }
+
+    if (mVideoTrack.mSource != NULL) {
+        postReadBuffer(MEDIA_TRACK_TYPE_VIDEO);
+    }
+}
+
+void NuPlayer2::GenericSource::notifyPreparedAndCleanup(status_t err) {
+    if (err != OK) {
+        mDataSource.clear();
+        mCachedSource.clear();
+        mHttpSource.clear();
+
+        mBitrate = -1;
+        mPrevBufferPercentage = -1;
+        ++mPollBufferingGeneration;
+    }
+    notifyPrepared(err);
+}
+
+void NuPlayer2::GenericSource::start() {
+    Mutex::Autolock _l(mLock);
+    ALOGI("start");
+
+    if (mAudioTrack.mSource != NULL) {
+        postReadBuffer(MEDIA_TRACK_TYPE_AUDIO);
+    }
+
+    if (mVideoTrack.mSource != NULL) {
+        postReadBuffer(MEDIA_TRACK_TYPE_VIDEO);
+    }
+
+    mStarted = true;
+}
+
+void NuPlayer2::GenericSource::stop() {
+    Mutex::Autolock _l(mLock);
+    mStarted = false;
+}
+
+void NuPlayer2::GenericSource::pause() {
+    Mutex::Autolock _l(mLock);
+    mStarted = false;
+}
+
+void NuPlayer2::GenericSource::resume() {
+    Mutex::Autolock _l(mLock);
+    mStarted = true;
+}
+
+void NuPlayer2::GenericSource::disconnect() {
+    sp<DataSource> dataSource, httpSource;
+    {
+        Mutex::Autolock _l(mLock);
+        dataSource = mDataSource;
+        httpSource = mHttpSource;
+        mDisconnected = true;
+    }
+
+    if (dataSource != NULL) {
+        // disconnect data source
+        if (dataSource->flags() & DataSource::kIsCachingDataSource) {
+            static_cast<NuCachedSource2 *>(dataSource.get())->disconnect();
+        }
+    } else if (httpSource != NULL) {
+        static_cast<HTTPBase *>(httpSource.get())->disconnect();
+    }
+}
+
+status_t NuPlayer2::GenericSource::feedMoreTSData() {
+    return OK;
+}
+
+void NuPlayer2::GenericSource::sendCacheStats() {
+    int32_t kbps = 0;
+    status_t err = UNKNOWN_ERROR;
+
+    if (mCachedSource != NULL) {
+        err = mCachedSource->getEstimatedBandwidthKbps(&kbps);
+    }
+
+    if (err == OK) {
+        sp<AMessage> notify = dupNotify();
+        notify->setInt32("what", kWhatCacheStats);
+        notify->setInt32("bandwidth", kbps);
+        notify->post();
+    }
+}
+
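+// Looper-thread message dispatch. mLock is taken here, so every handler below
+// runs with the source lock held.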
+void NuPlayer2::GenericSource::onMessageReceived(const sp<AMessage> &msg) {
+    Mutex::Autolock _l(mLock);
+    switch (msg->what()) {
+      case kWhatPrepareAsync:
+      {
+          onPrepareAsync();
+          break;
+      }
+      case kWhatFetchSubtitleData:
+      {
+          fetchTextData(kWhatSendSubtitleData, MEDIA_TRACK_TYPE_SUBTITLE,
+                  mFetchSubtitleDataGeneration, mSubtitleTrack.mPackets, msg);
+          break;
+      }
+
+      case kWhatFetchTimedTextData:
+      {
+          fetchTextData(kWhatSendTimedTextData, MEDIA_TRACK_TYPE_TIMEDTEXT,
+                  mFetchTimedTextDataGeneration, mTimedTextTrack.mPackets, msg);
+          break;
+      }
+
+      case kWhatSendSubtitleData:
+      {
+          sendTextData(kWhatSubtitleData, MEDIA_TRACK_TYPE_SUBTITLE,
+                  mFetchSubtitleDataGeneration, mSubtitleTrack.mPackets, msg);
+          break;
+      }
+
+      case kWhatSendGlobalTimedTextData:
+      {
+          sendGlobalTextData(kWhatTimedTextData, mFetchTimedTextDataGeneration, msg);
+          break;
+      }
+      case kWhatSendTimedTextData:
+      {
+          sendTextData(kWhatTimedTextData, MEDIA_TRACK_TYPE_TIMEDTEXT,
+                  mFetchTimedTextDataGeneration, mTimedTextTrack.mPackets, msg);
+          break;
+      }
+
+      case kWhatChangeAVSource:
+      {
+          int32_t trackIndex;
+          CHECK(msg->findInt32("trackIndex", &trackIndex));
+          const sp<IMediaSource> source = mSources.itemAt(trackIndex);
+
+          Track* track;
+          const char *mime;
+          media_track_type trackType, counterpartType;
+          sp<MetaData> meta = source->getFormat();
+          meta->findCString(kKeyMIMEType, &mime);
+          if (!strncasecmp(mime, "audio/", 6)) {
+              track = &mAudioTrack;
+              trackType = MEDIA_TRACK_TYPE_AUDIO;
+              counterpartType = MEDIA_TRACK_TYPE_VIDEO;
+          } else {
+              CHECK(!strncasecmp(mime, "video/", 6));
+              track = &mVideoTrack;
+              trackType = MEDIA_TRACK_TYPE_VIDEO;
+              counterpartType = MEDIA_TRACK_TYPE_AUDIO;
+          }
+
+          if (track->mSource != NULL) {
+              track->mSource->stop();
+          }
+          track->mSource = source;
+          track->mSource->start();
+          track->mIndex = trackIndex;
+          ++mAudioDataGeneration;
+          ++mVideoDataGeneration;
+
+          int64_t timeUs, actualTimeUs;
+          const bool formatChange = true;
+          if (trackType == MEDIA_TRACK_TYPE_AUDIO) {
+              timeUs = mAudioLastDequeueTimeUs;
+          } else {
+              timeUs = mVideoLastDequeueTimeUs;
+          }
+          readBuffer(trackType, timeUs, MediaPlayer2SeekMode::SEEK_PREVIOUS_SYNC /* mode */,
+                  &actualTimeUs, formatChange);
+          readBuffer(counterpartType, -1, MediaPlayer2SeekMode::SEEK_PREVIOUS_SYNC /* mode */,
+                  NULL, !formatChange);
+          ALOGV("timeUs %lld actualTimeUs %lld", (long long)timeUs, (long long)actualTimeUs);
+
+          break;
+      }
+
+      case kWhatSeek:
+      {
+          onSeek(msg);
+          break;
+      }
+
+      case kWhatReadBuffer:
+      {
+          onReadBuffer(msg);
+          break;
+      }
+
+      case kWhatPollBuffering:
+      {
+          int32_t generation;
+          CHECK(msg->findInt32("generation", &generation));
+          if (generation == mPollBufferingGeneration) {
+              onPollBuffering();
+          }
+          break;
+      }
+
+      default:
+          Source::onMessageReceived(msg);
+          break;
+    }
+}
+
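+// Reads ahead on a subtitle/timed-text track and arms a MediaClock timer so the
+// corresponding send message fires near the buffer's media time (one second
+// early for subtitles).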
+void NuPlayer2::GenericSource::fetchTextData(
+        uint32_t sendWhat,
+        media_track_type type,
+        int32_t curGen,
+        const sp<AnotherPacketSource>& packets,
+        const sp<AMessage>& msg) {
+    int32_t msgGeneration;
+    CHECK(msg->findInt32("generation", &msgGeneration));
+    if (msgGeneration != curGen) {
+        // stale
+        return;
+    }
+
+    int32_t avail;
+    if (packets->hasBufferAvailable(&avail)) {
+        return;
+    }
+
+    int64_t timeUs;
+    CHECK(msg->findInt64("timeUs", &timeUs));
+
+    int64_t subTimeUs = 0;
+    readBuffer(type, timeUs, MediaPlayer2SeekMode::SEEK_PREVIOUS_SYNC /* mode */, &subTimeUs);
+
+    status_t eosResult;
+    if (!packets->hasBufferAvailable(&eosResult)) {
+        return;
+    }
+
+    if (msg->what() == kWhatFetchSubtitleData) {
+        subTimeUs -= 1000000ll;  // send subtitle data one second earlier
+    }
+    sp<AMessage> msg2 = new AMessage(sendWhat, this);
+    msg2->setInt32("generation", msgGeneration);
+    mMediaClock->addTimer(msg2, subTimeUs);
+}
+
+void NuPlayer2::GenericSource::sendTextData(
+        uint32_t what,
+        media_track_type type,
+        int32_t curGen,
+        const sp<AnotherPacketSource>& packets,
+        const sp<AMessage>& msg) {
+    int32_t msgGeneration;
+    CHECK(msg->findInt32("generation", &msgGeneration));
+    if (msgGeneration != curGen) {
+        // stale
+        return;
+    }
+
+    int64_t subTimeUs;
+    if (packets->nextBufferTime(&subTimeUs) != OK) {
+        return;
+    }
+
+    int64_t nextSubTimeUs;
+    readBuffer(type, -1, MediaPlayer2SeekMode::SEEK_PREVIOUS_SYNC /* mode */, &nextSubTimeUs);
+
+    sp<ABuffer> buffer;
+    status_t dequeueStatus = packets->dequeueAccessUnit(&buffer);
+    if (dequeueStatus == OK) {
+        sp<AMessage> notify = dupNotify();
+        notify->setInt32("what", what);
+        notify->setBuffer("buffer", buffer);
+        notify->post();
+
+        if (msg->what() == kWhatSendSubtitleData) {
+            nextSubTimeUs -= 1000000ll;  // send subtitle data one second earlier
+        }
+        mMediaClock->addTimer(msg, nextSubTimeUs);
+    }
+}
+
+void NuPlayer2::GenericSource::sendGlobalTextData(
+        uint32_t what,
+        int32_t curGen,
+        sp<AMessage> msg) {
+    int32_t msgGeneration;
+    CHECK(msg->findInt32("generation", &msgGeneration));
+    if (msgGeneration != curGen) {
+        // stale
+        return;
+    }
+
+    uint32_t textType;
+    const void *data;
+    size_t size = 0;
+    if (mTimedTextTrack.mSource->getFormat()->findData(
+                    kKeyTextFormatData, &textType, &data, &size)) {
+        mGlobalTimedText = new ABuffer(size);
+        if (mGlobalTimedText->data()) {
+            memcpy(mGlobalTimedText->data(), data, size);
+            sp<AMessage> globalMeta = mGlobalTimedText->meta();
+            globalMeta->setInt64("timeUs", 0);
+            globalMeta->setString("mime", MEDIA_MIMETYPE_TEXT_3GPP);
+            globalMeta->setInt32("global", 1);
+            sp<AMessage> notify = dupNotify();
+            notify->setInt32("what", what);
+            notify->setBuffer("buffer", mGlobalTimedText);
+            notify->post();
+        }
+    }
+}
+
+sp<MetaData> NuPlayer2::GenericSource::getFormatMeta(bool audio) {
+    Mutex::Autolock _l(mLock);
+    return getFormatMeta_l(audio);
+}
+
+sp<MetaData> NuPlayer2::GenericSource::getFormatMeta_l(bool audio) {
+    sp<IMediaSource> source = audio ? mAudioTrack.mSource : mVideoTrack.mSource;
+
+    if (source == NULL) {
+        return NULL;
+    }
+
+    return source->getFormat();
+}
+
+status_t NuPlayer2::GenericSource::dequeueAccessUnit(
+        bool audio, sp<ABuffer> *accessUnit) {
+    Mutex::Autolock _l(mLock);
+    // If the player has gone through the stop/releaseDrm sequence, we no longer send down any
+    // buffers because the codec's crypto object has gone away (b/37960096).
+    // Note: This will be unnecessary when stop() changes behavior and releases the codec
+    // (b/35248283).
+    if (!mStarted && mIsDrmReleased) {
+        return -EWOULDBLOCK;
+    }
+
+    Track *track = audio ? &mAudioTrack : &mVideoTrack;
+
+    if (track->mSource == NULL) {
+        return -EWOULDBLOCK;
+    }
+
+    status_t finalResult;
+    if (!track->mPackets->hasBufferAvailable(&finalResult)) {
+        if (finalResult == OK) {
+            postReadBuffer(
+                    audio ? MEDIA_TRACK_TYPE_AUDIO : MEDIA_TRACK_TYPE_VIDEO);
+            return -EWOULDBLOCK;
+        }
+        return finalResult;
+    }
+
+    status_t result = track->mPackets->dequeueAccessUnit(accessUnit);
+
+    // Start pulling in more buffers if the cache is running low,
+    // so that the decoder is less likely to be starved.
+    if (!mIsStreaming) {
+        if (track->mPackets->getAvailableBufferCount(&finalResult) < 2) {
+            postReadBuffer(audio? MEDIA_TRACK_TYPE_AUDIO : MEDIA_TRACK_TYPE_VIDEO);
+        }
+    } else {
+        int64_t durationUs = track->mPackets->getBufferedDurationUs(&finalResult);
+        // TODO: maxRebufferingMarkMs could be larger than
+        // mBufferingSettings.mResumePlaybackMarkMs
+        int64_t restartBufferingMarkUs =
+             mBufferingSettings.mResumePlaybackMarkMs * 1000ll / 2;
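+        // Restart fetching once the buffered duration drops below half of the resume-playback
+        // mark, so the decoder is less likely to starve before buffering resumes.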
+        if (finalResult == OK) {
+            if (durationUs < restartBufferingMarkUs) {
+                postReadBuffer(audio? MEDIA_TRACK_TYPE_AUDIO : MEDIA_TRACK_TYPE_VIDEO);
+            }
+            if (track->mPackets->getAvailableBufferCount(&finalResult) < 2
+                && !mSentPauseOnBuffering && !mPreparing) {
+                mCachedSource->resumeFetchingIfNecessary();
+                sendCacheStats();
+                mSentPauseOnBuffering = true;
+                sp<AMessage> notify = dupNotify();
+                notify->setInt32("what", kWhatPauseOnBufferingStart);
+                notify->post();
+            }
+        }
+    }
+
+    if (result != OK) {
+        if (mSubtitleTrack.mSource != NULL) {
+            mSubtitleTrack.mPackets->clear();
+            mFetchSubtitleDataGeneration++;
+        }
+        if (mTimedTextTrack.mSource != NULL) {
+            mTimedTextTrack.mPackets->clear();
+            mFetchTimedTextDataGeneration++;
+        }
+        return result;
+    }
+
+    int64_t timeUs;
+    status_t eosResult; // ignored
+    CHECK((*accessUnit)->meta()->findInt64("timeUs", &timeUs));
+    if (audio) {
+        mAudioLastDequeueTimeUs = timeUs;
+    } else {
+        mVideoLastDequeueTimeUs = timeUs;
+    }
+
+    if (mSubtitleTrack.mSource != NULL
+            && !mSubtitleTrack.mPackets->hasBufferAvailable(&eosResult)) {
+        sp<AMessage> msg = new AMessage(kWhatFetchSubtitleData, this);
+        msg->setInt64("timeUs", timeUs);
+        msg->setInt32("generation", mFetchSubtitleDataGeneration);
+        msg->post();
+    }
+
+    if (mTimedTextTrack.mSource != NULL
+            && !mTimedTextTrack.mPackets->hasBufferAvailable(&eosResult)) {
+        sp<AMessage> msg = new AMessage(kWhatFetchTimedTextData, this);
+        msg->setInt64("timeUs", timeUs);
+        msg->setInt32("generation", mFetchTimedTextDataGeneration);
+        msg->post();
+    }
+
+    return result;
+}
+
+status_t NuPlayer2::GenericSource::getDuration(int64_t *durationUs) {
+    Mutex::Autolock _l(mLock);
+    *durationUs = mDurationUs;
+    return OK;
+}
+
+size_t NuPlayer2::GenericSource::getTrackCount() const {
+    Mutex::Autolock _l(mLock);
+    return mSources.size();
+}
+
+sp<AMessage> NuPlayer2::GenericSource::getTrackInfo(size_t trackIndex) const {
+    Mutex::Autolock _l(mLock);
+    size_t trackCount = mSources.size();
+    if (trackIndex >= trackCount) {
+        return NULL;
+    }
+
+    sp<AMessage> format = new AMessage();
+    sp<MetaData> meta = mSources.itemAt(trackIndex)->getFormat();
+    if (meta == NULL) {
+        ALOGE("no metadata for track %zu", trackIndex);
+        return NULL;
+    }
+
+    const char *mime;
+    CHECK(meta->findCString(kKeyMIMEType, &mime));
+    format->setString("mime", mime);
+
+    int32_t trackType;
+    if (!strncasecmp(mime, "video/", 6)) {
+        trackType = MEDIA_TRACK_TYPE_VIDEO;
+    } else if (!strncasecmp(mime, "audio/", 6)) {
+        trackType = MEDIA_TRACK_TYPE_AUDIO;
+    } else if (!strcasecmp(mime, MEDIA_MIMETYPE_TEXT_3GPP)) {
+        trackType = MEDIA_TRACK_TYPE_TIMEDTEXT;
+    } else {
+        trackType = MEDIA_TRACK_TYPE_UNKNOWN;
+    }
+    format->setInt32("type", trackType);
+
+    const char *lang;
+    if (!meta->findCString(kKeyMediaLanguage, &lang)) {
+        lang = "und";
+    }
+    format->setString("language", lang);
+
+    if (trackType == MEDIA_TRACK_TYPE_SUBTITLE) {
+        int32_t isAutoselect = 1, isDefault = 0, isForced = 0;
+        meta->findInt32(kKeyTrackIsAutoselect, &isAutoselect);
+        meta->findInt32(kKeyTrackIsDefault, &isDefault);
+        meta->findInt32(kKeyTrackIsForced, &isForced);
+
+        format->setInt32("auto", !!isAutoselect);
+        format->setInt32("default", !!isDefault);
+        format->setInt32("forced", !!isForced);
+    }
+
+    return format;
+}
+
+ssize_t NuPlayer2::GenericSource::getSelectedTrack(media_track_type type) const {
+    Mutex::Autolock _l(mLock);
+    const Track *track = NULL;
+    switch (type) {
+    case MEDIA_TRACK_TYPE_VIDEO:
+        track = &mVideoTrack;
+        break;
+    case MEDIA_TRACK_TYPE_AUDIO:
+        track = &mAudioTrack;
+        break;
+    case MEDIA_TRACK_TYPE_TIMEDTEXT:
+        track = &mTimedTextTrack;
+        break;
+    case MEDIA_TRACK_TYPE_SUBTITLE:
+        track = &mSubtitleTrack;
+        break;
+    default:
+        break;
+    }
+
+    if (track != NULL && track->mSource != NULL) {
+        return track->mIndex;
+    }
+
+    return -1;
+}
+
+status_t NuPlayer2::GenericSource::selectTrack(size_t trackIndex, bool select, int64_t timeUs) {
+    Mutex::Autolock _l(mLock);
+    ALOGV("%s track: %zu", select ? "select" : "deselect", trackIndex);
+
+    if (trackIndex >= mSources.size()) {
+        return BAD_INDEX;
+    }
+
+    if (!select) {
+        Track* track = NULL;
+        if (mSubtitleTrack.mSource != NULL && trackIndex == mSubtitleTrack.mIndex) {
+            track = &mSubtitleTrack;
+            mFetchSubtitleDataGeneration++;
+        } else if (mTimedTextTrack.mSource != NULL && trackIndex == mTimedTextTrack.mIndex) {
+            track = &mTimedTextTrack;
+            mFetchTimedTextDataGeneration++;
+        }
+        if (track == NULL) {
+            return INVALID_OPERATION;
+        }
+        track->mSource->stop();
+        track->mSource = NULL;
+        track->mPackets->clear();
+        return OK;
+    }
+
+    const sp<IMediaSource> source = mSources.itemAt(trackIndex);
+    sp<MetaData> meta = source->getFormat();
+    const char *mime;
+    CHECK(meta->findCString(kKeyMIMEType, &mime));
+    if (!strncasecmp(mime, "text/", 5)) {
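+        // strcasecmp() returns 0 on a match, so any text mime other than 3GPP timed text is
+        // treated as a subtitle track here.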
+        bool isSubtitle = strcasecmp(mime, MEDIA_MIMETYPE_TEXT_3GPP);
+        Track *track = isSubtitle ? &mSubtitleTrack : &mTimedTextTrack;
+        if (track->mSource != NULL && track->mIndex == trackIndex) {
+            return OK;
+        }
+        track->mIndex = trackIndex;
+        if (track->mSource != NULL) {
+            track->mSource->stop();
+        }
+        track->mSource = mSources.itemAt(trackIndex);
+        track->mSource->start();
+        if (track->mPackets == NULL) {
+            track->mPackets = new AnotherPacketSource(track->mSource->getFormat());
+        } else {
+            track->mPackets->clear();
+            track->mPackets->setFormat(track->mSource->getFormat());
+        }
+
+        if (isSubtitle) {
+            mFetchSubtitleDataGeneration++;
+        } else {
+            mFetchTimedTextDataGeneration++;
+        }
+
+        status_t eosResult; // ignored
+        if (mSubtitleTrack.mSource != NULL
+                && !mSubtitleTrack.mPackets->hasBufferAvailable(&eosResult)) {
+            sp<AMessage> msg = new AMessage(kWhatFetchSubtitleData, this);
+            msg->setInt64("timeUs", timeUs);
+            msg->setInt32("generation", mFetchSubtitleDataGeneration);
+            msg->post();
+        }
+
+        sp<AMessage> msg2 = new AMessage(kWhatSendGlobalTimedTextData, this);
+        msg2->setInt32("generation", mFetchTimedTextDataGeneration);
+        msg2->post();
+
+        if (mTimedTextTrack.mSource != NULL
+                && !mTimedTextTrack.mPackets->hasBufferAvailable(&eosResult)) {
+            sp<AMessage> msg = new AMessage(kWhatFetchTimedTextData, this);
+            msg->setInt64("timeUs", timeUs);
+            msg->setInt32("generation", mFetchTimedTextDataGeneration);
+            msg->post();
+        }
+
+        return OK;
+    } else if (!strncasecmp(mime, "audio/", 6) || !strncasecmp(mime, "video/", 6)) {
+        bool audio = !strncasecmp(mime, "audio/", 6);
+        Track *track = audio ? &mAudioTrack : &mVideoTrack;
+        if (track->mSource != NULL && track->mIndex == trackIndex) {
+            return OK;
+        }
+
+        sp<AMessage> msg = new AMessage(kWhatChangeAVSource, this);
+        msg->setInt32("trackIndex", trackIndex);
+        msg->post();
+        return OK;
+    }
+
+    return INVALID_OPERATION;
+}
+
+status_t NuPlayer2::GenericSource::seekTo(int64_t seekTimeUs, MediaPlayer2SeekMode mode) {
+    ALOGV("seekTo: %lld, %d", (long long)seekTimeUs, mode);
+    sp<AMessage> msg = new AMessage(kWhatSeek, this);
+    msg->setInt64("seekTimeUs", seekTimeUs);
+    msg->setInt32("mode", mode);
+
+    // Need to call readBuffer on |mLooper| to ensure the calls to
+    // IMediaSource::read* are serialized. Note that IMediaSource::read*
+    // is called without |mLock| acquired and MediaSource is not thread safe.
+    sp<AMessage> response;
+    status_t err = msg->postAndAwaitResponse(&response);
+    if (err == OK && response != NULL) {
+        CHECK(response->findInt32("err", &err));
+    }
+
+    return err;
+}
+
+void NuPlayer2::GenericSource::onSeek(const sp<AMessage>& msg) {
+    int64_t seekTimeUs;
+    int32_t mode;
+    CHECK(msg->findInt64("seekTimeUs", &seekTimeUs));
+    CHECK(msg->findInt32("mode", &mode));
+
+    sp<AMessage> response = new AMessage;
+    status_t err = doSeek(seekTimeUs, (MediaPlayer2SeekMode)mode);
+    response->setInt32("err", err);
+
+    sp<AReplyToken> replyID;
+    CHECK(msg->senderAwaitsResponse(&replyID));
+    response->postReply(replyID);
+}
+
+status_t NuPlayer2::GenericSource::doSeek(int64_t seekTimeUs, MediaPlayer2SeekMode mode) {
+    if (mVideoTrack.mSource != NULL) {
+        ++mVideoDataGeneration;
+
+        int64_t actualTimeUs;
+        readBuffer(MEDIA_TRACK_TYPE_VIDEO, seekTimeUs, mode, &actualTimeUs);
+
+        if (mode != MediaPlayer2SeekMode::SEEK_CLOSEST) {
+            seekTimeUs = actualTimeUs;
+        }
+        mVideoLastDequeueTimeUs = actualTimeUs;
+    }
+
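+    // Seek audio to the (possibly video-adjusted) time with SEEK_CLOSEST so that both tracks
+    // resume from the same position.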
+    if (mAudioTrack.mSource != NULL) {
+        ++mAudioDataGeneration;
+        readBuffer(MEDIA_TRACK_TYPE_AUDIO, seekTimeUs, MediaPlayer2SeekMode::SEEK_CLOSEST);
+        mAudioLastDequeueTimeUs = seekTimeUs;
+    }
+
+    if (mSubtitleTrack.mSource != NULL) {
+        mSubtitleTrack.mPackets->clear();
+        mFetchSubtitleDataGeneration++;
+    }
+
+    if (mTimedTextTrack.mSource != NULL) {
+        mTimedTextTrack.mPackets->clear();
+        mFetchTimedTextDataGeneration++;
+    }
+
+    ++mPollBufferingGeneration;
+    schedulePollBuffering();
+    return OK;
+}
+
+sp<ABuffer> NuPlayer2::GenericSource::mediaBufferToABuffer(
+        MediaBuffer* mb,
+        media_track_type trackType) {
+    bool audio = trackType == MEDIA_TRACK_TYPE_AUDIO;
+    size_t outLength = mb->range_length();
+
+    if (audio && mAudioIsVorbis) {
+        outLength += sizeof(int32_t);
+    }
+
+    sp<ABuffer> ab;
+
+    if (mIsDrmProtected) {
+        // Modular DRM
+        // Enabled for both video and audio so that 1) the media buffer is reused without an
+        // extra copy and 2) the metadata can be retrieved in onInputBufferFetched when calling
+        // queueSecureInputBuffer.
+
+        // data is already provided in the buffer
+        ab = new ABuffer(NULL, mb->range_length());
+        mb->add_ref();
+        ab->setMediaBufferBase(mb);
+
+        // Modular DRM: required because of the add_ref above.
+        // If ref > 0, there must be an observer, or release() will crash.
+        // TODO: MediaBuffer might need to be revised to remove this requirement.
+        mb->setObserver(this);
+        // setMediaBufferBase() does not increment the ref count on its own.
+        // Take an extra reference so that mb stays alive and attached to ab beyond this
+        // function call; it counters the mb->release() towards the end.
+        mb->add_ref();
+
+    } else {
+        ab = new ABuffer(outLength);
+        memcpy(ab->data(),
+               (const uint8_t *)mb->data() + mb->range_offset(),
+               mb->range_length());
+    }
+
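+    // Vorbis decoders expect the number of valid samples in the page to be appended to each
+    // buffer as a trailing int32; outLength already reserved the extra bytes above.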
+    if (audio && mAudioIsVorbis) {
+        int32_t numPageSamples;
+        if (!mb->meta_data()->findInt32(kKeyValidSamples, &numPageSamples)) {
+            numPageSamples = -1;
+        }
+
+        uint8_t* abEnd = ab->data() + mb->range_length();
+        memcpy(abEnd, &numPageSamples, sizeof(numPageSamples));
+    }
+
+    sp<AMessage> meta = ab->meta();
+
+    int64_t timeUs;
+    CHECK(mb->meta_data()->findInt64(kKeyTime, &timeUs));
+    meta->setInt64("timeUs", timeUs);
+
+    if (trackType == MEDIA_TRACK_TYPE_VIDEO) {
+        int32_t layerId;
+        if (mb->meta_data()->findInt32(kKeyTemporalLayerId, &layerId)) {
+            meta->setInt32("temporal-layer-id", layerId);
+        }
+    }
+
+    if (trackType == MEDIA_TRACK_TYPE_TIMEDTEXT) {
+        const char *mime;
+        CHECK(mTimedTextTrack.mSource != NULL
+                && mTimedTextTrack.mSource->getFormat()->findCString(kKeyMIMEType, &mime));
+        meta->setString("mime", mime);
+    }
+
+    int64_t durationUs;
+    if (mb->meta_data()->findInt64(kKeyDuration, &durationUs)) {
+        meta->setInt64("durationUs", durationUs);
+    }
+
+    if (trackType == MEDIA_TRACK_TYPE_SUBTITLE) {
+        meta->setInt32("trackIndex", mSubtitleTrack.mIndex);
+    }
+
+    uint32_t dataType; // unused
+    const void *seiData;
+    size_t seiLength;
+    if (mb->meta_data()->findData(kKeySEI, &dataType, &seiData, &seiLength)) {
+        sp<ABuffer> sei = ABuffer::CreateAsCopy(seiData, seiLength);
+        meta->setBuffer("sei", sei);
+    }
+
+    const void *mpegUserDataPointer;
+    size_t mpegUserDataLength;
+    if (mb->meta_data()->findData(
+            kKeyMpegUserData, &dataType, &mpegUserDataPointer, &mpegUserDataLength)) {
+        sp<ABuffer> mpegUserData = ABuffer::CreateAsCopy(mpegUserDataPointer, mpegUserDataLength);
+        meta->setBuffer("mpegUserData", mpegUserData);
+    }
+
+    mb->release();
+    mb = NULL;
+
+    return ab;
+}
+
+int32_t NuPlayer2::GenericSource::getDataGeneration(media_track_type type) const {
+    int32_t generation = -1;
+    switch (type) {
+    case MEDIA_TRACK_TYPE_VIDEO:
+        generation = mVideoDataGeneration;
+        break;
+    case MEDIA_TRACK_TYPE_AUDIO:
+        generation = mAudioDataGeneration;
+        break;
+    case MEDIA_TRACK_TYPE_TIMEDTEXT:
+        generation = mFetchTimedTextDataGeneration;
+        break;
+    case MEDIA_TRACK_TYPE_SUBTITLE:
+        generation = mFetchSubtitleDataGeneration;
+        break;
+    default:
+        break;
+    }
+
+    return generation;
+}
+
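+// postReadBuffer() collapses duplicate requests: each track type owns one bit in
+// mPendingReadBufferTypes, so at most one kWhatReadBuffer message per track type is in
+// flight; onReadBuffer() clears the bit before reading.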
+void NuPlayer2::GenericSource::postReadBuffer(media_track_type trackType) {
+    if ((mPendingReadBufferTypes & (1 << trackType)) == 0) {
+        mPendingReadBufferTypes |= (1 << trackType);
+        sp<AMessage> msg = new AMessage(kWhatReadBuffer, this);
+        msg->setInt32("trackType", trackType);
+        msg->post();
+    }
+}
+
+void NuPlayer2::GenericSource::onReadBuffer(const sp<AMessage>& msg) {
+    int32_t tmpType;
+    CHECK(msg->findInt32("trackType", &tmpType));
+    media_track_type trackType = (media_track_type)tmpType;
+    mPendingReadBufferTypes &= ~(1 << trackType);
+    readBuffer(trackType);
+}
+
+void NuPlayer2::GenericSource::readBuffer(
+        media_track_type trackType, int64_t seekTimeUs, MediaPlayer2SeekMode mode,
+        int64_t *actualTimeUs, bool formatChange) {
+    Track *track;
+    size_t maxBuffers = 1;
+    switch (trackType) {
+        case MEDIA_TRACK_TYPE_VIDEO:
+            track = &mVideoTrack;
+            maxBuffers = 8;  // too large a number may hurt seek responsiveness
+            break;
+        case MEDIA_TRACK_TYPE_AUDIO:
+            track = &mAudioTrack;
+            maxBuffers = 64;
+            break;
+        case MEDIA_TRACK_TYPE_SUBTITLE:
+            track = &mSubtitleTrack;
+            break;
+        case MEDIA_TRACK_TYPE_TIMEDTEXT:
+            track = &mTimedTextTrack;
+            break;
+        default:
+            TRESPASS();
+    }
+
+    if (track->mSource == NULL) {
+        return;
+    }
+
+    if (actualTimeUs) {
+        *actualTimeUs = seekTimeUs;
+    }
+
+    MediaSource::ReadOptions options;
+
+    bool seeking = false;
+    if (seekTimeUs >= 0) {
+        options.setSeekTo(seekTimeUs, mode);
+        seeking = true;
+    }
+
+    const bool couldReadMultiple = (track->mSource->supportReadMultiple());
+
+    if (couldReadMultiple) {
+        options.setNonBlocking();
+    }
+
+    int32_t generation = getDataGeneration(trackType);
+    for (size_t numBuffers = 0; numBuffers < maxBuffers; ) {
+        Vector<MediaBuffer *> mediaBuffers;
+        status_t err = NO_ERROR;
+
+        sp<IMediaSource> source = track->mSource;
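+        // Drop mLock around the blocking IMediaSource read calls; MediaSource is not thread
+        // safe, but reads are serialized on mLooper (see seekTo()), and the generation check
+        // below discards the result if the track changed while the lock was released.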
+        mLock.unlock();
+        if (couldReadMultiple) {
+            err = source->readMultiple(
+                    &mediaBuffers, maxBuffers - numBuffers, &options);
+        } else {
+            MediaBuffer *mbuf = NULL;
+            err = source->read(&mbuf, &options);
+            if (err == OK && mbuf != NULL) {
+                mediaBuffers.push_back(mbuf);
+            }
+        }
+        mLock.lock();
+
+        options.clearNonPersistent();
+
+        size_t id = 0;
+        size_t count = mediaBuffers.size();
+
+        // The track may have changed while mLock was released above; if so, drop these buffers.
+        if (generation != getDataGeneration(trackType)) {
+            for (; id < count; ++id) {
+                mediaBuffers[id]->release();
+            }
+            break;
+        }
+
+        for (; id < count; ++id) {
+            int64_t timeUs;
+            MediaBuffer *mbuf = mediaBuffers[id];
+            if (!mbuf->meta_data()->findInt64(kKeyTime, &timeUs)) {
+                mbuf->meta_data()->dumpToLog();
+                track->mPackets->signalEOS(ERROR_MALFORMED);
+                break;
+            }
+            if (trackType == MEDIA_TRACK_TYPE_AUDIO) {
+                mAudioTimeUs = timeUs;
+            } else if (trackType == MEDIA_TRACK_TYPE_VIDEO) {
+                mVideoTimeUs = timeUs;
+            }
+
+            queueDiscontinuityIfNeeded(seeking, formatChange, trackType, track);
+
+            sp<ABuffer> buffer = mediaBufferToABuffer(mbuf, trackType);
+            if (numBuffers == 0 && actualTimeUs != nullptr) {
+                *actualTimeUs = timeUs;
+            }
+            if (seeking && buffer != nullptr) {
+                sp<AMessage> meta = buffer->meta();
+                if (meta != nullptr && mode == MediaPlayer2SeekMode::SEEK_CLOSEST
+                        && seekTimeUs > timeUs) {
+                    sp<AMessage> extra = new AMessage;
+                    extra->setInt64("resume-at-mediaTimeUs", seekTimeUs);
+                    meta->setMessage("extra", extra);
+                }
+            }
+
+            track->mPackets->queueAccessUnit(buffer);
+            formatChange = false;
+            seeking = false;
+            ++numBuffers;
+        }
+        if (id < count) {
+            // Error, some mediaBuffer doesn't have kKeyTime.
+            for (; id < count; ++id) {
+                mediaBuffers[id]->release();
+            }
+            break;
+        }
+
+        if (err == WOULD_BLOCK) {
+            break;
+        } else if (err == INFO_FORMAT_CHANGED) {
+#if 0
+            track->mPackets->queueDiscontinuity(
+                    ATSParser::DISCONTINUITY_FORMATCHANGE,
+                    NULL,
+                    false /* discard */);
+#endif
+        } else if (err != OK) {
+            queueDiscontinuityIfNeeded(seeking, formatChange, trackType, track);
+            track->mPackets->signalEOS(err);
+            break;
+        }
+    }
+
+    if (mIsStreaming
+        && (trackType == MEDIA_TRACK_TYPE_VIDEO || trackType == MEDIA_TRACK_TYPE_AUDIO)) {
+        status_t finalResult;
+        int64_t durationUs = track->mPackets->getBufferedDurationUs(&finalResult);
+
+        // TODO: maxRebufferingMarkMs could be larger than
+        // mBufferingSettings.mResumePlaybackMarkMs
+        int64_t markUs = (mPreparing ? mBufferingSettings.mInitialMarkMs
+            : mBufferingSettings.mResumePlaybackMarkMs) * 1000ll;
+        if (finalResult == ERROR_END_OF_STREAM || durationUs >= markUs) {
+            if (mPreparing || mSentPauseOnBuffering) {
+                Track *counterTrack =
+                    (trackType == MEDIA_TRACK_TYPE_VIDEO ? &mAudioTrack : &mVideoTrack);
+                if (counterTrack->mSource != NULL) {
+                    durationUs = counterTrack->mPackets->getBufferedDurationUs(&finalResult);
+                }
+                if (finalResult == ERROR_END_OF_STREAM || durationUs >= markUs) {
+                    if (mPreparing) {
+                        notifyPrepared();
+                        mPreparing = false;
+                    } else {
+                        sendCacheStats();
+                        mSentPauseOnBuffering = false;
+                        sp<AMessage> notify = dupNotify();
+                        notify->setInt32("what", kWhatResumeOnBufferingEnd);
+                        notify->post();
+                    }
+                }
+            }
+            return;
+        }
+
+        postReadBuffer(trackType);
+    }
+}
+
+void NuPlayer2::GenericSource::queueDiscontinuityIfNeeded(
+        bool seeking, bool formatChange, media_track_type trackType, Track *track) {
+    // formatChange && seeking: track whose source is changed during selection
+    // formatChange && !seeking: track whose source is not changed during selection
+    // !formatChange: normal seek
+    if ((seeking || formatChange)
+            && (trackType == MEDIA_TRACK_TYPE_AUDIO
+            || trackType == MEDIA_TRACK_TYPE_VIDEO)) {
+        ATSParser::DiscontinuityType type = (formatChange && seeking)
+                ? ATSParser::DISCONTINUITY_FORMATCHANGE
+                : ATSParser::DISCONTINUITY_NONE;
+        track->mPackets->queueDiscontinuity(type, NULL /* extra */, true /* discard */);
+    }
+}
+
+void NuPlayer2::GenericSource::notifyBufferingUpdate(int32_t percentage) {
+    // Buffering percent could go backward as it's estimated from remaining
+    // data and last access time. This could cause the buffering position
+    // drawn on media control to jitter slightly. Remember previously reported
+    // percentage and don't allow it to go backward.
+    if (percentage < mPrevBufferPercentage) {
+        percentage = mPrevBufferPercentage;
+    } else if (percentage > 100) {
+        percentage = 100;
+    }
+
+    mPrevBufferPercentage = percentage;
+
+    ALOGV("notifyBufferingUpdate: buffering %d%%", percentage);
+
+    sp<AMessage> notify = dupNotify();
+    notify->setInt32("what", kWhatBufferingUpdate);
+    notify->setInt32("percentage", percentage);
+    notify->post();
+}
+
+void NuPlayer2::GenericSource::schedulePollBuffering() {
+    sp<AMessage> msg = new AMessage(kWhatPollBuffering, this);
+    msg->setInt32("generation", mPollBufferingGeneration);
+    // Poll the buffering status every second.
+    msg->post(1000000ll);
+}
+
+void NuPlayer2::GenericSource::onPollBuffering() {
+    status_t finalStatus = UNKNOWN_ERROR;
+    int64_t cachedDurationUs = -1ll;
+    ssize_t cachedDataRemaining = -1;
+
+    if (mCachedSource != NULL) {
+        cachedDataRemaining = mCachedSource->approxDataRemaining(&finalStatus);
+
+        if (finalStatus == OK) {
+            off64_t size;
+            int64_t bitrate = 0ll;
+            if (mDurationUs > 0 && mCachedSource->getSize(&size) == OK) {
+                // |bitrate| is in bits per second, while |size| is in bytes.
+                bitrate = size * 8000000ll / mDurationUs;
+            } else if (mBitrate > 0) {
+                bitrate = mBitrate;
+            }
+            if (bitrate > 0) {
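+                // remaining cached bytes -> microseconds of playback at the estimated bitrate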
+                cachedDurationUs = cachedDataRemaining * 8000000ll / bitrate;
+            }
+        }
+    }
+
+    if (finalStatus != OK) {
+        ALOGV("onPollBuffering: EOS (finalStatus = %d)", finalStatus);
+
+        if (finalStatus == ERROR_END_OF_STREAM) {
+            notifyBufferingUpdate(100);
+        }
+
+        return;
+    }
+
+    if (cachedDurationUs >= 0ll) {
+        if (mDurationUs > 0ll) {
+            int64_t cachedPosUs = getLastReadPosition() + cachedDurationUs;
+            int percentage = 100.0 * cachedPosUs / mDurationUs;
+            if (percentage > 100) {
+                percentage = 100;
+            }
+
+            notifyBufferingUpdate(percentage);
+        }
+
+        ALOGV("onPollBuffering: cachedDurationUs %.1f sec", cachedDurationUs / 1000000.0f);
+    }
+
+    schedulePollBuffering();
+}
+
+// Modular DRM
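+// prepareDrm() wraps the already-opened DRM session in an NDK AMediaCrypto object for the
+// decoders and reports, via notifyFlagsChanged(), whether a secure decoder (and a protected
+// screen) is required for the first track's mime type.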
+status_t NuPlayer2::GenericSource::prepareDrm(
+        const uint8_t uuid[16],
+        const Vector<uint8_t> &drmSessionId,
+        sp<AMediaCryptoWrapper> *outCrypto) {
+    Mutex::Autolock _l(mLock);
+    ALOGV("prepareDrm");
+
+    mIsDrmProtected = false;
+    mIsDrmReleased = false;
+    mIsSecure = false;
+
+    status_t status = OK;
+    sp<AMediaCryptoWrapper> crypto =
+        new AMediaCryptoWrapper(uuid, drmSessionId.array(), drmSessionId.size());
+    if (crypto == NULL) {
+        ALOGE("prepareDrm: failed to create crypto.");
+        return UNKNOWN_ERROR;
+    }
+    ALOGV("prepareDrm: crypto created for uuid: %s",
+            DrmUUID::toHexString(uuid).string());
+
+    *outCrypto = crypto;
+    // as long as there is an active crypto
+    mIsDrmProtected = true;
+
+    if (mMimes.size() == 0) {
+        status = UNKNOWN_ERROR;
+        ALOGE("prepareDrm: Unexpected. Must have at least one track. status: %d", status);
+        return status;
+    }
+
+    // The first mime in this list belongs either to the video track or to the first audio track.
+    const char *mime = mMimes[0].string();
+    mIsSecure = crypto->requiresSecureDecoderComponent(mime);
+    ALOGV("prepareDrm: requiresSecureDecoderComponent mime: %s  isSecure: %d",
+            mime, mIsSecure);
+
+    // Checking the member flags while in the looper to send out the notification.
+    // The legacy mDecryptHandle!=NULL check (for FLAG_PROTECTED) is equivalent to mIsDrmProtected.
+    notifyFlagsChanged(
+            (mIsSecure ? FLAG_SECURE : 0) |
+            // Setting "protected screen" only for L1: b/38390836
+            (mIsSecure ? FLAG_PROTECTED : 0) |
+            FLAG_CAN_PAUSE |
+            FLAG_CAN_SEEK_BACKWARD |
+            FLAG_CAN_SEEK_FORWARD |
+            FLAG_CAN_SEEK);
+
+    if (status == OK) {
+        ALOGV("prepareDrm: mCrypto: %p", outCrypto->get());
+        ALOGD("prepareDrm ret: %d ", status);
+    } else {
+        ALOGE("prepareDrm err: %d", status);
+    }
+    return status;
+}
+
+status_t NuPlayer2::GenericSource::releaseDrm() {
+    Mutex::Autolock _l(mLock);
+    ALOGV("releaseDrm");
+
+    if (mIsDrmProtected) {
+        mIsDrmProtected = false;
+        // to prevent returning any more buffers after stop/releaseDrm (b/37960096)
+        mIsDrmReleased = true;
+        ALOGV("releaseDrm: mIsDrmProtected is reset.");
+    } else {
+        ALOGE("releaseDrm: mIsDrmProtected is already false.");
+    }
+
+    return OK;
+}
+
+status_t NuPlayer2::GenericSource::checkDrmInfo()
+{
+    // clearing the flag at prepare in case the player is reused after stop/releaseDrm with the
+    // same source without being reset (called by prepareAsync/initFromDataSource)
+    mIsDrmReleased = false;
+
+    if (mFileMeta == NULL) {
+        ALOGI("checkDrmInfo: No metadata");
+        return OK; // let the caller respond accordingly
+    }
+
+    uint32_t type;
+    const void *pssh;
+    size_t psshsize;
+
+    if (!mFileMeta->findData(kKeyPssh, &type, &pssh, &psshsize)) {
+        ALOGV("checkDrmInfo: No PSSH");
+        return OK; // source without DRM info
+    }
+
+    Parcel parcel;
+    NuPlayer2Drm::retrieveDrmInfo(pssh, psshsize, &parcel);
+    ALOGV("checkDrmInfo: MEDIA2_DRM_INFO PSSH size: %d  Parcel size: %d  objects#: %d",
+          (int)psshsize, (int)parcel.dataSize(), (int)parcel.objectsCount());
+
+    if (parcel.dataSize() == 0) {
+        ALOGE("checkDrmInfo: Unexpected parcel size: 0");
+        return UNKNOWN_ERROR;
+    }
+
+    // A Parcel can't be passed in a message to the player, so convert Parcel->ABuffer here,
+    // pass it to the player's onSourceNotify, and convert it back to a Parcel there when
+    // calling the driver's notifyListener.
+    sp<ABuffer> drmInfoBuffer = ABuffer::CreateAsCopy(parcel.data(), parcel.dataSize());
+    notifyDrmInfo(drmInfoBuffer);
+
+    return OK;
+}
+
+void NuPlayer2::GenericSource::signalBufferReturned(MediaBuffer *buffer)
+{
+    //ALOGV("signalBufferReturned %p  refCount: %d", buffer, buffer->localRefcount());
+
+    buffer->setObserver(NULL);
+    buffer->release(); // this deletes the buffer since there is no longer an observer
+}
+
+}  // namespace android
diff --git a/media/libmedia/nuplayer2/GenericSource.h b/media/libmedia/nuplayer2/GenericSource.h
new file mode 100644
index 0000000..aedd8d9
--- /dev/null
+++ b/media/libmedia/nuplayer2/GenericSource.h
@@ -0,0 +1,249 @@
+/*
+ * Copyright 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef GENERIC_SOURCE_H_
+
+#define GENERIC_SOURCE_H_
+
+#include "NuPlayer2.h"
+#include "NuPlayer2Source.h"
+
+#include "ATSParser.h"
+
+#include <media/mediaplayer2.h>
+#include <media/stagefright/MediaBuffer.h>
+
+namespace android {
+
+class DecryptHandle;
+struct AnotherPacketSource;
+struct ARTSPController;
+class DataSource;
+class IDataSource;
+struct MediaHTTPService;
+struct MediaSource;
+class MediaBuffer;
+struct MediaClock;
+struct NuCachedSource2;
+
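+// GenericSource exposes local files, file descriptors and progressively downloaded streams
+// through the NuPlayer2::Source interface. A minimal, hypothetical usage sketch (|notify|,
+// |mediaClock|, |fd| and |length| are assumed to be set up by the caller):
+//
+//   sp<NuPlayer2::GenericSource> source =
+//           new NuPlayer2::GenericSource(notify, false /* uidValid */, 0 /* uid */, mediaClock);
+//   source->setDataSource(fd, 0 /* offset */, length);
+//   source->prepareAsync();   // completion is reported asynchronously via |notify|
+//   source->start();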
+struct NuPlayer2::GenericSource : public NuPlayer2::Source,
+                                 public MediaBufferObserver // Modular DRM
+{
+    GenericSource(const sp<AMessage> &notify, bool uidValid, uid_t uid,
+                  const sp<MediaClock> &mediaClock);
+
+    status_t setDataSource(
+            const sp<MediaHTTPService> &httpService,
+            const char *url,
+            const KeyedVector<String8, String8> *headers);
+
+    status_t setDataSource(int fd, int64_t offset, int64_t length);
+
+    status_t setDataSource(const sp<DataSource>& dataSource);
+
+    virtual status_t getBufferingSettings(
+            BufferingSettings* buffering /* nonnull */) override;
+    virtual status_t setBufferingSettings(const BufferingSettings& buffering) override;
+
+    virtual void prepareAsync();
+
+    virtual void start();
+    virtual void stop();
+    virtual void pause();
+    virtual void resume();
+
+    virtual void disconnect();
+
+    virtual status_t feedMoreTSData();
+
+    virtual sp<MetaData> getFileFormatMeta() const;
+
+    virtual status_t dequeueAccessUnit(bool audio, sp<ABuffer> *accessUnit);
+
+    virtual status_t getDuration(int64_t *durationUs);
+    virtual size_t getTrackCount() const;
+    virtual sp<AMessage> getTrackInfo(size_t trackIndex) const;
+    virtual ssize_t getSelectedTrack(media_track_type type) const;
+    virtual status_t selectTrack(size_t trackIndex, bool select, int64_t timeUs);
+    virtual status_t seekTo(
+        int64_t seekTimeUs,
+        MediaPlayer2SeekMode mode = MediaPlayer2SeekMode::SEEK_PREVIOUS_SYNC) override;
+
+    virtual status_t setBuffers(bool audio, Vector<MediaBuffer *> &buffers);
+
+    virtual bool isStreaming() const;
+
+    // Modular DRM
+    virtual void signalBufferReturned(MediaBuffer *buffer);
+
+    virtual status_t prepareDrm(
+            const uint8_t uuid[16],
+            const Vector<uint8_t> &drmSessionId,
+            sp<AMediaCryptoWrapper> *outCrypto);
+
+    virtual status_t releaseDrm();
+
+
+protected:
+    virtual ~GenericSource();
+
+    virtual void onMessageReceived(const sp<AMessage> &msg);
+
+    virtual sp<MetaData> getFormatMeta(bool audio);
+
+private:
+    enum {
+        kWhatPrepareAsync,
+        kWhatFetchSubtitleData,
+        kWhatFetchTimedTextData,
+        kWhatSendSubtitleData,
+        kWhatSendGlobalTimedTextData,
+        kWhatSendTimedTextData,
+        kWhatChangeAVSource,
+        kWhatPollBuffering,
+        kWhatSeek,
+        kWhatReadBuffer,
+        kWhatStart,
+        kWhatResume,
+        kWhatSecureDecodersInstantiated,
+    };
+
+    struct Track {
+        size_t mIndex;
+        sp<IMediaSource> mSource;
+        sp<AnotherPacketSource> mPackets;
+    };
+
+    Vector<sp<IMediaSource> > mSources;
+    Track mAudioTrack;
+    int64_t mAudioTimeUs;
+    int64_t mAudioLastDequeueTimeUs;
+    Track mVideoTrack;
+    int64_t mVideoTimeUs;
+    int64_t mVideoLastDequeueTimeUs;
+    Track mSubtitleTrack;
+    Track mTimedTextTrack;
+
+    BufferingSettings mBufferingSettings;
+    int32_t mPrevBufferPercentage;
+    int32_t mPollBufferingGeneration;
+    bool mSentPauseOnBuffering;
+
+    int32_t mAudioDataGeneration;
+    int32_t mVideoDataGeneration;
+    int32_t mFetchSubtitleDataGeneration;
+    int32_t mFetchTimedTextDataGeneration;
+    int64_t mDurationUs;
+    bool mAudioIsVorbis;
+    // Secure codec is required.
+    bool mIsSecure;
+    bool mIsStreaming;
+    bool mUIDValid;
+    uid_t mUID;
+    const sp<MediaClock> mMediaClock;
+    sp<MediaHTTPService> mHTTPService;
+    AString mUri;
+    KeyedVector<String8, String8> mUriHeaders;
+    int mFd;
+    int64_t mOffset;
+    int64_t mLength;
+
+    bool mDisconnected;
+    sp<DataSource> mDataSource;
+    sp<NuCachedSource2> mCachedSource;
+    sp<DataSource> mHttpSource;
+    sp<MetaData> mFileMeta;
+    bool mStarted;
+    bool mPreparing;
+    int64_t mBitrate;
+    uint32_t mPendingReadBufferTypes;
+    sp<ABuffer> mGlobalTimedText;
+
+    mutable Mutex mLock;
+
+    sp<ALooper> mLooper;
+
+    void resetDataSource();
+
+    status_t initFromDataSource();
+    int64_t getLastReadPosition();
+
+    void notifyPreparedAndCleanup(status_t err);
+    void onSecureDecodersInstantiated(status_t err);
+    void finishPrepareAsync();
+    status_t startSources();
+
+    void onSeek(const sp<AMessage>& msg);
+    status_t doSeek(int64_t seekTimeUs, MediaPlayer2SeekMode mode);
+
+    void onPrepareAsync();
+
+    void fetchTextData(
+            uint32_t what, media_track_type type,
+            int32_t curGen, const sp<AnotherPacketSource>& packets, const sp<AMessage>& msg);
+
+    void sendGlobalTextData(
+            uint32_t what,
+            int32_t curGen, sp<AMessage> msg);
+
+    void sendTextData(
+            uint32_t what, media_track_type type,
+            int32_t curGen, const sp<AnotherPacketSource>& packets, const sp<AMessage>& msg);
+
+    sp<ABuffer> mediaBufferToABuffer(
+            MediaBuffer *mbuf,
+            media_track_type trackType);
+
+    void postReadBuffer(media_track_type trackType);
+    void onReadBuffer(const sp<AMessage>& msg);
+    // When |mode| is MediaPlayer2SeekMode::SEEK_CLOSEST, the buffer read will
+    // include an item indicating that rendering should be skipped for all buffers
+    // with a timestamp earlier than |seekTimeUs|.
+    // For other modes, the buffer read omits that item to facilitate fast seeking.
+    void readBuffer(
+            media_track_type trackType,
+            int64_t seekTimeUs = -1ll,
+            MediaPlayer2SeekMode mode = MediaPlayer2SeekMode::SEEK_PREVIOUS_SYNC,
+            int64_t *actualTimeUs = NULL, bool formatChange = false);
+
+    void queueDiscontinuityIfNeeded(
+            bool seeking, bool formatChange, media_track_type trackType, Track *track);
+
+    void schedulePollBuffering();
+    void onPollBuffering();
+    void notifyBufferingUpdate(int32_t percentage);
+
+    void sendCacheStats();
+
+    sp<MetaData> getFormatMeta_l(bool audio);
+    int32_t getDataGeneration(media_track_type type) const;
+
+    // Modular DRM
+    // The source is DRM protected and is prepared for DRM.
+    bool mIsDrmProtected;
+    // releaseDrm has been processed.
+    bool mIsDrmReleased;
+    Vector<String8> mMimes;
+
+    status_t checkDrmInfo();
+
+    DISALLOW_EVIL_CONSTRUCTORS(GenericSource);
+};
+
+}  // namespace android
+
+#endif  // GENERIC_SOURCE_H_
diff --git a/media/libmedia/nuplayer2/HTTPLiveSource.cpp b/media/libmedia/nuplayer2/HTTPLiveSource.cpp
new file mode 100644
index 0000000..14b67cad
--- /dev/null
+++ b/media/libmedia/nuplayer2/HTTPLiveSource.cpp
@@ -0,0 +1,448 @@
+/*
+ * Copyright 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "HTTPLiveSource"
+#include <utils/Log.h>
+
+#include "HTTPLiveSource.h"
+
+#include "AnotherPacketSource.h"
+#include "LiveDataSource.h"
+
+#include <media/MediaHTTPService.h>
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/MediaErrors.h>
+#include <media/stagefright/MetaData.h>
+#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/Utils.h>
+
+// default buffer prepare/ready marks
+static const int kReadyMarkMs     = 5000;  // 5 seconds
+static const int kPrepareMarkMs   = 1500;  // 1.5 seconds
+
+namespace android {
+
+NuPlayer2::HTTPLiveSource::HTTPLiveSource(
+        const sp<AMessage> &notify,
+        const sp<MediaHTTPService> &httpService,
+        const char *url,
+        const KeyedVector<String8, String8> *headers)
+    : Source(notify),
+      mHTTPService(httpService),
+      mURL(url),
+      mFlags(0),
+      mFinalResult(OK),
+      mOffset(0),
+      mFetchSubtitleDataGeneration(0),
+      mFetchMetaDataGeneration(0),
+      mHasMetadata(false),
+      mMetadataSelected(false) {
+    mBufferingSettings.mInitialMarkMs = kPrepareMarkMs;
+    mBufferingSettings.mResumePlaybackMarkMs = kReadyMarkMs;
+    if (headers) {
+        mExtraHeaders = *headers;
+
+        ssize_t index =
+            mExtraHeaders.indexOfKey(String8("x-hide-urls-from-log"));
+
+        if (index >= 0) {
+            mFlags |= kFlagIncognito;
+
+            mExtraHeaders.removeItemsAt(index);
+        }
+    }
+}
+
+NuPlayer2::HTTPLiveSource::~HTTPLiveSource() {
+    if (mLiveSession != NULL) {
+        mLiveSession->disconnect();
+
+        mLiveLooper->unregisterHandler(mLiveSession->id());
+        mLiveLooper->unregisterHandler(id());
+        mLiveLooper->stop();
+
+        mLiveSession.clear();
+        mLiveLooper.clear();
+    }
+}
+
+status_t NuPlayer2::HTTPLiveSource::getBufferingSettings(
+            BufferingSettings* buffering /* nonnull */) {
+    *buffering = mBufferingSettings;
+
+    return OK;
+}
+
+status_t NuPlayer2::HTTPLiveSource::setBufferingSettings(const BufferingSettings& buffering) {
+    mBufferingSettings = buffering;
+
+    if (mLiveSession != NULL) {
+        mLiveSession->setBufferingSettings(mBufferingSettings);
+    }
+
+    return OK;
+}
+
+void NuPlayer2::HTTPLiveSource::prepareAsync() {
+    if (mLiveLooper == NULL) {
+        mLiveLooper = new ALooper;
+        mLiveLooper->setName("http live");
+        mLiveLooper->start();
+
+        mLiveLooper->registerHandler(this);
+    }
+
+    sp<AMessage> notify = new AMessage(kWhatSessionNotify, this);
+
+    mLiveSession = new LiveSession(
+            notify,
+            (mFlags & kFlagIncognito) ? LiveSession::kFlagIncognito : 0,
+            mHTTPService);
+
+    mLiveLooper->registerHandler(mLiveSession);
+
+    mLiveSession->setBufferingSettings(mBufferingSettings);
+    mLiveSession->connectAsync(
+            mURL.c_str(), mExtraHeaders.isEmpty() ? NULL : &mExtraHeaders);
+}
+
+void NuPlayer2::HTTPLiveSource::start() {
+}
+
+sp<MetaData> NuPlayer2::HTTPLiveSource::getFormatMeta(bool audio) {
+    sp<MetaData> meta;
+    if (mLiveSession != NULL) {
+        mLiveSession->getStreamFormatMeta(
+                audio ? LiveSession::STREAMTYPE_AUDIO
+                      : LiveSession::STREAMTYPE_VIDEO,
+                &meta);
+    }
+
+    return meta;
+}
+
+sp<AMessage> NuPlayer2::HTTPLiveSource::getFormat(bool audio) {
+    sp<MetaData> meta;
+    status_t err = -EWOULDBLOCK;
+    if (mLiveSession != NULL) {
+        err = mLiveSession->getStreamFormatMeta(
+                audio ? LiveSession::STREAMTYPE_AUDIO
+                      : LiveSession::STREAMTYPE_VIDEO,
+                &meta);
+    }
+
+    sp<AMessage> format;
+    if (err == -EWOULDBLOCK) {
+        format = new AMessage();
+        format->setInt32("err", err);
+        return format;
+    }
+
+    if (err != OK || convertMetaDataToMessage(meta, &format) != OK) {
+        return NULL;
+    }
+    return format;
+}
+
+status_t NuPlayer2::HTTPLiveSource::feedMoreTSData() {
+    return OK;
+}
+
+status_t NuPlayer2::HTTPLiveSource::dequeueAccessUnit(
+        bool audio, sp<ABuffer> *accessUnit) {
+    return mLiveSession->dequeueAccessUnit(
+            audio ? LiveSession::STREAMTYPE_AUDIO
+                  : LiveSession::STREAMTYPE_VIDEO,
+            accessUnit);
+}
+
+status_t NuPlayer2::HTTPLiveSource::getDuration(int64_t *durationUs) {
+    return mLiveSession->getDuration(durationUs);
+}
+
+size_t NuPlayer2::HTTPLiveSource::getTrackCount() const {
+    return mLiveSession->getTrackCount();
+}
+
+sp<AMessage> NuPlayer2::HTTPLiveSource::getTrackInfo(size_t trackIndex) const {
+    return mLiveSession->getTrackInfo(trackIndex);
+}
+
+ssize_t NuPlayer2::HTTPLiveSource::getSelectedTrack(media_track_type type) const {
+    if (mLiveSession == NULL) {
+        return -1;
+    } else if (type == MEDIA_TRACK_TYPE_METADATA) {
+        // MEDIA_TRACK_TYPE_METADATA is always last track
+        // mMetadataSelected can only be true when mHasMetadata is true
+        return mMetadataSelected ? (mLiveSession->getTrackCount() - 1) : -1;
+    } else {
+        return mLiveSession->getSelectedTrack(type);
+    }
+}
+
+status_t NuPlayer2::HTTPLiveSource::selectTrack(size_t trackIndex, bool select, int64_t /*timeUs*/) {
+    if (mLiveSession == NULL) {
+        return INVALID_OPERATION;
+    }
+
+    status_t err = INVALID_OPERATION;
+    bool postFetchMsg = false, isSub = false;
+    if (!mHasMetadata || trackIndex != mLiveSession->getTrackCount() - 1) {
+        err = mLiveSession->selectTrack(trackIndex, select);
+        postFetchMsg = select;
+        isSub = true;
+    } else {
+        // metadata track; i.e. (mHasMetadata && trackIndex == mLiveSession->getTrackCount() - 1)
+        if (mMetadataSelected && !select) {
+            err = OK;
+        } else if (!mMetadataSelected && select) {
+            postFetchMsg = true;
+            err = OK;
+        } else {
+            err = BAD_VALUE; // behave as LiveSession::selectTrack
+        }
+
+        mMetadataSelected = select;
+    }
+
+    if (err == OK) {
+        int32_t &generation = isSub ? mFetchSubtitleDataGeneration : mFetchMetaDataGeneration;
+        generation++;
+        if (postFetchMsg) {
+            int32_t what = isSub ? kWhatFetchSubtitleData : kWhatFetchMetaData;
+            sp<AMessage> msg = new AMessage(what, this);
+            msg->setInt32("generation", generation);
+            msg->post();
+        }
+    }
+
+    // LiveSession::selectTrack returns BAD_VALUE when selecting the currently
+    // selected track, or unselecting a non-selected track. In that case it's a
+    // no-op, so we return OK.
+    return (err == OK || err == BAD_VALUE) ? (status_t)OK : err;
+}
+
+status_t NuPlayer2::HTTPLiveSource::seekTo(int64_t seekTimeUs, MediaPlayer2SeekMode mode) {
+    if (mLiveSession->isSeekable()) {
+        return mLiveSession->seekTo(seekTimeUs, mode);
+    } else {
+        return INVALID_OPERATION;
+    }
+}
+
+void NuPlayer2::HTTPLiveSource::pollForRawData(
+        const sp<AMessage> &msg, int32_t currentGeneration,
+        LiveSession::StreamType fetchType, int32_t pushWhat) {
+
+    int32_t generation;
+    CHECK(msg->findInt32("generation", &generation));
+
+    if (generation != currentGeneration) {
+        return;
+    }
+
+    sp<ABuffer> buffer;
+    while (mLiveSession->dequeueAccessUnit(fetchType, &buffer) == OK) {
+
+        sp<AMessage> notify = dupNotify();
+        notify->setInt32("what", pushWhat);
+        notify->setBuffer("buffer", buffer);
+
+        int64_t timeUs, baseUs, delayUs;
+        CHECK(buffer->meta()->findInt64("baseUs", &baseUs));
+        CHECK(buffer->meta()->findInt64("timeUs", &timeUs));
+        delayUs = baseUs + timeUs - ALooper::GetNowUs();
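+        // |baseUs| anchors the buffer's media time to the real-time clock, so |delayUs| is how
+        // far in the future this buffer is due to be delivered.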
+
+        if (fetchType == LiveSession::STREAMTYPE_SUBTITLES) {
+            notify->post();
+            msg->post(delayUs > 0ll ? delayUs : 0ll);
+            return;
+        } else if (fetchType == LiveSession::STREAMTYPE_METADATA) {
+            if (delayUs < -1000000ll) { // drop metadata that is more than 1 second late
+                continue;
+            }
+            notify->post();
+            // push all currently available metadata buffers in each invocation of pollForRawData
+            // continue;
+        } else {
+            TRESPASS();
+        }
+    }
+
+    // try again in 1 second
+    msg->post(1000000ll);
+}
+
+void NuPlayer2::HTTPLiveSource::onMessageReceived(const sp<AMessage> &msg) {
+    switch (msg->what()) {
+        case kWhatSessionNotify:
+        {
+            onSessionNotify(msg);
+            break;
+        }
+
+        case kWhatFetchSubtitleData:
+        {
+            pollForRawData(
+                    msg, mFetchSubtitleDataGeneration,
+                    /* fetch */ LiveSession::STREAMTYPE_SUBTITLES,
+                    /* push */ kWhatSubtitleData);
+
+            break;
+        }
+
+        case kWhatFetchMetaData:
+        {
+            if (!mMetadataSelected) {
+                break;
+            }
+
+            pollForRawData(
+                    msg, mFetchMetaDataGeneration,
+                    /* fetch */ LiveSession::STREAMTYPE_METADATA,
+                    /* push */ kWhatTimedMetaData);
+
+            break;
+        }
+
+        default:
+            Source::onMessageReceived(msg);
+            break;
+    }
+}
+
+void NuPlayer2::HTTPLiveSource::onSessionNotify(const sp<AMessage> &msg) {
+    int32_t what;
+    CHECK(msg->findInt32("what", &what));
+
+    switch (what) {
+        case LiveSession::kWhatPrepared:
+        {
+            // notify the current size here if we have it, otherwise report an initial size of (0,0)
+            sp<AMessage> format = getFormat(false /* audio */);
+            int32_t width;
+            int32_t height;
+            if (format != NULL &&
+                    format->findInt32("width", &width) && format->findInt32("height", &height)) {
+                notifyVideoSizeChanged(format);
+            } else {
+                notifyVideoSizeChanged();
+            }
+
+            uint32_t flags = 0;
+            if (mLiveSession->isSeekable()) {
+                flags |= FLAG_CAN_PAUSE;
+                flags |= FLAG_CAN_SEEK;
+                flags |= FLAG_CAN_SEEK_BACKWARD;
+                flags |= FLAG_CAN_SEEK_FORWARD;
+            }
+
+            if (mLiveSession->hasDynamicDuration()) {
+                flags |= FLAG_DYNAMIC_DURATION;
+            }
+
+            notifyFlagsChanged(flags);
+
+            notifyPrepared();
+            break;
+        }
+
+        case LiveSession::kWhatPreparationFailed:
+        {
+            status_t err;
+            CHECK(msg->findInt32("err", &err));
+
+            notifyPrepared(err);
+            break;
+        }
+
+        case LiveSession::kWhatStreamsChanged:
+        {
+            uint32_t changedMask;
+            CHECK(msg->findInt32(
+                        "changedMask", (int32_t *)&changedMask));
+
+            bool audio = changedMask & LiveSession::STREAMTYPE_AUDIO;
+            bool video = changedMask & LiveSession::STREAMTYPE_VIDEO;
+
+            sp<AMessage> reply;
+            CHECK(msg->findMessage("reply", &reply));
+
+            sp<AMessage> notify = dupNotify();
+            notify->setInt32("what", kWhatQueueDecoderShutdown);
+            notify->setInt32("audio", audio);
+            notify->setInt32("video", video);
+            notify->setMessage("reply", reply);
+            notify->post();
+            break;
+        }
+
+        case LiveSession::kWhatBufferingStart:
+        {
+            sp<AMessage> notify = dupNotify();
+            notify->setInt32("what", kWhatPauseOnBufferingStart);
+            notify->post();
+            break;
+        }
+
+        case LiveSession::kWhatBufferingEnd:
+        {
+            sp<AMessage> notify = dupNotify();
+            notify->setInt32("what", kWhatResumeOnBufferingEnd);
+            notify->post();
+            break;
+        }
+
+        case LiveSession::kWhatBufferingUpdate:
+        {
+            sp<AMessage> notify = dupNotify();
+            int32_t percentage;
+            CHECK(msg->findInt32("percentage", &percentage));
+            notify->setInt32("what", kWhatBufferingUpdate);
+            notify->setInt32("percentage", percentage);
+            notify->post();
+            break;
+        }
+
+        case LiveSession::kWhatMetadataDetected:
+        {
+            if (!mHasMetadata) {
+                mHasMetadata = true;
+
+                sp<AMessage> notify = dupNotify();
+                // notification without buffer triggers MEDIA2_INFO_METADATA_UPDATE
+                notify->setInt32("what", kWhatTimedMetaData);
+                notify->post();
+            }
+            break;
+        }
+
+        case LiveSession::kWhatError:
+        {
+            break;
+        }
+
+        default:
+            TRESPASS();
+    }
+}
+
+}  // namespace android
+
diff --git a/media/libmedia/nuplayer2/HTTPLiveSource.h b/media/libmedia/nuplayer2/HTTPLiveSource.h
new file mode 100644
index 0000000..7b6a312
--- /dev/null
+++ b/media/libmedia/nuplayer2/HTTPLiveSource.h
@@ -0,0 +1,99 @@
+/*
+ * Copyright 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef HTTP_LIVE_SOURCE_H_
+
+#define HTTP_LIVE_SOURCE_H_
+
+#include "NuPlayer2.h"
+#include "NuPlayer2Source.h"
+
+#include "LiveSession.h"
+
+namespace android {
+
+struct LiveSession;
+
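+// HTTPLiveSource adapts an HLS LiveSession to the NuPlayer2::Source interface. A minimal,
+// hypothetical usage sketch (|notify|, |httpService| and |url| are assumed to be set up by
+// the caller):
+//
+//   sp<NuPlayer2::HTTPLiveSource> source =
+//           new NuPlayer2::HTTPLiveSource(notify, httpService, url, NULL /* headers */);
+//   source->prepareAsync();   // kicks off LiveSession::connectAsync()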
+struct NuPlayer2::HTTPLiveSource : public NuPlayer2::Source {
+    HTTPLiveSource(
+            const sp<AMessage> &notify,
+            const sp<MediaHTTPService> &httpService,
+            const char *url,
+            const KeyedVector<String8, String8> *headers);
+
+    virtual status_t getBufferingSettings(
+            BufferingSettings* buffering /* nonnull */) override;
+    virtual status_t setBufferingSettings(const BufferingSettings& buffering) override;
+
+    virtual void prepareAsync();
+    virtual void start();
+
+    virtual status_t dequeueAccessUnit(bool audio, sp<ABuffer> *accessUnit);
+    virtual sp<MetaData> getFormatMeta(bool audio);
+    virtual sp<AMessage> getFormat(bool audio);
+
+    virtual status_t feedMoreTSData();
+    virtual status_t getDuration(int64_t *durationUs);
+    virtual size_t getTrackCount() const;
+    virtual sp<AMessage> getTrackInfo(size_t trackIndex) const;
+    virtual ssize_t getSelectedTrack(media_track_type /* type */) const;
+    virtual status_t selectTrack(size_t trackIndex, bool select, int64_t timeUs);
+    virtual status_t seekTo(
+            int64_t seekTimeUs,
+            MediaPlayer2SeekMode mode = MediaPlayer2SeekMode::SEEK_PREVIOUS_SYNC) override;
+
+protected:
+    virtual ~HTTPLiveSource();
+
+    virtual void onMessageReceived(const sp<AMessage> &msg);
+
+private:
+    enum Flags {
+        // Don't log any URLs.
+        kFlagIncognito = 1,
+    };
+
+    enum {
+        kWhatSessionNotify,
+        kWhatFetchSubtitleData,
+        kWhatFetchMetaData,
+    };
+
+    sp<MediaHTTPService> mHTTPService;
+    AString mURL;
+    KeyedVector<String8, String8> mExtraHeaders;
+    uint32_t mFlags;
+    status_t mFinalResult;
+    off64_t mOffset;
+    sp<ALooper> mLiveLooper;
+    sp<LiveSession> mLiveSession;
+    int32_t mFetchSubtitleDataGeneration;
+    int32_t mFetchMetaDataGeneration;
+    bool mHasMetadata;
+    bool mMetadataSelected;
+    BufferingSettings mBufferingSettings;
+
+    void onSessionNotify(const sp<AMessage> &msg);
+    void pollForRawData(
+            const sp<AMessage> &msg, int32_t currentGeneration,
+            LiveSession::StreamType fetchType, int32_t pushWhat);
+
+    DISALLOW_EVIL_CONSTRUCTORS(HTTPLiveSource);
+};
+
+}  // namespace android
+
+#endif  // HTTP_LIVE_SOURCE_H_
diff --git a/media/libmedia/nuplayer2/JWakeLock.cpp b/media/libmedia/nuplayer2/JWakeLock.cpp
new file mode 100644
index 0000000..c9a1071
--- /dev/null
+++ b/media/libmedia/nuplayer2/JWakeLock.cpp
@@ -0,0 +1,110 @@
+/*
+ * Copyright 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "JWakeLock"
+#include <utils/Log.h>
+
+#include "JWakeLock.h"
+
+#include <binder/IPCThreadState.h>
+#include <binder/IServiceManager.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <powermanager/PowerManager.h>
+
+
+namespace android {
+
+// TODO: use the Java PowerManager instead of binder
+JWakeLock::JWakeLock() :
+    mPowerManager(NULL),
+    mWakeLockToken(NULL),
+    mWakeLockCount(0),
+    mDeathRecipient(new PMDeathRecipient(this)) {}
+
+JWakeLock::~JWakeLock() {
+    if (mPowerManager != NULL) {
+        sp<IBinder> binder = IInterface::asBinder(mPowerManager);
+        binder->unlinkToDeath(mDeathRecipient);
+    }
+    clearPowerManager();
+}
+
+bool JWakeLock::acquire() {
+    if (mWakeLockCount == 0) {
+        CHECK(mWakeLockToken == NULL);
+        if (mPowerManager == NULL) {
+            // use checkService() to avoid blocking if power service is not up yet
+            sp<IBinder> binder =
+                defaultServiceManager()->checkService(String16("power"));
+            if (binder == NULL) {
+                ALOGW("could not get the power manager service");
+            } else {
+                mPowerManager = interface_cast<IPowerManager>(binder);
+                binder->linkToDeath(mDeathRecipient);
+            }
+        }
+        if (mPowerManager != NULL) {
+            sp<IBinder> binder = new BBinder();
+            int64_t token = IPCThreadState::self()->clearCallingIdentity();
+            status_t status = mPowerManager->acquireWakeLock(
+                    POWERMANAGER_PARTIAL_WAKE_LOCK,
+                    binder, String16("JWakeLock"), String16("media"));
+            IPCThreadState::self()->restoreCallingIdentity(token);
+            if (status == NO_ERROR) {
+                mWakeLockToken = binder;
+                mWakeLockCount++;
+                return true;
+            }
+        }
+    } else {
+        mWakeLockCount++;
+        return true;
+    }
+    return false;
+}
+
+void JWakeLock::release(bool force) {
+    if (mWakeLockCount == 0) {
+        return;
+    }
+    if (force) {
+        // Force wakelock release below by setting reference count to 1.
+        mWakeLockCount = 1;
+    }
+    if (--mWakeLockCount == 0) {
+        CHECK(mWakeLockToken != NULL);
+        if (mPowerManager != NULL) {
+            int64_t token = IPCThreadState::self()->clearCallingIdentity();
+            mPowerManager->releaseWakeLock(mWakeLockToken, 0 /* flags */);
+            IPCThreadState::self()->restoreCallingIdentity(token);
+        }
+        mWakeLockToken.clear();
+    }
+}
+
+void JWakeLock::clearPowerManager() {
+    release(true);
+    mPowerManager.clear();
+}
+
+void JWakeLock::PMDeathRecipient::binderDied(const wp<IBinder>& who __unused) {
+    if (mWakeLock != NULL) {
+        mWakeLock->clearPowerManager();
+    }
+}
+
+}  // namespace android
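JWakeLock reference-counts acquire()/release() around a single PowerManager partial wake lock, so only the first acquire and the last release actually cross the binder. A sketch of the intended calling pattern; the caller must serialize these calls itself, since acquire/release are not thread safe:

    sp<JWakeLock> wakeLock = new JWakeLock();

    if (wakeLock->acquire()) {     // first acquire takes the partial wake lock
        wakeLock->acquire();       // nested acquire only bumps the count
        // ... work that must keep the CPU awake ...
        wakeLock->release();       // count 2 -> 1, lock still held
        wakeLock->release();       // count 1 -> 0, releaseWakeLock() is issued
    }
    // release(true /* force */) drops the lock regardless of the current count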
diff --git a/media/libmedia/nuplayer2/JWakeLock.h b/media/libmedia/nuplayer2/JWakeLock.h
new file mode 100644
index 0000000..eace87e
--- /dev/null
+++ b/media/libmedia/nuplayer2/JWakeLock.h
@@ -0,0 +1,68 @@
+/*
+ * Copyright 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef J_WAKELOCK_H_
+#define J_WAKELOCK_H_
+
+#include <media/stagefright/foundation/ABase.h>
+#include <powermanager/IPowerManager.h>
+#include <utils/RefBase.h>
+
+namespace android {
+
+class JWakeLock : public RefBase {
+
+public:
+    JWakeLock();
+
+    // NOTE: acquire and release are not thread safe
+
+    // returns true if wakelock was acquired
+    bool acquire();
+    void release(bool force = false);
+
+    virtual ~JWakeLock();
+
+private:
+    sp<IPowerManager> mPowerManager;
+    sp<IBinder>       mWakeLockToken;
+    uint32_t          mWakeLockCount;
+
+    class PMDeathRecipient : public IBinder::DeathRecipient {
+    public:
+        explicit PMDeathRecipient(JWakeLock *wakeLock) : mWakeLock(wakeLock) {}
+        virtual ~PMDeathRecipient() {}
+
+        // IBinder::DeathRecipient
+        virtual void binderDied(const wp<IBinder> &who);
+
+    private:
+        PMDeathRecipient(const PMDeathRecipient&);
+        PMDeathRecipient& operator= (const PMDeathRecipient&);
+
+        JWakeLock *mWakeLock;
+    };
+
+    const sp<PMDeathRecipient> mDeathRecipient;
+
+    void clearPowerManager();
+
+    DISALLOW_EVIL_CONSTRUCTORS(JWakeLock);
+};
+
+}  // namespace android
+
+#endif  // J_WAKELOCK_H_
diff --git a/media/libmedia/nuplayer2/MODULE_LICENSE_APACHE2 b/media/libmedia/nuplayer2/MODULE_LICENSE_APACHE2
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/media/libmedia/nuplayer2/MODULE_LICENSE_APACHE2
diff --git a/media/libmedia/nuplayer2/NOTICE b/media/libmedia/nuplayer2/NOTICE
new file mode 100644
index 0000000..c5b1efa
--- /dev/null
+++ b/media/libmedia/nuplayer2/NOTICE
@@ -0,0 +1,190 @@
+
+   Copyright (c) 2005-2008, The Android Open Source Project
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+
+
+                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity. For the purposes of this definition,
+      "control" means (i) the power, direct or indirect, to cause the
+      direction or management of such entity, whether by contract or
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+      outstanding shares, or (iii) beneficial ownership of such entity.
+
+      "You" (or "Your") shall mean an individual or Legal Entity
+      exercising permissions granted by this License.
+
+      "Source" form shall mean the preferred form for making modifications,
+      including but not limited to software source code, documentation
+      source, and configuration files.
+
+      "Object" form shall mean any form resulting from mechanical
+      transformation or translation of a Source form, including but
+      not limited to compiled object code, generated documentation,
+      and conversions to other media types.
+
+      "Work" shall mean the work of authorship, whether in Source or
+      Object form, made available under the License, as indicated by a
+      copyright notice that is included in or attached to the work
+      (an example is provided in the Appendix below).
+
+      "Derivative Works" shall mean any work, whether in Source or Object
+      form, that is based on (or derived from) the Work and for which the
+      editorial revisions, annotations, elaborations, or other modifications
+      represent, as a whole, an original work of authorship. For the purposes
+      of this License, Derivative Works shall not include works that remain
+      separable from, or merely link (or bind by name) to the interfaces of,
+      the Work and Derivative Works thereof.
+
+      "Contribution" shall mean any work of authorship, including
+      the original version of the Work and any modifications or additions
+      to that Work or Derivative Works thereof, that is intentionally
+      submitted to Licensor for inclusion in the Work by the copyright owner
+      or by an individual or Legal Entity authorized to submit on behalf of
+      the copyright owner. For the purposes of this definition, "submitted"
+      means any form of electronic, verbal, or written communication sent
+      to the Licensor or its representatives, including but not limited to
+      communication on electronic mailing lists, source code control systems,
+      and issue tracking systems that are managed by, or on behalf of, the
+      Licensor for the purpose of discussing and improving the Work, but
+      excluding communication that is conspicuously marked or otherwise
+      designated in writing by the copyright owner as "Not a Contribution."
+
+      "Contributor" shall mean Licensor and any individual or Legal Entity
+      on behalf of whom a Contribution has been received by Licensor and
+      subsequently incorporated within the Work.
+
+   2. Grant of Copyright License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      copyright license to reproduce, prepare Derivative Works of,
+      publicly display, publicly perform, sublicense, and distribute the
+      Work and such Derivative Works in Source or Object form.
+
+   3. Grant of Patent License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      (except as stated in this section) patent license to make, have made,
+      use, offer to sell, sell, import, and otherwise transfer the Work,
+      where such license applies only to those patent claims licensable
+      by such Contributor that are necessarily infringed by their
+      Contribution(s) alone or by combination of their Contribution(s)
+      with the Work to which such Contribution(s) was submitted. If You
+      institute patent litigation against any entity (including a
+      cross-claim or counterclaim in a lawsuit) alleging that the Work
+      or a Contribution incorporated within the Work constitutes direct
+      or contributory patent infringement, then any patent licenses
+      granted to You under this License for that Work shall terminate
+      as of the date such litigation is filed.
+
+   4. Redistribution. You may reproduce and distribute copies of the
+      Work or Derivative Works thereof in any medium, with or without
+      modifications, and in Source or Object form, provided that You
+      meet the following conditions:
+
+      (a) You must give any other recipients of the Work or
+          Derivative Works a copy of this License; and
+
+      (b) You must cause any modified files to carry prominent notices
+          stating that You changed the files; and
+
+      (c) You must retain, in the Source form of any Derivative Works
+          that You distribute, all copyright, patent, trademark, and
+          attribution notices from the Source form of the Work,
+          excluding those notices that do not pertain to any part of
+          the Derivative Works; and
+
+      (d) If the Work includes a "NOTICE" text file as part of its
+          distribution, then any Derivative Works that You distribute must
+          include a readable copy of the attribution notices contained
+          within such NOTICE file, excluding those notices that do not
+          pertain to any part of the Derivative Works, in at least one
+          of the following places: within a NOTICE text file distributed
+          as part of the Derivative Works; within the Source form or
+          documentation, if provided along with the Derivative Works; or,
+          within a display generated by the Derivative Works, if and
+          wherever such third-party notices normally appear. The contents
+          of the NOTICE file are for informational purposes only and
+          do not modify the License. You may add Your own attribution
+          notices within Derivative Works that You distribute, alongside
+          or as an addendum to the NOTICE text from the Work, provided
+          that such additional attribution notices cannot be construed
+          as modifying the License.
+
+      You may add Your own copyright statement to Your modifications and
+      may provide additional or different license terms and conditions
+      for use, reproduction, or distribution of Your modifications, or
+      for any such Derivative Works as a whole, provided Your use,
+      reproduction, and distribution of the Work otherwise complies with
+      the conditions stated in this License.
+
+   5. Submission of Contributions. Unless You explicitly state otherwise,
+      any Contribution intentionally submitted for inclusion in the Work
+      by You to the Licensor shall be under the terms and conditions of
+      this License, without any additional terms or conditions.
+      Notwithstanding the above, nothing herein shall supersede or modify
+      the terms of any separate license agreement you may have executed
+      with Licensor regarding such Contributions.
+
+   6. Trademarks. This License does not grant permission to use the trade
+      names, trademarks, service marks, or product names of the Licensor,
+      except as required for reasonable and customary use in describing the
+      origin of the Work and reproducing the content of the NOTICE file.
+
+   7. Disclaimer of Warranty. Unless required by applicable law or
+      agreed to in writing, Licensor provides the Work (and each
+      Contributor provides its Contributions) on an "AS IS" BASIS,
+      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+      implied, including, without limitation, any warranties or conditions
+      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+      PARTICULAR PURPOSE. You are solely responsible for determining the
+      appropriateness of using or redistributing the Work and assume any
+      risks associated with Your exercise of permissions under this License.
+
+   8. Limitation of Liability. In no event and under no legal theory,
+      whether in tort (including negligence), contract, or otherwise,
+      unless required by applicable law (such as deliberate and grossly
+      negligent acts) or agreed to in writing, shall any Contributor be
+      liable to You for damages, including any direct, indirect, special,
+      incidental, or consequential damages of any character arising as a
+      result of this License or out of the use or inability to use the
+      Work (including but not limited to damages for loss of goodwill,
+      work stoppage, computer failure or malfunction, or any and all
+      other commercial damages or losses), even if such Contributor
+      has been advised of the possibility of such damages.
+
+   9. Accepting Warranty or Additional Liability. While redistributing
+      the Work or Derivative Works thereof, You may choose to offer,
+      and charge a fee for, acceptance of support, warranty, indemnity,
+      or other liability obligations and/or rights consistent with this
+      License. However, in accepting such obligations, You may act only
+      on Your own behalf and on Your sole responsibility, not on behalf
+      of any other Contributor, and only if You agree to indemnify,
+      defend, and hold each Contributor harmless for any liability
+      incurred by, or claims asserted against, such Contributor by reason
+      of your accepting any such warranty or additional liability.
+
+   END OF TERMS AND CONDITIONS
+
diff --git a/media/libmedia/nuplayer2/NdkWrapper.cpp b/media/libmedia/nuplayer2/NdkWrapper.cpp
new file mode 100644
index 0000000..3c55c56
--- /dev/null
+++ b/media/libmedia/nuplayer2/NdkWrapper.cpp
@@ -0,0 +1,996 @@
+/*
+ * Copyright 2017, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "NdkWrapper"
+
+#include "NdkWrapper.h"
+
+#include <gui/Surface.h>
+#include <log/log.h>
+#include <media/NdkMediaCodec.h>
+#include <media/NdkMediaCrypto.h>
+#include <media/NdkMediaDrm.h>
+#include <media/NdkMediaFormat.h>
+#include <media/stagefright/MetaData.h>
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <utils/Errors.h>
+
+namespace android {
+
+static const size_t kAESBlockSize = 16;  // AES_BLOCK_SIZE
+
+static const char *AMediaFormatKeyGroupInt32[] = {
+    AMEDIAFORMAT_KEY_AAC_DRC_ATTENUATION_FACTOR,
+    AMEDIAFORMAT_KEY_AAC_DRC_BOOST_FACTOR,
+    AMEDIAFORMAT_KEY_AAC_DRC_HEAVY_COMPRESSION,
+    AMEDIAFORMAT_KEY_AAC_DRC_TARGET_REFERENCE_LEVEL,
+    AMEDIAFORMAT_KEY_AAC_ENCODED_TARGET_LEVEL,
+    AMEDIAFORMAT_KEY_AAC_MAX_OUTPUT_CHANNEL_COUNT,
+    AMEDIAFORMAT_KEY_AAC_PROFILE,
+    AMEDIAFORMAT_KEY_AAC_SBR_MODE,
+    AMEDIAFORMAT_KEY_AUDIO_SESSION_ID,
+    AMEDIAFORMAT_KEY_BITRATE_MODE,
+    AMEDIAFORMAT_KEY_BIT_RATE,
+    AMEDIAFORMAT_KEY_CAPTURE_RATE,
+    AMEDIAFORMAT_KEY_CHANNEL_COUNT,
+    AMEDIAFORMAT_KEY_CHANNEL_MASK,
+    AMEDIAFORMAT_KEY_COLOR_FORMAT,
+    AMEDIAFORMAT_KEY_COLOR_RANGE,
+    AMEDIAFORMAT_KEY_COLOR_STANDARD,
+    AMEDIAFORMAT_KEY_COLOR_TRANSFER,
+    AMEDIAFORMAT_KEY_COMPLEXITY,
+    AMEDIAFORMAT_KEY_FLAC_COMPRESSION_LEVEL,
+    AMEDIAFORMAT_KEY_GRID_COLS,
+    AMEDIAFORMAT_KEY_GRID_HEIGHT,
+    AMEDIAFORMAT_KEY_GRID_ROWS,
+    AMEDIAFORMAT_KEY_GRID_WIDTH,
+    AMEDIAFORMAT_KEY_HEIGHT,
+    AMEDIAFORMAT_KEY_INTRA_REFRESH_PERIOD,
+    AMEDIAFORMAT_KEY_IS_ADTS,
+    AMEDIAFORMAT_KEY_IS_AUTOSELECT,
+    AMEDIAFORMAT_KEY_IS_DEFAULT,
+    AMEDIAFORMAT_KEY_IS_FORCED_SUBTITLE,
+    AMEDIAFORMAT_KEY_LATENCY,
+    AMEDIAFORMAT_KEY_LEVEL,
+    AMEDIAFORMAT_KEY_MAX_HEIGHT,
+    AMEDIAFORMAT_KEY_MAX_INPUT_SIZE,
+    AMEDIAFORMAT_KEY_MAX_WIDTH,
+    AMEDIAFORMAT_KEY_PCM_ENCODING,
+    AMEDIAFORMAT_KEY_PRIORITY,
+    AMEDIAFORMAT_KEY_PROFILE,
+    AMEDIAFORMAT_KEY_PUSH_BLANK_BUFFERS_ON_STOP,
+    AMEDIAFORMAT_KEY_ROTATION,
+    AMEDIAFORMAT_KEY_SAMPLE_RATE,
+    AMEDIAFORMAT_KEY_SLICE_HEIGHT,
+    AMEDIAFORMAT_KEY_STRIDE,
+    AMEDIAFORMAT_KEY_TRACK_ID,
+    AMEDIAFORMAT_KEY_WIDTH,
+};
+
+static const char *AMediaFormatKeyGroupInt64[] = {
+    AMEDIAFORMAT_KEY_DURATION,
+    AMEDIAFORMAT_KEY_REPEAT_PREVIOUS_FRAME_AFTER,
+};
+
+static const char *AMediaFormatKeyGroupString[] = {
+    AMEDIAFORMAT_KEY_LANGUAGE,
+    AMEDIAFORMAT_KEY_MIME,
+    AMEDIAFORMAT_KEY_TEMPORAL_LAYERING,
+};
+
+static const char *AMediaFormatKeyGroupBuffer[] = {
+    AMEDIAFORMAT_KEY_HDR_STATIC_INFO,
+};
+
+static const char *AMediaFormatKeyGroupRect[] = {
+    AMEDIAFORMAT_KEY_DISPLAY_CROP,
+};
+
+static const char *AMediaFormatKeyGroupFloatInt32[] = {
+    AMEDIAFORMAT_KEY_FRAME_RATE,
+    AMEDIAFORMAT_KEY_I_FRAME_INTERVAL,
+    AMEDIAFORMAT_KEY_OPERATING_RATE,
+};
+
+static status_t translateErrorCode(media_status_t err) {
+    if (err == AMEDIA_OK) {
+        return OK;
+    } else if (err == AMEDIACODEC_INFO_TRY_AGAIN_LATER) {
+        return -EAGAIN;
+    }
+
+    ALOGE("ndk error code: %d", err);
+    return UNKNOWN_ERROR;
+}
+
+static int32_t translateActionCode(int32_t actionCode) {
+    if (AMediaCodecActionCode_isTransient(actionCode)) {
+        return ACTION_CODE_TRANSIENT;
+    } else if (AMediaCodecActionCode_isRecoverable(actionCode)) {
+        return ACTION_CODE_RECOVERABLE;
+    }
+    return ACTION_CODE_FATAL;
+}
+
+static CryptoPlugin::Mode translateToCryptoPluginMode(cryptoinfo_mode_t mode) {
+    CryptoPlugin::Mode ret = CryptoPlugin::kMode_Unencrypted;
+    switch (mode) {
+        case AMEDIACODECRYPTOINFO_MODE_AES_CTR: {
+            ret = CryptoPlugin::kMode_AES_CTR;
+            break;
+        }
+
+        case AMEDIACODECRYPTOINFO_MODE_AES_WV: {
+            ret = CryptoPlugin::kMode_AES_WV;
+            break;
+        }
+
+        case AMEDIACODECRYPTOINFO_MODE_AES_CBC: {
+            ret = CryptoPlugin::kMode_AES_CBC;
+            break;
+        }
+
+        default:
+            break;
+    }
+
+    return ret;
+}
+
+static cryptoinfo_mode_t translateToCryptoInfoMode(CryptoPlugin::Mode mode) {
+    cryptoinfo_mode_t ret = AMEDIACODECRYPTOINFO_MODE_CLEAR;
+    switch (mode) {
+        case CryptoPlugin::kMode_AES_CTR: {
+            ret = AMEDIACODECRYPTOINFO_MODE_AES_CTR;
+            break;
+        }
+
+        case CryptoPlugin::kMode_AES_WV: {
+            ret = AMEDIACODECRYPTOINFO_MODE_AES_WV;
+            break;
+        }
+
+        case CryptoPlugin::kMode_AES_CBC: {
+            ret = AMEDIACODECRYPTOINFO_MODE_AES_CBC;
+            break;
+        }
+
+        default:
+            break;
+    }
+
+    return ret;
+}
+
+
+//////////// AMediaFormatWrapper
+// static
+sp<AMediaFormatWrapper> AMediaFormatWrapper::Create(const sp<AMessage> &message) {
+    sp<AMediaFormatWrapper> aMediaFormat = new AMediaFormatWrapper();
+
+    for (size_t i = 0; i < message->countEntries(); ++i) {
+        AMessage::Type valueType;
+        const char *key = message->getEntryNameAt(i, &valueType);
+
+        switch (valueType) {
+            case AMessage::kTypeInt32: {
+                int32_t val;
+                if (!message->findInt32(key, &val)) {
+                    ALOGE("AMediaFormatWrapper::Create: error at item %zu", i);
+                    continue;
+                }
+                aMediaFormat->setInt32(key, val);
+                break;
+            }
+
+            case AMessage::kTypeInt64: {
+                int64_t val;
+                if (!message->findInt64(key, &val)) {
+                    ALOGE("AMediaFormatWrapper::Create: error at item %zu", i);
+                    continue;
+                }
+                aMediaFormat->setInt64(key, val);
+                break;
+            }
+
+            case AMessage::kTypeFloat: {
+                float val;
+                if (!message->findFloat(key, &val)) {
+                    ALOGE("AMediaFormatWrapper::Create: error at item %zu", i);
+                    continue;
+                }
+                aMediaFormat->setFloat(key, val);
+                break;
+            }
+
+            case AMessage::kTypeDouble: {
+                double val;
+                if (!message->findDouble(key, &val)) {
+                    ALOGE("AMediaFormatWrapper::Create: error at item %zu", i);
+                    continue;
+                }
+                aMediaFormat->setDouble(key, val);
+                break;
+            }
+
+            case AMessage::kTypeSize: {
+                size_t val;
+                if (!message->findSize(key, &val)) {
+                    ALOGE("AMediaFormatWrapper::Create: error at item %zu", i);
+                    continue;
+                }
+                aMediaFormat->setSize(key, val);
+                break;
+            }
+
+            case AMessage::kTypeRect: {
+                int32_t left, top, right, bottom;
+                if (!message->findRect(key, &left, &top, &right, &bottom)) {
+                    ALOGE("AMediaFormatWrapper::Create: error at item %zu", i);
+                    continue;
+                }
+                aMediaFormat->setRect(key, left, top, right, bottom);
+                break;
+            }
+
+            case AMessage::kTypeString: {
+                AString val;
+                if (!message->findString(key, &val)) {
+                    ALOGE("AMediaFormatWrapper::Create: error at item %zu", i);
+                    continue;
+                }
+                aMediaFormat->setString(key, val);
+                break;
+            }
+
+            case AMessage::kTypeBuffer: {
+                sp<ABuffer> val;
+                if (!message->findBuffer(key, &val)) {
+                    ALOGE("AMediaFormatWrapper::Create: error at item %zu", i);
+                    continue;
+                }
+                aMediaFormat->setBuffer(key, val->data(), val->size());
+                break;
+            }
+
+            default: {
+                break;
+            }
+        }
+    }
+
+    return aMediaFormat;
+}
+
+AMediaFormatWrapper::AMediaFormatWrapper() {
+    mAMediaFormat = AMediaFormat_new();
+}
+
+AMediaFormatWrapper::AMediaFormatWrapper(AMediaFormat *aMediaFormat)
+    : mAMediaFormat(aMediaFormat) {
+}
+
+AMediaFormatWrapper::~AMediaFormatWrapper() {
+    release();
+}
+
+status_t AMediaFormatWrapper::release() {
+    if (mAMediaFormat != NULL) {
+        media_status_t err = AMediaFormat_delete(mAMediaFormat);
+        mAMediaFormat = NULL;
+        return translateErrorCode(err);
+    }
+    return OK;
+}
+
+AMediaFormat *AMediaFormatWrapper::getAMediaFormat() const {
+    return mAMediaFormat;
+}
+
+sp<AMessage> AMediaFormatWrapper::toAMessage() const {
+    if (mAMediaFormat == NULL) {
+        return NULL;
+    }
+
+    sp<AMessage> msg = new AMessage;
+    for (auto& key : AMediaFormatKeyGroupInt32) {
+        int32_t val;
+        if (getInt32(key, &val)) {
+            msg->setInt32(key, val);
+        }
+    }
+    for (auto& key : AMediaFormatKeyGroupInt64) {
+        int64_t val;
+        if (getInt64(key, &val)) {
+            msg->setInt64(key, val);
+        }
+    }
+    for (auto& key : AMediaFormatKeyGroupString) {
+        AString val;
+        if (getString(key, &val)) {
+            msg->setString(key, val);
+        }
+    }
+    for (auto& key : AMediaFormatKeyGroupBuffer) {
+        void *data;
+        size_t size;
+        if (getBuffer(key, &data, &size)) {
+            sp<ABuffer> buffer = ABuffer::CreateAsCopy(data, size);
+            msg->setBuffer(key, buffer);
+        }
+    }
+    for (auto& key : AMediaFormatKeyGroupRect) {
+        int32_t left, top, right, bottom;
+        if (getRect(key, &left, &top, &right, &bottom)) {
+            msg->setRect(key, left, top, right, bottom);
+        }
+    }
+    for (auto& key : AMediaFormatKeyGroupFloatInt32) {
+        float valFloat;
+        if (getFloat(key, &valFloat)) {
+            msg->setFloat(key, valFloat);
+        } else {
+            int32_t valInt32;
+            if (getInt32(key, &valInt32)) {
+                msg->setFloat(key, (float)valInt32);
+            }
+        }
+    }
+    return msg;
+}
+
+const char* AMediaFormatWrapper::toString() const {
+    if (mAMediaFormat == NULL) {
+        return NULL;
+    }
+    return AMediaFormat_toString(mAMediaFormat);
+}
+
+bool AMediaFormatWrapper::getInt32(const char *name, int32_t *out) const {
+    if (mAMediaFormat == NULL) {
+        return false;
+    }
+    return AMediaFormat_getInt32(mAMediaFormat, name, out);
+}
+
+bool AMediaFormatWrapper::getInt64(const char *name, int64_t *out) const {
+    if (mAMediaFormat == NULL) {
+        return false;
+    }
+    return AMediaFormat_getInt64(mAMediaFormat, name, out);
+}
+
+bool AMediaFormatWrapper::getFloat(const char *name, float *out) const {
+    if (mAMediaFormat == NULL) {
+        return false;
+    }
+    return AMediaFormat_getFloat(mAMediaFormat, name, out);
+}
+
+bool AMediaFormatWrapper::getDouble(const char *name, double *out) const {
+    if (mAMediaFormat == NULL) {
+        return false;
+    }
+    return AMediaFormat_getDouble(mAMediaFormat, name, out);
+}
+
+bool AMediaFormatWrapper::getSize(const char *name, size_t *out) const {
+    if (mAMediaFormat == NULL) {
+        return false;
+    }
+    return AMediaFormat_getSize(mAMediaFormat, name, out);
+}
+
+bool AMediaFormatWrapper::getRect(
+        const char *name, int32_t *left, int32_t *top, int32_t *right, int32_t *bottom) const {
+    if (mAMediaFormat == NULL) {
+        return false;
+    }
+    return AMediaFormat_getRect(mAMediaFormat, name, left, top, right, bottom);
+}
+
+bool AMediaFormatWrapper::getBuffer(const char *name, void** data, size_t *outSize) const {
+    if (mAMediaFormat == NULL) {
+        return false;
+    }
+    return AMediaFormat_getBuffer(mAMediaFormat, name, data, outSize);
+}
+
+bool AMediaFormatWrapper::getString(const char *name, AString *out) const {
+    if (mAMediaFormat == NULL) {
+        return false;
+    }
+    const char *outChar = NULL;
+    bool ret = AMediaFormat_getString(mAMediaFormat, name, &outChar);
+    if (ret) {
+        *out = AString(outChar);
+    }
+    return ret;
+}
+
+void AMediaFormatWrapper::setInt32(const char* name, int32_t value) {
+    if (mAMediaFormat != NULL) {
+        AMediaFormat_setInt32(mAMediaFormat, name, value);
+    }
+}
+
+void AMediaFormatWrapper::setInt64(const char* name, int64_t value) {
+    if (mAMediaFormat != NULL) {
+        AMediaFormat_setInt64(mAMediaFormat, name, value);
+    }
+}
+
+void AMediaFormatWrapper::setFloat(const char* name, float value) {
+    if (mAMediaFormat != NULL) {
+        AMediaFormat_setFloat(mAMediaFormat, name, value);
+    }
+}
+
+void AMediaFormatWrapper::setDouble(const char* name, double value) {
+    if (mAMediaFormat != NULL) {
+        AMediaFormat_setDouble(mAMediaFormat, name, value);
+    }
+}
+
+void AMediaFormatWrapper::setSize(const char* name, size_t value) {
+    if (mAMediaFormat != NULL) {
+        AMediaFormat_setSize(mAMediaFormat, name, value);
+    }
+}
+
+void AMediaFormatWrapper::setRect(
+        const char* name, int32_t left, int32_t top, int32_t right, int32_t bottom) {
+    if (mAMediaFormat != NULL) {
+        AMediaFormat_setRect(mAMediaFormat, name, left, top, right, bottom);
+    }
+}
+
+void AMediaFormatWrapper::setString(const char* name, const AString &value) {
+    if (mAMediaFormat != NULL) {
+        AMediaFormat_setString(mAMediaFormat, name, value.c_str());
+    }
+}
+
+void AMediaFormatWrapper::setBuffer(const char* name, void* data, size_t size) {
+    if (mAMediaFormat != NULL) {
+        AMediaFormat_setBuffer(mAMediaFormat, name, data, size);
+    }
+}
+
+
+//////////// AMediaDrmWrapper
+AMediaDrmWrapper::AMediaDrmWrapper(const uint8_t uuid[16]) {
+    mAMediaDrm = AMediaDrm_createByUUID(uuid);
+}
+
+AMediaDrmWrapper::AMediaDrmWrapper(AMediaDrm *aMediaDrm)
+    : mAMediaDrm(aMediaDrm) {
+}
+
+AMediaDrmWrapper::~AMediaDrmWrapper() {
+    release();
+}
+
+status_t AMediaDrmWrapper::release() {
+    if (mAMediaDrm != NULL) {
+        AMediaDrm_release(mAMediaDrm);
+        mAMediaDrm = NULL;
+    }
+    return OK;
+}
+
+AMediaDrm *AMediaDrmWrapper::getAMediaDrm() const {
+    return mAMediaDrm;
+}
+
+// static
+bool AMediaDrmWrapper::isCryptoSchemeSupported(
+        const uint8_t uuid[16],
+        const char *mimeType) {
+    return AMediaDrm_isCryptoSchemeSupported(uuid, mimeType);
+}
+
+
+//////////// AMediaCryptoWrapper
+AMediaCryptoWrapper::AMediaCryptoWrapper(
+        const uint8_t uuid[16], const void *initData, size_t initDataSize) {
+    mAMediaCrypto = AMediaCrypto_new(uuid, initData, initDataSize);
+}
+
+AMediaCryptoWrapper::AMediaCryptoWrapper(AMediaCrypto *aMediaCrypto)
+    : mAMediaCrypto(aMediaCrypto) {
+}
+
+AMediaCryptoWrapper::~AMediaCryptoWrapper() {
+    release();
+}
+
+status_t AMediaCryptoWrapper::release() {
+    if (mAMediaCrypto != NULL) {
+        AMediaCrypto_delete(mAMediaCrypto);
+        mAMediaCrypto = NULL;
+    }
+    return OK;
+}
+
+AMediaCrypto *AMediaCryptoWrapper::getAMediaCrypto() const {
+    return mAMediaCrypto;
+}
+
+bool AMediaCryptoWrapper::isCryptoSchemeSupported(const uint8_t uuid[16]) {
+    if (mAMediaCrypto == NULL) {
+        return false;
+    }
+    return AMediaCrypto_isCryptoSchemeSupported(uuid);
+}
+
+bool AMediaCryptoWrapper::requiresSecureDecoderComponent(const char *mime) {
+    if (mAMediaCrypto == NULL) {
+        return false;
+    }
+    return AMediaCrypto_requiresSecureDecoderComponent(mime);
+}
+
+
+//////////// AMediaCodecCryptoInfoWrapper
+// static
+sp<AMediaCodecCryptoInfoWrapper> AMediaCodecCryptoInfoWrapper::Create(sp<MetaData> meta) {
+    if (meta == NULL) {
+        ALOGE("Create: Unexpected. No meta data for sample.");
+        return NULL;
+    }
+
+    uint32_t type;
+    const void *crypteddata;
+    size_t cryptedsize;
+
+    if (!meta->findData(kKeyEncryptedSizes, &type, &crypteddata, &cryptedsize)) {
+        return NULL;
+    }
+
+    int numSubSamples = cryptedsize / sizeof(size_t);
+
+    if (numSubSamples <= 0) {
+        ALOGE("Create: INVALID numSubSamples: %d", numSubSamples);
+        return NULL;
+    }
+
+    const void *cleardata;
+    size_t clearsize;
+    if (meta->findData(kKeyPlainSizes, &type, &cleardata, &clearsize)) {
+        if (clearsize != cryptedsize) {
+            // The two must be of the same length.
+            ALOGE("Create: mismatch cryptedsize: %zu != clearsize: %zu", cryptedsize, clearsize);
+            return NULL;
+        }
+    }
+
+    const void *key;
+    size_t keysize;
+    if (meta->findData(kKeyCryptoKey, &type, &key, &keysize)) {
+        if (keysize != kAESBlockSize) {
+            // Keys must be 16 bytes in length.
+            ALOGE("Create: Keys must be %zu bytes in length: %zu", kAESBlockSize, keysize);
+            return NULL;
+        }
+    }
+
+    const void *iv;
+    size_t ivsize;
+    if (meta->findData(kKeyCryptoIV, &type, &iv, &ivsize)) {
+        if (ivsize != kAESBlockSize) {
+            // IVs must be 16 bytes in length.
+            ALOGE("Create: IV must be %zu bytes in length: %zu", kAESBlockSize, ivsize);
+            return NULL;
+        }
+    }
+
+    int32_t mode;
+    if (!meta->findInt32(kKeyCryptoMode, &mode)) {
+        mode = CryptoPlugin::kMode_AES_CTR;
+    }
+
+    return new AMediaCodecCryptoInfoWrapper(
+            numSubSamples,
+            (uint8_t*) key,
+            (uint8_t*) iv,
+            (CryptoPlugin::Mode)mode,
+            (size_t*) cleardata,
+            (size_t*) crypteddata);
+}
+
+AMediaCodecCryptoInfoWrapper::AMediaCodecCryptoInfoWrapper(
+        int numsubsamples,
+        uint8_t key[16],
+        uint8_t iv[16],
+        CryptoPlugin::Mode mode,
+        size_t *clearbytes,
+        size_t *encryptedbytes) {
+    mAMediaCodecCryptoInfo =
+        AMediaCodecCryptoInfo_new(numsubsamples,
+                                  key,
+                                  iv,
+                                  translateToCryptoInfoMode(mode),
+                                  clearbytes,
+                                  encryptedbytes);
+}
+
+AMediaCodecCryptoInfoWrapper::AMediaCodecCryptoInfoWrapper(
+        AMediaCodecCryptoInfo *aMediaCodecCryptoInfo)
+    : mAMediaCodecCryptoInfo(aMediaCodecCryptoInfo) {
+}
+
+AMediaCodecCryptoInfoWrapper::~AMediaCodecCryptoInfoWrapper() {
+    release();
+}
+
+status_t AMediaCodecCryptoInfoWrapper::release() {
+    if (mAMediaCodecCryptoInfo != NULL) {
+        media_status_t err = AMediaCodecCryptoInfo_delete(mAMediaCodecCryptoInfo);
+        mAMediaCodecCryptoInfo = NULL;
+        return translateErrorCode(err);
+    }
+    return OK;
+}
+
+AMediaCodecCryptoInfo *AMediaCodecCryptoInfoWrapper::getAMediaCodecCryptoInfo() const {
+    return mAMediaCodecCryptoInfo;
+}
+
+void AMediaCodecCryptoInfoWrapper::setPattern(CryptoPlugin::Pattern *pattern) {
+    if (mAMediaCodecCryptoInfo == NULL || pattern == NULL) {
+        return;
+    }
+    cryptoinfo_pattern_t ndkPattern = {(int32_t)pattern->mEncryptBlocks,
+                                       (int32_t)pattern->mSkipBlocks };
+    return AMediaCodecCryptoInfo_setPattern(mAMediaCodecCryptoInfo, &ndkPattern);
+}
+
+size_t AMediaCodecCryptoInfoWrapper::getNumSubSamples() {
+    if (mAMediaCodecCryptoInfo == NULL) {
+        return 0;
+    }
+    return AMediaCodecCryptoInfo_getNumSubSamples(mAMediaCodecCryptoInfo);
+}
+
+status_t AMediaCodecCryptoInfoWrapper::getKey(uint8_t *dst) {
+    if (mAMediaCodecCryptoInfo == NULL) {
+        return DEAD_OBJECT;
+    }
+    if (dst == NULL) {
+        return BAD_VALUE;
+    }
+    return translateErrorCode(
+        AMediaCodecCryptoInfo_getKey(mAMediaCodecCryptoInfo, dst));
+}
+
+status_t AMediaCodecCryptoInfoWrapper::getIV(uint8_t *dst) {
+    if (mAMediaCodecCryptoInfo == NULL) {
+        return DEAD_OBJECT;
+    }
+    if (dst == NULL) {
+        return BAD_VALUE;
+    }
+    return translateErrorCode(
+        AMediaCodecCryptoInfo_getIV(mAMediaCodecCryptoInfo, dst));
+}
+
+CryptoPlugin::Mode AMediaCodecCryptoInfoWrapper::getMode() {
+    if (mAMediaCodecCryptoInfo == NULL) {
+        return CryptoPlugin::kMode_Unencrypted;
+    }
+    return translateToCryptoPluginMode(
+        AMediaCodecCryptoInfo_getMode(mAMediaCodecCryptoInfo));
+}
+
+status_t AMediaCodecCryptoInfoWrapper::getClearBytes(size_t *dst) {
+    if (mAMediaCodecCryptoInfo == NULL) {
+        return DEAD_OBJECT;
+    }
+    if (dst == NULL) {
+        return BAD_VALUE;
+    }
+    return translateErrorCode(
+        AMediaCodecCryptoInfo_getClearBytes(mAMediaCodecCryptoInfo, dst));
+}
+
+status_t AMediaCodecCryptoInfoWrapper::getEncryptedBytes(size_t *dst) {
+    if (mAMediaCodecCryptoInfo == NULL) {
+        return DEAD_OBJECT;
+    }
+    if (dst == NULL) {
+        return BAD_VALUE;
+    }
+    return translateErrorCode(
+        AMediaCodecCryptoInfo_getEncryptedBytes(mAMediaCodecCryptoInfo, dst));
+}
+
+
+//////////// AMediaCodecWrapper
+// static
+sp<AMediaCodecWrapper> AMediaCodecWrapper::CreateCodecByName(const AString &name) {
+    AMediaCodec *aMediaCodec = AMediaCodec_createCodecByName(name.c_str());
+    return new AMediaCodecWrapper(aMediaCodec);
+}
+
+// static
+sp<AMediaCodecWrapper> AMediaCodecWrapper::CreateDecoderByType(const AString &mimeType) {
+    AMediaCodec *aMediaCodec = AMediaCodec_createDecoderByType(mimeType.c_str());
+    return new AMediaCodecWrapper(aMediaCodec);
+}
+
+// static
+void AMediaCodecWrapper::OnInputAvailableCB(
+        AMediaCodec * /* aMediaCodec */,
+        void *userdata,
+        int32_t index) {
+    ALOGV("OnInputAvailableCB: index(%d)", index);
+    sp<AMessage> msg = sp<AMessage>((AMessage *)userdata)->dup();
+    msg->setInt32("callbackID", CB_INPUT_AVAILABLE);
+    msg->setInt32("index", index);
+    msg->post();
+}
+
+// static
+void AMediaCodecWrapper::OnOutputAvailableCB(
+        AMediaCodec * /* aMediaCodec */,
+        void *userdata,
+        int32_t index,
+        AMediaCodecBufferInfo *bufferInfo) {
+    ALOGV("OnOutputAvailableCB: index(%d), (%d, %d, %lld, 0x%x)",
+          index, bufferInfo->offset, bufferInfo->size,
+          (long long)bufferInfo->presentationTimeUs, bufferInfo->flags);
+    sp<AMessage> msg = sp<AMessage>((AMessage *)userdata)->dup();
+    msg->setInt32("callbackID", CB_OUTPUT_AVAILABLE);
+    msg->setInt32("index", index);
+    msg->setSize("offset", (size_t)(bufferInfo->offset));
+    msg->setSize("size", (size_t)(bufferInfo->size));
+    msg->setInt64("timeUs", bufferInfo->presentationTimeUs);
+    msg->setInt32("flags", (int32_t)(bufferInfo->flags));
+    msg->post();
+}
+
+// static
+void AMediaCodecWrapper::OnFormatChangedCB(
+        AMediaCodec * /* aMediaCodec */,
+        void *userdata,
+        AMediaFormat *format) {
+    sp<AMediaFormatWrapper> formatWrapper = new AMediaFormatWrapper(format);
+    sp<AMessage> outputFormat = formatWrapper->toAMessage();
+    ALOGV("OnFormatChangedCB: format(%s)", outputFormat->debugString().c_str());
+
+    sp<AMessage> msg = sp<AMessage>((AMessage *)userdata)->dup();
+    msg->setInt32("callbackID", CB_OUTPUT_FORMAT_CHANGED);
+    msg->setMessage("format", outputFormat);
+    msg->post();
+}
+
+// static
+void AMediaCodecWrapper::OnErrorCB(
+        AMediaCodec * /* aMediaCodec */,
+        void *userdata,
+        media_status_t err,
+        int32_t actionCode,
+        const char *detail) {
+    ALOGV("OnErrorCB: err(%d), actionCode(%d), detail(%s)", err, actionCode, detail);
+    sp<AMessage> msg = sp<AMessage>((AMessage *)userdata)->dup();
+    msg->setInt32("callbackID", CB_ERROR);
+    msg->setInt32("err", translateErrorCode(err));
+    msg->setInt32("actionCode", translateActionCode(actionCode));
+    msg->setString("detail", detail);
+    msg->post();
+}
+
+AMediaCodecWrapper::AMediaCodecWrapper(AMediaCodec *aMediaCodec)
+    : mAMediaCodec(aMediaCodec) {
+}
+
+AMediaCodecWrapper::~AMediaCodecWrapper() {
+    release();
+}
+
+status_t AMediaCodecWrapper::release() {
+    if (mAMediaCodec != NULL) {
+        AMediaCodecOnAsyncNotifyCallback aCB = {};
+        AMediaCodec_setAsyncNotifyCallback(mAMediaCodec, aCB, NULL);
+        mCallback = NULL;
+
+        media_status_t err = AMediaCodec_delete(mAMediaCodec);
+        mAMediaCodec = NULL;
+        return translateErrorCode(err);
+    }
+    return OK;
+}
+
+AMediaCodec *AMediaCodecWrapper::getAMediaCodec() const {
+    return mAMediaCodec;
+}
+
+status_t AMediaCodecWrapper::getName(AString *outComponentName) const {
+    if (mAMediaCodec == NULL) {
+        return DEAD_OBJECT;
+    }
+    char *name = NULL;
+    media_status_t err = AMediaCodec_getName(mAMediaCodec, &name);
+    if (err != AMEDIA_OK) {
+        return translateErrorCode(err);
+    }
+
+    *outComponentName = AString(name);
+    AMediaCodec_releaseName(mAMediaCodec, name);
+    return OK;
+}
+
+status_t AMediaCodecWrapper::configure(
+    const sp<AMediaFormatWrapper> &format,
+    const sp<Surface> &surface,
+    const sp<AMediaCryptoWrapper> &crypto,
+    uint32_t flags) {
+    if (mAMediaCodec == NULL) {
+        return DEAD_OBJECT;
+    }
+
+    media_status_t err = AMediaCodec_configure(
+            mAMediaCodec,
+            format->getAMediaFormat(),
+            surface.get(),
+            crypto == NULL ? NULL : crypto->getAMediaCrypto(),
+            flags);
+
+    return translateErrorCode(err);
+}
+
+status_t AMediaCodecWrapper::setCallback(const sp<AMessage> &callback) {
+    if (mAMediaCodec == NULL) {
+        return DEAD_OBJECT;
+    }
+
+    mCallback = callback;
+
+    AMediaCodecOnAsyncNotifyCallback aCB = {
+        OnInputAvailableCB,
+        OnOutputAvailableCB,
+        OnFormatChangedCB,
+        OnErrorCB
+    };
+
+    return translateErrorCode(
+            AMediaCodec_setAsyncNotifyCallback(mAMediaCodec, aCB, callback.get()));
+}
+
+status_t AMediaCodecWrapper::releaseCrypto() {
+    if (mAMediaCodec == NULL) {
+        return DEAD_OBJECT;
+    }
+    return translateErrorCode(AMediaCodec_releaseCrypto(mAMediaCodec));
+}
+
+status_t AMediaCodecWrapper::start() {
+    if (mAMediaCodec == NULL) {
+        return DEAD_OBJECT;
+    }
+    return translateErrorCode(AMediaCodec_start(mAMediaCodec));
+}
+
+status_t AMediaCodecWrapper::stop() {
+    if (mAMediaCodec == NULL) {
+        return DEAD_OBJECT;
+    }
+    return translateErrorCode(AMediaCodec_stop(mAMediaCodec));
+}
+
+status_t AMediaCodecWrapper::flush() {
+    if (mAMediaCodec == NULL) {
+        return DEAD_OBJECT;
+    }
+    return translateErrorCode(AMediaCodec_flush(mAMediaCodec));
+}
+
+uint8_t* AMediaCodecWrapper::getInputBuffer(size_t idx, size_t *out_size) {
+    if (mAMediaCodec == NULL) {
+        return NULL;
+    }
+    return AMediaCodec_getInputBuffer(mAMediaCodec, idx, out_size);
+}
+
+uint8_t* AMediaCodecWrapper::getOutputBuffer(size_t idx, size_t *out_size) {
+    if (mAMediaCodec == NULL) {
+        return NULL;
+    }
+    return AMediaCodec_getOutputBuffer(mAMediaCodec, idx, out_size);
+}
+
+status_t AMediaCodecWrapper::queueInputBuffer(
+        size_t idx,
+        size_t offset,
+        size_t size,
+        uint64_t time,
+        uint32_t flags) {
+    if (mAMediaCodec == NULL) {
+        return DEAD_OBJECT;
+    }
+    return translateErrorCode(
+        AMediaCodec_queueInputBuffer(mAMediaCodec, idx, offset, size, time, flags));
+}
+
+status_t AMediaCodecWrapper::queueSecureInputBuffer(
+        size_t idx,
+        size_t offset,
+        sp<AMediaCodecCryptoInfoWrapper> &codecCryptoInfo,
+        uint64_t time,
+        uint32_t flags) {
+    if (mAMediaCodec == NULL) {
+        return DEAD_OBJECT;
+    }
+    return translateErrorCode(
+        AMediaCodec_queueSecureInputBuffer(
+            mAMediaCodec,
+            idx,
+            offset,
+            codecCryptoInfo->getAMediaCodecCryptoInfo(),
+            time,
+            flags));
+}
+
+sp<AMediaFormatWrapper> AMediaCodecWrapper::getOutputFormat() {
+    if (mAMediaCodec == NULL) {
+        return NULL;
+    }
+    return new AMediaFormatWrapper(AMediaCodec_getOutputFormat(mAMediaCodec));
+}
+
+sp<AMediaFormatWrapper> AMediaCodecWrapper::getInputFormat() {
+    if (mAMediaCodec == NULL) {
+        return NULL;
+    }
+    return new AMediaFormatWrapper(AMediaCodec_getInputFormat(mAMediaCodec));
+}
+
+status_t AMediaCodecWrapper::releaseOutputBuffer(size_t idx, bool render) {
+    if (mAMediaCodec == NULL) {
+        return DEAD_OBJECT;
+    }
+    return translateErrorCode(
+        AMediaCodec_releaseOutputBuffer(mAMediaCodec, idx, render));
+}
+
+status_t AMediaCodecWrapper::setOutputSurface(const sp<Surface> &surface) {
+    if (mAMediaCodec == NULL) {
+        return DEAD_OBJECT;
+    }
+    return translateErrorCode(
+        AMediaCodec_setOutputSurface(mAMediaCodec, surface.get()));
+}
+
+status_t AMediaCodecWrapper::releaseOutputBufferAtTime(size_t idx, int64_t timestampNs) {
+    if (mAMediaCodec == NULL) {
+        return DEAD_OBJECT;
+    }
+    return translateErrorCode(
+        AMediaCodec_releaseOutputBufferAtTime(mAMediaCodec, idx, timestampNs));
+}
+
+status_t AMediaCodecWrapper::setParameters(const sp<AMediaFormatWrapper> &params) {
+    if (mAMediaCodec == NULL) {
+        return DEAD_OBJECT;
+    }
+    return translateErrorCode(
+        AMediaCodec_setParameters(mAMediaCodec, params->getAMediaFormat()));
+}
+
+}  // namespace android
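Together these wrappers let the decoder drive an NDK codec from an ALooper handler. A condensed sketch of the async flow; kWhatCodecNotify and the surrounding variables (mime, surface, crypto, timestamps) are assumptions for illustration only:

    sp<AMediaCodecWrapper> codec = AMediaCodecWrapper::CreateDecoderByType(mime);
    sp<AMediaFormatWrapper> format = AMediaFormatWrapper::Create(inputFormatMsg);
    codec->configure(format, surface, crypto /* may be NULL */, 0 /* flags */);
    codec->setCallback(new AMessage(kWhatCodecNotify, this));  // callbacks arrive as dup'd copies
    codec->start();

    // In onMessageReceived(), dispatch on the "callbackID" set by the static CBs above:
    int32_t cbID;
    CHECK(msg->findInt32("callbackID", &cbID));
    if (cbID == AMediaCodecWrapper::CB_INPUT_AVAILABLE) {
        int32_t index;
        CHECK(msg->findInt32("index", &index));
        size_t capacity;
        uint8_t *buffer = codec->getInputBuffer(index, &capacity);
        // fill |buffer| with an access unit, then:
        codec->queueInputBuffer(index, 0 /* offset */, filledSize, timeUs, 0 /* flags */);
    }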
diff --git a/media/libmedia/nuplayer2/NdkWrapper.h b/media/libmedia/nuplayer2/NdkWrapper.h
new file mode 100644
index 0000000..cf06f5e
--- /dev/null
+++ b/media/libmedia/nuplayer2/NdkWrapper.h
@@ -0,0 +1,257 @@
+/*
+ * Copyright 2017, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef NDK_WRAPPER_H_
+
+#define NDK_WRAPPER_H_
+
+#include <media/NdkMediaError.h>
+#include <media/hardware/CryptoAPI.h>
+#include <media/stagefright/foundation/ABase.h>
+#include <utils/Errors.h>
+#include <utils/RefBase.h>
+
+struct AMediaCodec;
+struct AMediaCodecBufferInfo;
+struct AMediaCodecCryptoInfo;
+struct AMediaCrypto;
+struct AMediaDrm;
+struct AMediaFormat;
+
+namespace android {
+
+struct AMessage;
+class MetaData;
+class Surface;
+
+struct AMediaFormatWrapper : public RefBase {
+    static sp<AMediaFormatWrapper> Create(const sp<AMessage> &message);
+
+    AMediaFormatWrapper();
+    AMediaFormatWrapper(AMediaFormat *aMediaFormat);
+
+    // the returned AMediaFormat is still owned by this wrapper.
+    AMediaFormat *getAMediaFormat() const;
+
+    sp<AMessage> toAMessage() const;
+    const char* toString() const;
+
+    status_t release();
+
+    bool getInt32(const char *name, int32_t *out) const;
+    bool getInt64(const char *name, int64_t *out) const;
+    bool getFloat(const char *name, float *out) const;
+    bool getDouble(const char *name, double *out) const;
+    bool getSize(const char *name, size_t *out) const;
+    bool getRect(const char *name,
+                 int32_t *left, int32_t *top, int32_t *right, int32_t *bottom) const;
+    bool getBuffer(const char *name, void** data, size_t *outsize) const;
+    bool getString(const char *name, AString *out) const;
+
+    void setInt32(const char* name, int32_t value);
+    void setInt64(const char* name, int64_t value);
+    void setFloat(const char* name, float value);
+    void setDouble(const char *name, double value);
+    void setSize(const char* name, size_t value);
+    void setRect(const char* name,
+                 int32_t left, int32_t top, int32_t right, int32_t bottom);
+    void setString(const char* name, const AString &value);
+    void setBuffer(const char* name, void* data, size_t size);
+
+protected:
+    virtual ~AMediaFormatWrapper();
+
+private:
+    AMediaFormat *mAMediaFormat;
+
+    DISALLOW_EVIL_CONSTRUCTORS(AMediaFormatWrapper);
+};
+
+struct AMediaDrmWrapper : public RefBase {
+    AMediaDrmWrapper(const uint8_t uuid[16]);
+    AMediaDrmWrapper(AMediaDrm *aMediaDrm);
+
+    // the returned AMediaDrm is still owned by this wrapper.
+    AMediaDrm *getAMediaDrm() const;
+
+    status_t release();
+
+    static bool isCryptoSchemeSupported(const uint8_t uuid[16], const char *mimeType);
+
+protected:
+    virtual ~AMediaDrmWrapper();
+
+private:
+    AMediaDrm *mAMediaDrm;
+
+    DISALLOW_EVIL_CONSTRUCTORS(AMediaDrmWrapper);
+};
+
+struct AMediaCryptoWrapper : public RefBase {
+    AMediaCryptoWrapper(const uint8_t uuid[16],
+                        const void *initData,
+                        size_t initDataSize);
+    AMediaCryptoWrapper(AMediaCrypto *aMediaCrypto);
+
+    // the returned AMediaCrypto is still owned by this wrapper.
+    AMediaCrypto *getAMediaCrypto() const;
+
+    status_t release();
+
+    bool isCryptoSchemeSupported(const uint8_t uuid[16]);
+
+    bool requiresSecureDecoderComponent(const char *mime);
+
+protected:
+    virtual ~AMediaCryptoWrapper();
+
+private:
+    AMediaCrypto *mAMediaCrypto;
+
+    DISALLOW_EVIL_CONSTRUCTORS(AMediaCryptoWrapper);
+};
+
+struct AMediaCodecCryptoInfoWrapper : public RefBase {
+    static sp<AMediaCodecCryptoInfoWrapper> Create(sp<MetaData> meta);
+
+    AMediaCodecCryptoInfoWrapper(int numsubsamples,
+                                 uint8_t key[16],
+                                 uint8_t iv[16],
+                                 CryptoPlugin::Mode mode,
+                                 size_t *clearbytes,
+                                 size_t *encryptedbytes);
+    AMediaCodecCryptoInfoWrapper(AMediaCodecCryptoInfo *aMediaCodecCryptoInfo);
+
+    // the returned AMediaCodecCryptoInfo is still owned by this wrapper.
+    AMediaCodecCryptoInfo *getAMediaCodecCryptoInfo() const;
+
+    status_t release();
+
+    void setPattern(CryptoPlugin::Pattern *pattern);
+
+    size_t getNumSubSamples();
+
+    status_t getKey(uint8_t *dst);
+
+    status_t getIV(uint8_t *dst);
+
+    CryptoPlugin::Mode getMode();
+
+    status_t getClearBytes(size_t *dst);
+
+    status_t getEncryptedBytes(size_t *dst);
+
+protected:
+    virtual ~AMediaCodecCryptoInfoWrapper();
+
+private:
+    AMediaCodecCryptoInfo *mAMediaCodecCryptoInfo;
+
+    DISALLOW_EVIL_CONSTRUCTORS(AMediaCodecCryptoInfoWrapper);
+};
+
+struct AMediaCodecWrapper : public RefBase {
+    enum {
+        CB_INPUT_AVAILABLE = 1,
+        CB_OUTPUT_AVAILABLE = 2,
+        CB_ERROR = 3,
+        CB_OUTPUT_FORMAT_CHANGED = 4,
+    };
+
+    static sp<AMediaCodecWrapper> CreateCodecByName(const AString &name);
+    static sp<AMediaCodecWrapper> CreateDecoderByType(const AString &mimeType);
+
+    static void OnInputAvailableCB(AMediaCodec *codec,
+                                   void *userdata,
+                                   int32_t index);
+    static void OnOutputAvailableCB(AMediaCodec *codec,
+                                    void *userdata,
+                                    int32_t index,
+                                    AMediaCodecBufferInfo *bufferInfo);
+    static void OnFormatChangedCB(AMediaCodec *codec,
+                                  void *userdata,
+                                  AMediaFormat *format);
+    static void OnErrorCB(AMediaCodec *codec,
+                          void *userdata,
+                          media_status_t err,
+                          int32_t actionCode,
+                          const char *detail);
+
+    AMediaCodecWrapper(AMediaCodec *aMediaCodec);
+
+    // the returned AMediaCodec is still owned by this wrapper.
+    AMediaCodec *getAMediaCodec() const;
+
+    status_t release();
+
+    status_t getName(AString* outComponentName) const;
+
+    status_t configure(
+            const sp<AMediaFormatWrapper> &format,
+            const sp<Surface> &surface,
+            const sp<AMediaCryptoWrapper> &crypto,
+            uint32_t flags);
+
+    status_t setCallback(const sp<AMessage> &callback);
+
+    status_t releaseCrypto();
+
+    status_t start();
+    status_t stop();
+    status_t flush();
+
+    uint8_t* getInputBuffer(size_t idx, size_t *out_size);
+    uint8_t* getOutputBuffer(size_t idx, size_t *out_size);
+
+    status_t queueInputBuffer(
+            size_t idx,
+            size_t offset,
+            size_t size,
+            uint64_t time,
+            uint32_t flags);
+
+    status_t queueSecureInputBuffer(
+            size_t idx,
+            size_t offset,
+            sp<AMediaCodecCryptoInfoWrapper> &codecCryptoInfo,
+            uint64_t time,
+            uint32_t flags);
+
+    sp<AMediaFormatWrapper> getOutputFormat();
+    sp<AMediaFormatWrapper> getInputFormat();
+
+    status_t releaseOutputBuffer(size_t idx, bool render);
+
+    status_t setOutputSurface(const sp<Surface> &surface);
+
+    status_t releaseOutputBufferAtTime(size_t idx, int64_t timestampNs);
+
+    status_t setParameters(const sp<AMediaFormatWrapper> &params);
+
+protected:
+    virtual ~AMediaCodecWrapper();
+
+private:
+    AMediaCodec *mAMediaCodec;
+
+    sp<AMessage> mCallback;
+
+    DISALLOW_EVIL_CONSTRUCTORS(AMediaCodecWrapper);
+};
+
+}  // namespace android
+
+#endif  // NDK_WRAPPER_H_
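AMediaFormatWrapper is the bridge between stagefright's AMessage-based formats and the NDK AMediaFormat. A small round-trip sketch with illustrative key values:

    sp<AMessage> msg = new AMessage;
    msg->setString(AMEDIAFORMAT_KEY_MIME, "video/avc");
    msg->setInt32(AMEDIAFORMAT_KEY_WIDTH, 1280);
    msg->setInt32(AMEDIAFORMAT_KEY_HEIGHT, 720);

    sp<AMediaFormatWrapper> fmt = AMediaFormatWrapper::Create(msg);
    ALOGV("as NDK format: %s", fmt->toString());   // backed by AMediaFormat_toString()

    // Conversion back only recovers keys listed in the AMediaFormatKeyGroup* tables:
    sp<AMessage> roundTrip = fmt->toAMessage();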
diff --git a/media/libmedia/nuplayer2/NuPlayer2.cpp b/media/libmedia/nuplayer2/NuPlayer2.cpp
new file mode 100644
index 0000000..aa0c8d1
--- /dev/null
+++ b/media/libmedia/nuplayer2/NuPlayer2.cpp
@@ -0,0 +1,3000 @@
+/*
+ * Copyright 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "NuPlayer2"
+
+#include <inttypes.h>
+
+#include <utils/Log.h>
+
+#include "NuPlayer2.h"
+
+#include "HTTPLiveSource.h"
+#include "NdkWrapper.h"
+#include "NuPlayer2CCDecoder.h"
+#include "NuPlayer2Decoder.h"
+#include "NuPlayer2DecoderBase.h"
+#include "NuPlayer2DecoderPassThrough.h"
+#include "NuPlayer2Driver.h"
+#include "NuPlayer2Renderer.h"
+#include "NuPlayer2Source.h"
+#include "RTSPSource.h"
+#include "StreamingSource.h"
+#include "GenericSource.h"
+#include "TextDescriptions.h"
+
+#include "ATSParser.h"
+
+#include <cutils/properties.h>
+
+#include <media/AudioParameter.h>
+#include <media/AudioResamplerPublic.h>
+#include <media/AVSyncSettings.h>
+#include <media/MediaCodecBuffer.h>
+
+#include <media/stagefright/foundation/hexdump.h>
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/foundation/avc_utils.h>
+#include <media/stagefright/MediaBuffer.h>
+#include <media/stagefright/MediaClock.h>
+#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/MediaErrors.h>
+#include <media/stagefright/MetaData.h>
+
+#include <gui/IGraphicBufferProducer.h>
+#include <gui/Surface.h>
+
+
+#include "ESDS.h"
+#include <media/stagefright/Utils.h>
+
+namespace android {
+
+static status_t sendMetaDataToHal(sp<MediaPlayer2Base::AudioSink>& sink,
+                                  const sp<MetaData>& meta) {
+    int32_t sampleRate = 0;
+    int32_t bitRate = 0;
+    int32_t channelMask = 0;
+    int32_t delaySamples = 0;
+    int32_t paddingSamples = 0;
+
+    AudioParameter param = AudioParameter();
+
+    if (meta->findInt32(kKeySampleRate, &sampleRate)) {
+        param.addInt(String8(AUDIO_OFFLOAD_CODEC_SAMPLE_RATE), sampleRate);
+    }
+    if (meta->findInt32(kKeyChannelMask, &channelMask)) {
+        param.addInt(String8(AUDIO_OFFLOAD_CODEC_NUM_CHANNEL), channelMask);
+    }
+    if (meta->findInt32(kKeyBitRate, &bitRate)) {
+        param.addInt(String8(AUDIO_OFFLOAD_CODEC_AVG_BIT_RATE), bitRate);
+    }
+    if (meta->findInt32(kKeyEncoderDelay, &delaySamples)) {
+        param.addInt(String8(AUDIO_OFFLOAD_CODEC_DELAY_SAMPLES), delaySamples);
+    }
+    if (meta->findInt32(kKeyEncoderPadding, &paddingSamples)) {
+        param.addInt(String8(AUDIO_OFFLOAD_CODEC_PADDING_SAMPLES), paddingSamples);
+    }
+
+    ALOGV("sendMetaDataToHal: bitRate %d, sampleRate %d, chanMask %d,"
+          "delaySample %d, paddingSample %d", bitRate, sampleRate,
+          channelMask, delaySamples, paddingSamples);
+
+    sink->setParameters(param.toString());
+    return OK;
+}
+
+
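+// Actions wrap work that must be deferred, e.g. until a pending decoder flush or
+// shutdown finishes; they are queued in mDeferredActions and executed by
+// processDeferredActions().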
+struct NuPlayer2::Action : public RefBase {
+    Action() {}
+
+    virtual void execute(NuPlayer2 *player) = 0;
+
+private:
+    DISALLOW_EVIL_CONSTRUCTORS(Action);
+};
+
+struct NuPlayer2::SeekAction : public Action {
+    explicit SeekAction(int64_t seekTimeUs, MediaPlayer2SeekMode mode)
+        : mSeekTimeUs(seekTimeUs),
+          mMode(mode) {
+    }
+
+    virtual void execute(NuPlayer2 *player) {
+        player->performSeek(mSeekTimeUs, mMode);
+    }
+
+private:
+    int64_t mSeekTimeUs;
+    MediaPlayer2SeekMode mMode;
+
+    DISALLOW_EVIL_CONSTRUCTORS(SeekAction);
+};
+
+struct NuPlayer2::ResumeDecoderAction : public Action {
+    explicit ResumeDecoderAction(bool needNotify)
+        : mNeedNotify(needNotify) {
+    }
+
+    virtual void execute(NuPlayer2 *player) {
+        player->performResumeDecoders(mNeedNotify);
+    }
+
+private:
+    bool mNeedNotify;
+
+    DISALLOW_EVIL_CONSTRUCTORS(ResumeDecoderAction);
+};
+
+struct NuPlayer2::SetSurfaceAction : public Action {
+    explicit SetSurfaceAction(const sp<Surface> &surface)
+        : mSurface(surface) {
+    }
+
+    virtual void execute(NuPlayer2 *player) {
+        player->performSetSurface(mSurface);
+    }
+
+private:
+    sp<Surface> mSurface;
+
+    DISALLOW_EVIL_CONSTRUCTORS(SetSurfaceAction);
+};
+
+struct NuPlayer2::FlushDecoderAction : public Action {
+    FlushDecoderAction(FlushCommand audio, FlushCommand video)
+        : mAudio(audio),
+          mVideo(video) {
+    }
+
+    virtual void execute(NuPlayer2 *player) {
+        player->performDecoderFlush(mAudio, mVideo);
+    }
+
+private:
+    FlushCommand mAudio;
+    FlushCommand mVideo;
+
+    DISALLOW_EVIL_CONSTRUCTORS(FlushDecoderAction);
+};
+
+struct NuPlayer2::PostMessageAction : public Action {
+    explicit PostMessageAction(const sp<AMessage> &msg)
+        : mMessage(msg) {
+    }
+
+    virtual void execute(NuPlayer2 *) {
+        mMessage->post();
+    }
+
+private:
+    sp<AMessage> mMessage;
+
+    DISALLOW_EVIL_CONSTRUCTORS(PostMessageAction);
+};
+
+// Use this if there's no state necessary to save in order to execute
+// the action.
+struct NuPlayer2::SimpleAction : public Action {
+    typedef void (NuPlayer2::*ActionFunc)();
+
+    explicit SimpleAction(ActionFunc func)
+        : mFunc(func) {
+    }
+
+    virtual void execute(NuPlayer2 *player) {
+        (player->*mFunc)();
+    }
+
+private:
+    ActionFunc mFunc;
+
+    DISALLOW_EVIL_CONSTRUCTORS(SimpleAction);
+};
+
+////////////////////////////////////////////////////////////////////////////////
+
+NuPlayer2::NuPlayer2(pid_t pid, const sp<MediaClock> &mediaClock)
+    : mUIDValid(false),
+      mPID(pid),
+      mMediaClock(mediaClock),
+      mSourceFlags(0),
+      mOffloadAudio(false),
+      mAudioDecoderGeneration(0),
+      mVideoDecoderGeneration(0),
+      mRendererGeneration(0),
+      mLastStartedPlayingTimeNs(0),
+      mPreviousSeekTimeUs(0),
+      mAudioEOS(false),
+      mVideoEOS(false),
+      mScanSourcesPending(false),
+      mScanSourcesGeneration(0),
+      mPollDurationGeneration(0),
+      mTimedTextGeneration(0),
+      mFlushingAudio(NONE),
+      mFlushingVideo(NONE),
+      mResumePending(false),
+      mVideoScalingMode(NATIVE_WINDOW_SCALING_MODE_SCALE_TO_WINDOW),
+      mPlaybackSettings(AUDIO_PLAYBACK_RATE_DEFAULT),
+      mVideoFpsHint(-1.f),
+      mStarted(false),
+      mPrepared(false),
+      mResetting(false),
+      mSourceStarted(false),
+      mAudioDecoderError(false),
+      mVideoDecoderError(false),
+      mPaused(false),
+      mPausedByClient(true),
+      mPausedForBuffering(false),
+      mIsDrmProtected(false),
+      mDataSourceType(DATA_SOURCE_TYPE_NONE) {
+    CHECK(mediaClock != NULL);
+    clearFlushComplete();
+}
+
+NuPlayer2::~NuPlayer2() {
+}
+
+void NuPlayer2::setUID(uid_t uid) {
+    mUIDValid = true;
+    mUID = uid;
+}
+
+void NuPlayer2::setDriver(const wp<NuPlayer2Driver> &driver) {
+    mDriver = driver;
+}
+
+void NuPlayer2::setDataSourceAsync(const sp<IStreamSource> &source) {
+    sp<AMessage> msg = new AMessage(kWhatSetDataSource, this);
+
+    sp<AMessage> notify = new AMessage(kWhatSourceNotify, this);
+
+    msg->setObject("source", new StreamingSource(notify, source));
+    msg->post();
+    mDataSourceType = DATA_SOURCE_TYPE_STREAM;
+}
+
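+// Heuristic: treat http(s)/file URLs ending in ".m3u8" (or otherwise containing
+// "m3u8") as HTTP live streaming playlists.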
+static bool IsHTTPLiveURL(const char *url) {
+    if (!strncasecmp("http://", url, 7)
+            || !strncasecmp("https://", url, 8)
+            || !strncasecmp("file://", url, 7)) {
+        size_t len = strlen(url);
+        if (len >= 5 && !strcasecmp(".m3u8", &url[len - 5])) {
+            return true;
+        }
+
+        if (strstr(url,"m3u8")) {
+            return true;
+        }
+    }
+
+    return false;
+}
+
+void NuPlayer2::setDataSourceAsync(
+        const sp<MediaHTTPService> &httpService,
+        const char *url,
+        const KeyedVector<String8, String8> *headers) {
+
+    sp<AMessage> msg = new AMessage(kWhatSetDataSource, this);
+    size_t len = strlen(url);
+
+    sp<AMessage> notify = new AMessage(kWhatSourceNotify, this);
+
+    sp<Source> source;
+    if (IsHTTPLiveURL(url)) {
+        source = new HTTPLiveSource(notify, httpService, url, headers);
+        ALOGV("setDataSourceAsync HTTPLiveSource %s", url);
+        mDataSourceType = DATA_SOURCE_TYPE_HTTP_LIVE;
+    } else if (!strncasecmp(url, "rtsp://", 7)) {
+        source = new RTSPSource(
+                notify, httpService, url, headers, mUIDValid, mUID);
+        ALOGV("setDataSourceAsync RTSPSource %s", url);
+        mDataSourceType = DATA_SOURCE_TYPE_RTSP;
+    } else if ((!strncasecmp(url, "http://", 7)
+                || !strncasecmp(url, "https://", 8))
+                    && ((len >= 4 && !strcasecmp(".sdp", &url[len - 4]))
+                    || strstr(url, ".sdp?"))) {
+        source = new RTSPSource(
+                notify, httpService, url, headers, mUIDValid, mUID, true);
+        ALOGV("setDataSourceAsync RTSPSource http/https/.sdp %s", url);
+        mDataSourceType = DATA_SOURCE_TYPE_RTSP;
+    } else {
+        ALOGV("setDataSourceAsync GenericSource %s", url);
+
+        sp<GenericSource> genericSource =
+                new GenericSource(notify, mUIDValid, mUID, mMediaClock);
+
+        status_t err = genericSource->setDataSource(httpService, url, headers);
+
+        if (err == OK) {
+            source = genericSource;
+        } else {
+            ALOGE("Failed to set data source!");
+        }
+
+        // regardless of success/failure
+        mDataSourceType = DATA_SOURCE_TYPE_GENERIC_URL;
+    }
+    msg->setObject("source", source);
+    msg->post();
+}
+
+void NuPlayer2::setDataSourceAsync(int fd, int64_t offset, int64_t length) {
+    sp<AMessage> msg = new AMessage(kWhatSetDataSource, this);
+
+    sp<AMessage> notify = new AMessage(kWhatSourceNotify, this);
+
+    sp<GenericSource> source =
+            new GenericSource(notify, mUIDValid, mUID, mMediaClock);
+
+    ALOGV("setDataSourceAsync fd %d/%lld/%lld source: %p",
+            fd, (long long)offset, (long long)length, source.get());
+
+    status_t err = source->setDataSource(fd, offset, length);
+
+    if (err != OK) {
+        ALOGE("Failed to set data source!");
+        source = NULL;
+    }
+
+    msg->setObject("source", source);
+    msg->post();
+    mDataSourceType = DATA_SOURCE_TYPE_GENERIC_FD;
+}
+
+void NuPlayer2::setDataSourceAsync(const sp<DataSource> &dataSource) {
+    sp<AMessage> msg = new AMessage(kWhatSetDataSource, this);
+    sp<AMessage> notify = new AMessage(kWhatSourceNotify, this);
+
+    sp<GenericSource> source = new GenericSource(notify, mUIDValid, mUID, mMediaClock);
+    status_t err = source->setDataSource(dataSource);
+
+    if (err != OK) {
+        ALOGE("Failed to set data source!");
+        source = NULL;
+    }
+
+    msg->setObject("source", source);
+    msg->post();
+    mDataSourceType = DATA_SOURCE_TYPE_MEDIA;
+}
+
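+// The buffering, playback and sync settings entry points below are synchronous
+// wrappers: they post an AMessage to the player's looper and block in
+// postAndAwaitResponse() until the handler replies with an "err" field.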
+status_t NuPlayer2::getBufferingSettings(
+        BufferingSettings *buffering /* nonnull */) {
+    sp<AMessage> msg = new AMessage(kWhatGetBufferingSettings, this);
+    sp<AMessage> response;
+    status_t err = msg->postAndAwaitResponse(&response);
+    if (err == OK && response != NULL) {
+        CHECK(response->findInt32("err", &err));
+        if (err == OK) {
+            readFromAMessage(response, buffering);
+        }
+    }
+    return err;
+}
+
+status_t NuPlayer2::setBufferingSettings(const BufferingSettings& buffering) {
+    sp<AMessage> msg = new AMessage(kWhatSetBufferingSettings, this);
+    writeToAMessage(msg, buffering);
+    sp<AMessage> response;
+    status_t err = msg->postAndAwaitResponse(&response);
+    if (err == OK && response != NULL) {
+        CHECK(response->findInt32("err", &err));
+    }
+    return err;
+}
+
+void NuPlayer2::prepareAsync() {
+    ALOGV("prepareAsync");
+
+    (new AMessage(kWhatPrepare, this))->post();
+}
+
+void NuPlayer2::setVideoSurfaceTextureAsync(
+        const sp<IGraphicBufferProducer> &bufferProducer) {
+    sp<AMessage> msg = new AMessage(kWhatSetVideoSurface, this);
+
+    if (bufferProducer == NULL) {
+        msg->setObject("surface", NULL);
+    } else {
+        msg->setObject("surface", new Surface(bufferProducer, true /* controlledByApp */));
+    }
+
+    msg->post();
+}
+
+void NuPlayer2::setAudioSink(const sp<MediaPlayer2Base::AudioSink> &sink) {
+    sp<AMessage> msg = new AMessage(kWhatSetAudioSink, this);
+    msg->setObject("sink", sink);
+    msg->post();
+}
+
+void NuPlayer2::start() {
+    (new AMessage(kWhatStart, this))->post();
+}
+
+status_t NuPlayer2::setPlaybackSettings(const AudioPlaybackRate &rate) {
+    // Do some cursory validation of the settings here; audio modes are
+    // only validated when set on the AudioSink.
+    if ((rate.mSpeed != 0.f && rate.mSpeed < AUDIO_TIMESTRETCH_SPEED_MIN)
+            || rate.mSpeed > AUDIO_TIMESTRETCH_SPEED_MAX
+            || rate.mPitch < AUDIO_TIMESTRETCH_SPEED_MIN
+            || rate.mPitch > AUDIO_TIMESTRETCH_SPEED_MAX) {
+        return BAD_VALUE;
+    }
+    sp<AMessage> msg = new AMessage(kWhatConfigPlayback, this);
+    writeToAMessage(msg, rate);
+    sp<AMessage> response;
+    status_t err = msg->postAndAwaitResponse(&response);
+    if (err == OK && response != NULL) {
+        CHECK(response->findInt32("err", &err));
+    }
+    return err;
+}
+
+status_t NuPlayer2::getPlaybackSettings(AudioPlaybackRate *rate /* nonnull */) {
+    sp<AMessage> msg = new AMessage(kWhatGetPlaybackSettings, this);
+    sp<AMessage> response;
+    status_t err = msg->postAndAwaitResponse(&response);
+    if (err == OK && response != NULL) {
+        CHECK(response->findInt32("err", &err));
+        if (err == OK) {
+            readFromAMessage(response, rate);
+        }
+    }
+    return err;
+}
+
+status_t NuPlayer2::setSyncSettings(const AVSyncSettings &sync, float videoFpsHint) {
+    sp<AMessage> msg = new AMessage(kWhatConfigSync, this);
+    writeToAMessage(msg, sync, videoFpsHint);
+    sp<AMessage> response;
+    status_t err = msg->postAndAwaitResponse(&response);
+    if (err == OK && response != NULL) {
+        CHECK(response->findInt32("err", &err));
+    }
+    return err;
+}
+
+status_t NuPlayer2::getSyncSettings(
+        AVSyncSettings *sync /* nonnull */, float *videoFps /* nonnull */) {
+    sp<AMessage> msg = new AMessage(kWhatGetSyncSettings, this);
+    sp<AMessage> response;
+    status_t err = msg->postAndAwaitResponse(&response);
+    if (err == OK && response != NULL) {
+        CHECK(response->findInt32("err", &err));
+        if (err == OK) {
+            readFromAMessage(response, sync, videoFps);
+        }
+    }
+    return err;
+}
+
+void NuPlayer2::pause() {
+    (new AMessage(kWhatPause, this))->post();
+}
+
+void NuPlayer2::resetAsync() {
+    sp<Source> source;
+    {
+        Mutex::Autolock autoLock(mSourceLock);
+        source = mSource;
+    }
+
+    if (source != NULL) {
+        // During a reset, the data source might already be unresponsive; we need to
+        // disconnect explicitly so that reads exit promptly.
+        // We can't queue the disconnect request on the looper, as it might be
+        // queued behind a stuck read and never get processed.
+        // Doing the disconnect outside the looper allows the pending reads to exit
+        // (either successfully or with an error).
+        source->disconnect();
+    }
+
+    (new AMessage(kWhatReset, this))->post();
+}
+
+status_t NuPlayer2::notifyAt(int64_t mediaTimeUs) {
+    sp<AMessage> notify = new AMessage(kWhatNotifyTime, this);
+    notify->setInt64("timerUs", mediaTimeUs);
+    mMediaClock->addTimer(notify, mediaTimeUs);
+    return OK;
+}
+
+void NuPlayer2::seekToAsync(int64_t seekTimeUs, MediaPlayer2SeekMode mode, bool needNotify) {
+    sp<AMessage> msg = new AMessage(kWhatSeek, this);
+    msg->setInt64("seekTimeUs", seekTimeUs);
+    msg->setInt32("mode", mode);
+    msg->setInt32("needNotify", needNotify);
+    msg->post();
+}
+
+
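+// Serialize one track's info into the reply Parcel: a non-zero field count, the
+// track type, mime and language strings, and, for subtitle tracks, the
+// auto/default/forced flags.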
+void NuPlayer2::writeTrackInfo(
+        Parcel* reply, const sp<AMessage>& format) const {
+    if (format == NULL) {
+        ALOGE("NULL format");
+        return;
+    }
+    int32_t trackType;
+    if (!format->findInt32("type", &trackType)) {
+        ALOGE("no track type");
+        return;
+    }
+
+    AString mime;
+    if (!format->findString("mime", &mime)) {
+        // Java MediaPlayer only uses the mimetype for subtitle and timedtext tracks.
+        // If we can't find the mimetype here, it won't be needed on the Java end
+        // either. We still write a placeholder mime to keep the (de)serialization
+        // logic simple.
+        if (trackType == MEDIA_TRACK_TYPE_AUDIO) {
+            mime = "audio/";
+        } else if (trackType == MEDIA_TRACK_TYPE_VIDEO) {
+            mime = "video/";
+        } else {
+            ALOGE("unknown track type: %d", trackType);
+            return;
+        }
+    }
+
+    AString lang;
+    if (!format->findString("language", &lang)) {
+        ALOGE("no language");
+        return;
+    }
+
+    reply->writeInt32(2); // write something non-zero
+    reply->writeInt32(trackType);
+    reply->writeString16(String16(mime.c_str()));
+    reply->writeString16(String16(lang.c_str()));
+
+    if (trackType == MEDIA_TRACK_TYPE_SUBTITLE) {
+        int32_t isAuto, isDefault, isForced;
+        CHECK(format->findInt32("auto", &isAuto));
+        CHECK(format->findInt32("default", &isDefault));
+        CHECK(format->findInt32("forced", &isForced));
+
+        reply->writeInt32(isAuto);
+        reply->writeInt32(isDefault);
+        reply->writeInt32(isForced);
+    }
+}
+
+void NuPlayer2::onMessageReceived(const sp<AMessage> &msg) {
+    switch (msg->what()) {
+        case kWhatSetDataSource:
+        {
+            ALOGV("kWhatSetDataSource");
+
+            CHECK(mSource == NULL);
+
+            status_t err = OK;
+            sp<RefBase> obj;
+            CHECK(msg->findObject("source", &obj));
+            if (obj != NULL) {
+                Mutex::Autolock autoLock(mSourceLock);
+                mSource = static_cast<Source *>(obj.get());
+            } else {
+                err = UNKNOWN_ERROR;
+            }
+
+            CHECK(mDriver != NULL);
+            sp<NuPlayer2Driver> driver = mDriver.promote();
+            if (driver != NULL) {
+                driver->notifySetDataSourceCompleted(err);
+            }
+            break;
+        }
+
+        case kWhatGetBufferingSettings:
+        {
+            sp<AReplyToken> replyID;
+            CHECK(msg->senderAwaitsResponse(&replyID));
+
+            ALOGV("kWhatGetBufferingSettings");
+            BufferingSettings buffering;
+            status_t err = OK;
+            if (mSource != NULL) {
+                err = mSource->getBufferingSettings(&buffering);
+            } else {
+                err = INVALID_OPERATION;
+            }
+            sp<AMessage> response = new AMessage;
+            if (err == OK) {
+                writeToAMessage(response, buffering);
+            }
+            response->setInt32("err", err);
+            response->postReply(replyID);
+            break;
+        }
+
+        case kWhatSetBufferingSettings:
+        {
+            sp<AReplyToken> replyID;
+            CHECK(msg->senderAwaitsResponse(&replyID));
+
+            ALOGV("kWhatSetBufferingSettings");
+            BufferingSettings buffering;
+            readFromAMessage(msg, &buffering);
+            status_t err = OK;
+            if (mSource != NULL) {
+                err = mSource->setBufferingSettings(buffering);
+            } else {
+                err = INVALID_OPERATION;
+            }
+            sp<AMessage> response = new AMessage;
+            response->setInt32("err", err);
+            response->postReply(replyID);
+            break;
+        }
+
+        case kWhatPrepare:
+        {
+            ALOGV("onMessageReceived kWhatPrepare");
+
+            mSource->prepareAsync();
+            break;
+        }
+
+        case kWhatGetTrackInfo:
+        {
+            sp<AReplyToken> replyID;
+            CHECK(msg->senderAwaitsResponse(&replyID));
+
+            Parcel* reply;
+            CHECK(msg->findPointer("reply", (void**)&reply));
+
+            size_t inbandTracks = 0;
+            if (mSource != NULL) {
+                inbandTracks = mSource->getTrackCount();
+            }
+
+            size_t ccTracks = 0;
+            if (mCCDecoder != NULL) {
+                ccTracks = mCCDecoder->getTrackCount();
+            }
+
+            // total track count
+            reply->writeInt32(inbandTracks + ccTracks);
+
+            // write inband tracks
+            for (size_t i = 0; i < inbandTracks; ++i) {
+                writeTrackInfo(reply, mSource->getTrackInfo(i));
+            }
+
+            // write CC track
+            for (size_t i = 0; i < ccTracks; ++i) {
+                writeTrackInfo(reply, mCCDecoder->getTrackInfo(i));
+            }
+
+            sp<AMessage> response = new AMessage;
+            response->postReply(replyID);
+            break;
+        }
+
+        case kWhatGetSelectedTrack:
+        {
+            status_t err = INVALID_OPERATION;
+            if (mSource != NULL) {
+                err = OK;
+
+                int32_t type32;
+                CHECK(msg->findInt32("type", (int32_t*)&type32));
+                media_track_type type = (media_track_type)type32;
+                ssize_t selectedTrack = mSource->getSelectedTrack(type);
+
+                Parcel* reply;
+                CHECK(msg->findPointer("reply", (void**)&reply));
+                reply->writeInt32(selectedTrack);
+            }
+
+            sp<AMessage> response = new AMessage;
+            response->setInt32("err", err);
+
+            sp<AReplyToken> replyID;
+            CHECK(msg->senderAwaitsResponse(&replyID));
+            response->postReply(replyID);
+            break;
+        }
+
+        case kWhatSelectTrack:
+        {
+            sp<AReplyToken> replyID;
+            CHECK(msg->senderAwaitsResponse(&replyID));
+
+            size_t trackIndex;
+            int32_t select;
+            int64_t timeUs;
+            CHECK(msg->findSize("trackIndex", &trackIndex));
+            CHECK(msg->findInt32("select", &select));
+            CHECK(msg->findInt64("timeUs", &timeUs));
+
+            status_t err = INVALID_OPERATION;
+
+            size_t inbandTracks = 0;
+            if (mSource != NULL) {
+                inbandTracks = mSource->getTrackCount();
+            }
+            size_t ccTracks = 0;
+            if (mCCDecoder != NULL) {
+                ccTracks = mCCDecoder->getTrackCount();
+            }
+
+            if (trackIndex < inbandTracks) {
+                err = mSource->selectTrack(trackIndex, select, timeUs);
+
+                if (!select && err == OK) {
+                    int32_t type;
+                    sp<AMessage> info = mSource->getTrackInfo(trackIndex);
+                    if (info != NULL
+                            && info->findInt32("type", &type)
+                            && type == MEDIA_TRACK_TYPE_TIMEDTEXT) {
+                        ++mTimedTextGeneration;
+                    }
+                }
+            } else {
+                trackIndex -= inbandTracks;
+
+                if (trackIndex < ccTracks) {
+                    err = mCCDecoder->selectTrack(trackIndex, select);
+                }
+            }
+
+            sp<AMessage> response = new AMessage;
+            response->setInt32("err", err);
+
+            response->postReply(replyID);
+            break;
+        }
+
+        case kWhatPollDuration:
+        {
+            int32_t generation;
+            CHECK(msg->findInt32("generation", &generation));
+
+            if (generation != mPollDurationGeneration) {
+                // stale
+                break;
+            }
+
+            int64_t durationUs;
+            if (mDriver != NULL && mSource->getDuration(&durationUs) == OK) {
+                sp<NuPlayer2Driver> driver = mDriver.promote();
+                if (driver != NULL) {
+                    driver->notifyDuration(durationUs);
+                }
+            }
+
+            msg->post(1000000ll);  // poll again in a second.
+            break;
+        }
+
+        case kWhatSetVideoSurface:
+        {
+
+            sp<RefBase> obj;
+            CHECK(msg->findObject("surface", &obj));
+            sp<Surface> surface = static_cast<Surface *>(obj.get());
+
+            ALOGD("onSetVideoSurface(%p, %s video decoder)",
+                    surface.get(),
+                    (mSource != NULL && mStarted && mSource->getFormat(false /* audio */) != NULL
+                            && mVideoDecoder != NULL) ? "have" : "no");
+
+            // Need to check mStarted before calling mSource->getFormat because NuPlayer2 might
+            // still be preparing, in which case the call could take a long time.
+            // When mStarted is true, mSource must have been set.
+            if (mSource == NULL || !mStarted || mSource->getFormat(false /* audio */) == NULL
+                    // NOTE: mVideoDecoder's mSurface is always non-null
+                    || (mVideoDecoder != NULL && mVideoDecoder->setVideoSurface(surface) == OK)) {
+                performSetSurface(surface);
+                break;
+            }
+
+            mDeferredActions.push_back(
+                    new FlushDecoderAction(
+                            (obj != NULL ? FLUSH_CMD_FLUSH : FLUSH_CMD_NONE) /* audio */,
+                            FLUSH_CMD_SHUTDOWN /* video */));
+
+            mDeferredActions.push_back(new SetSurfaceAction(surface));
+
+            if (obj != NULL) {
+                if (mStarted) {
+                    // Issue a seek to refresh the video screen only if started; otherwise
+                    // the extractor may not yet be started and will assert.
+                    // If the video decoder is not set (perhaps this is audio only),
+                    // do not perform a seek as it is not needed.
+                    int64_t currentPositionUs = 0;
+                    if (getCurrentPosition(&currentPositionUs) == OK) {
+                        mDeferredActions.push_back(
+                                new SeekAction(currentPositionUs,
+                                        MediaPlayer2SeekMode::SEEK_PREVIOUS_SYNC /* mode */));
+                    }
+                }
+
+                // If there is a new surface texture, instantiate decoders
+                // again if possible.
+                mDeferredActions.push_back(
+                        new SimpleAction(&NuPlayer2::performScanSources));
+
+                // After a flush without shutdown, decoder is paused.
+                // Don't resume it until source seek is done, otherwise it could
+                // start pulling stale data too soon.
+                mDeferredActions.push_back(
+                        new ResumeDecoderAction(false /* needNotify */));
+            }
+
+            processDeferredActions();
+            break;
+        }
+
+        case kWhatSetAudioSink:
+        {
+            ALOGV("kWhatSetAudioSink");
+
+            sp<RefBase> obj;
+            CHECK(msg->findObject("sink", &obj));
+
+            mAudioSink = static_cast<MediaPlayer2Base::AudioSink *>(obj.get());
+            break;
+        }
+
+        case kWhatStart:
+        {
+            ALOGV("kWhatStart");
+            if (mStarted) {
+                // do not resume yet if the source is still buffering
+                if (!mPausedForBuffering) {
+                    onResume();
+                }
+            } else {
+                onStart();
+            }
+            mPausedByClient = false;
+            break;
+        }
+
+        case kWhatConfigPlayback:
+        {
+            sp<AReplyToken> replyID;
+            CHECK(msg->senderAwaitsResponse(&replyID));
+            AudioPlaybackRate rate /* sanitized */;
+            readFromAMessage(msg, &rate);
+            status_t err = OK;
+            if (mRenderer != NULL) {
+                // AudioSink allows only 1.f and 0.f speeds in offload mode.
+                // For any other speed, switch to non-offload mode.
+                if (mOffloadAudio && ((rate.mSpeed != 0.f && rate.mSpeed != 1.f)
+                        || rate.mPitch != 1.f)) {
+                    int64_t currentPositionUs;
+                    if (getCurrentPosition(&currentPositionUs) != OK) {
+                        currentPositionUs = mPreviousSeekTimeUs;
+                    }
+
+                    // Set mPlaybackSettings so that the new audio decoder can
+                    // be created correctly.
+                    mPlaybackSettings = rate;
+                    if (!mPaused) {
+                        mRenderer->pause();
+                    }
+                    restartAudio(
+                            currentPositionUs, true /* forceNonOffload */,
+                            true /* needsToCreateAudioDecoder */);
+                    if (!mPaused) {
+                        mRenderer->resume();
+                    }
+                }
+
+                err = mRenderer->setPlaybackSettings(rate);
+            }
+            if (err == OK) {
+                if (rate.mSpeed == 0.f) {
+                    onPause();
+                    mPausedByClient = true;
+                    // save all other settings (using non-paused speed)
+                    // so we can restore them on start
+                    AudioPlaybackRate newRate = rate;
+                    newRate.mSpeed = mPlaybackSettings.mSpeed;
+                    mPlaybackSettings = newRate;
+                } else { /* rate.mSpeed != 0.f */
+                    mPlaybackSettings = rate;
+                    if (mStarted) {
+                        // do not resume yet if the source is still buffering
+                        if (!mPausedForBuffering) {
+                            onResume();
+                        }
+                    } else if (mPrepared) {
+                        onStart();
+                    }
+
+                    mPausedByClient = false;
+                }
+            }
+
+            if (mVideoDecoder != NULL) {
+                sp<AMessage> params = new AMessage();
+                params->setFloat("playback-speed", mPlaybackSettings.mSpeed);
+                mVideoDecoder->setParameters(params);
+            }
+
+            sp<AMessage> response = new AMessage;
+            response->setInt32("err", err);
+            response->postReply(replyID);
+            break;
+        }
+
+        case kWhatGetPlaybackSettings:
+        {
+            sp<AReplyToken> replyID;
+            CHECK(msg->senderAwaitsResponse(&replyID));
+            AudioPlaybackRate rate = mPlaybackSettings;
+            status_t err = OK;
+            if (mRenderer != NULL) {
+                err = mRenderer->getPlaybackSettings(&rate);
+            }
+            if (err == OK) {
+                // Get the playback settings actually used by the renderer, as they
+                // may be slightly off because the AudioSink does not apply very small changes.
+                mPlaybackSettings = rate;
+                if (mPaused) {
+                    rate.mSpeed = 0.f;
+                }
+            }
+            sp<AMessage> response = new AMessage;
+            if (err == OK) {
+                writeToAMessage(response, rate);
+            }
+            response->setInt32("err", err);
+            response->postReply(replyID);
+            break;
+        }
+
+        case kWhatConfigSync:
+        {
+            sp<AReplyToken> replyID;
+            CHECK(msg->senderAwaitsResponse(&replyID));
+
+            ALOGV("kWhatConfigSync");
+            AVSyncSettings sync;
+            float videoFpsHint;
+            readFromAMessage(msg, &sync, &videoFpsHint);
+            status_t err = OK;
+            if (mRenderer != NULL) {
+                err = mRenderer->setSyncSettings(sync, videoFpsHint);
+            }
+            if (err == OK) {
+                mSyncSettings = sync;
+                mVideoFpsHint = videoFpsHint;
+            }
+            sp<AMessage> response = new AMessage;
+            response->setInt32("err", err);
+            response->postReply(replyID);
+            break;
+        }
+
+        case kWhatGetSyncSettings:
+        {
+            sp<AReplyToken> replyID;
+            CHECK(msg->senderAwaitsResponse(&replyID));
+            AVSyncSettings sync = mSyncSettings;
+            float videoFps = mVideoFpsHint;
+            status_t err = OK;
+            if (mRenderer != NULL) {
+                err = mRenderer->getSyncSettings(&sync, &videoFps);
+                if (err == OK) {
+                    mSyncSettings = sync;
+                    mVideoFpsHint = videoFps;
+                }
+            }
+            sp<AMessage> response = new AMessage;
+            if (err == OK) {
+                writeToAMessage(response, sync, videoFps);
+            }
+            response->setInt32("err", err);
+            response->postReply(replyID);
+            break;
+        }
+
+        case kWhatScanSources:
+        {
+            int32_t generation;
+            CHECK(msg->findInt32("generation", &generation));
+            if (generation != mScanSourcesGeneration) {
+                // Drop obsolete msg.
+                break;
+            }
+
+            mScanSourcesPending = false;
+
+            ALOGV("scanning sources haveAudio=%d, haveVideo=%d",
+                 mAudioDecoder != NULL, mVideoDecoder != NULL);
+
+            bool mHadAnySourcesBefore =
+                (mAudioDecoder != NULL) || (mVideoDecoder != NULL);
+            bool rescan = false;
+
+            // Initialize video before audio because successful initialization of
+            // video may change the deep-buffer mode of audio.
+            if (mSurface != NULL) {
+                if (instantiateDecoder(false, &mVideoDecoder) == -EWOULDBLOCK) {
+                    rescan = true;
+                }
+            }
+
+            // Don't try to re-open audio sink if there's an existing decoder.
+            if (mAudioSink != NULL && mAudioDecoder == NULL) {
+                if (instantiateDecoder(true, &mAudioDecoder) == -EWOULDBLOCK) {
+                    rescan = true;
+                }
+            }
+
+            if (!mHadAnySourcesBefore
+                    && (mAudioDecoder != NULL || mVideoDecoder != NULL)) {
+                // This is the first time we've found anything playable.
+
+                if (mSourceFlags & Source::FLAG_DYNAMIC_DURATION) {
+                    schedulePollDuration();
+                }
+            }
+
+            status_t err;
+            if ((err = mSource->feedMoreTSData()) != OK) {
+                if (mAudioDecoder == NULL && mVideoDecoder == NULL) {
+                    // We're not currently decoding anything (no audio or
+                    // video tracks found) and we just ran out of input data.
+
+                    if (err == ERROR_END_OF_STREAM) {
+                        notifyListener(MEDIA2_PLAYBACK_COMPLETE, 0, 0);
+                    } else {
+                        notifyListener(MEDIA2_ERROR, MEDIA2_ERROR_UNKNOWN, err);
+                    }
+                }
+                break;
+            }
+
+            if (rescan) {
+                msg->post(100000ll);
+                mScanSourcesPending = true;
+            }
+            break;
+        }
+
+        case kWhatVideoNotify:
+        case kWhatAudioNotify:
+        {
+            bool audio = msg->what() == kWhatAudioNotify;
+
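+            // Each decoder instance gets its own generation number. A message carrying a
+            // stale generation comes from a decoder that has since been torn down; reply
+            // with INFO_DISCONTINUITY instead of processing it.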
+            int32_t currentDecoderGeneration =
+                (audio? mAudioDecoderGeneration : mVideoDecoderGeneration);
+            int32_t requesterGeneration = currentDecoderGeneration - 1;
+            CHECK(msg->findInt32("generation", &requesterGeneration));
+
+            if (requesterGeneration != currentDecoderGeneration) {
+                ALOGV("got message from old %s decoder, generation(%d:%d)",
+                        audio ? "audio" : "video", requesterGeneration,
+                        currentDecoderGeneration);
+                sp<AMessage> reply;
+                if (!(msg->findMessage("reply", &reply))) {
+                    return;
+                }
+
+                reply->setInt32("err", INFO_DISCONTINUITY);
+                reply->post();
+                return;
+            }
+
+            int32_t what;
+            CHECK(msg->findInt32("what", &what));
+
+            if (what == DecoderBase::kWhatInputDiscontinuity) {
+                int32_t formatChange;
+                CHECK(msg->findInt32("formatChange", &formatChange));
+
+                ALOGV("%s discontinuity: formatChange %d",
+                        audio ? "audio" : "video", formatChange);
+
+                if (formatChange) {
+                    mDeferredActions.push_back(
+                            new FlushDecoderAction(
+                                audio ? FLUSH_CMD_SHUTDOWN : FLUSH_CMD_NONE,
+                                audio ? FLUSH_CMD_NONE : FLUSH_CMD_SHUTDOWN));
+                }
+
+                mDeferredActions.push_back(
+                        new SimpleAction(
+                                &NuPlayer2::performScanSources));
+
+                processDeferredActions();
+            } else if (what == DecoderBase::kWhatEOS) {
+                int32_t err;
+                CHECK(msg->findInt32("err", &err));
+
+                if (err == ERROR_END_OF_STREAM) {
+                    ALOGV("got %s decoder EOS", audio ? "audio" : "video");
+                } else {
+                    ALOGV("got %s decoder EOS w/ error %d",
+                         audio ? "audio" : "video",
+                         err);
+                }
+
+                mRenderer->queueEOS(audio, err);
+            } else if (what == DecoderBase::kWhatFlushCompleted) {
+                ALOGV("decoder %s flush completed", audio ? "audio" : "video");
+
+                handleFlushComplete(audio, true /* isDecoder */);
+                finishFlushIfPossible();
+            } else if (what == DecoderBase::kWhatVideoSizeChanged) {
+                sp<AMessage> format;
+                CHECK(msg->findMessage("format", &format));
+
+                sp<AMessage> inputFormat =
+                        mSource->getFormat(false /* audio */);
+
+                setVideoScalingMode(mVideoScalingMode);
+                updateVideoSize(inputFormat, format);
+            } else if (what == DecoderBase::kWhatShutdownCompleted) {
+                ALOGV("%s shutdown completed", audio ? "audio" : "video");
+                if (audio) {
+                    mAudioDecoder.clear();
+                    mAudioDecoderError = false;
+                    ++mAudioDecoderGeneration;
+
+                    CHECK_EQ((int)mFlushingAudio, (int)SHUTTING_DOWN_DECODER);
+                    mFlushingAudio = SHUT_DOWN;
+                } else {
+                    mVideoDecoder.clear();
+                    mVideoDecoderError = false;
+                    ++mVideoDecoderGeneration;
+
+                    CHECK_EQ((int)mFlushingVideo, (int)SHUTTING_DOWN_DECODER);
+                    mFlushingVideo = SHUT_DOWN;
+                }
+
+                finishFlushIfPossible();
+            } else if (what == DecoderBase::kWhatResumeCompleted) {
+                finishResume();
+            } else if (what == DecoderBase::kWhatError) {
+                status_t err;
+                if (!msg->findInt32("err", &err) || err == OK) {
+                    err = UNKNOWN_ERROR;
+                }
+
+                // Decoder errors can be due to Source (e.g. from streaming),
+                // or from decoding corrupted bitstreams, or from other decoder
+                // MediaCodec operations (e.g. from an ongoing reset or seek).
+                // They may also be due to openAudioSink failure at
+                // decoder start or after a format change.
+                //
+                // We try to gracefully shut down the affected decoder if possible,
+                // rather than trying to force the shutdown with something
+                // similar to performReset(). This method can lead to a hang
+                // if MediaCodec functions block after an error, but they should
+                // typically return INVALID_OPERATION instead of blocking.
+
+                FlushStatus *flushing = audio ? &mFlushingAudio : &mFlushingVideo;
+                ALOGE("received error(%#x) from %s decoder, flushing(%d), now shutting down",
+                        err, audio ? "audio" : "video", *flushing);
+
+                switch (*flushing) {
+                    case NONE:
+                        mDeferredActions.push_back(
+                                new FlushDecoderAction(
+                                    audio ? FLUSH_CMD_SHUTDOWN : FLUSH_CMD_NONE,
+                                    audio ? FLUSH_CMD_NONE : FLUSH_CMD_SHUTDOWN));
+                        processDeferredActions();
+                        break;
+                    case FLUSHING_DECODER:
+                        *flushing = FLUSHING_DECODER_SHUTDOWN; // initiate shutdown after flush.
+                        break; // Wait for flush to complete.
+                    case FLUSHING_DECODER_SHUTDOWN:
+                        break; // Wait for flush to complete.
+                    case SHUTTING_DOWN_DECODER:
+                        break; // Wait for shutdown to complete.
+                    case FLUSHED:
+                        getDecoder(audio)->initiateShutdown(); // In the middle of a seek.
+                        *flushing = SHUTTING_DOWN_DECODER;     // Shut down.
+                        break;
+                    case SHUT_DOWN:
+                        finishFlushIfPossible();  // Should not occur.
+                        break;                    // Finish anyways.
+                }
+                if (mSource != nullptr) {
+                    if (audio) {
+                        if (mVideoDecoderError || mSource->getFormat(false /* audio */) == NULL
+                                || mSurface == NULL || mVideoDecoder == NULL) {
+                            // When both audio and video have error, or this stream has only audio
+                            // which has error, notify client of error.
+                            notifyListener(MEDIA2_ERROR, MEDIA2_ERROR_UNKNOWN, err);
+                        } else {
+                            // Only audio track has error. Video track could be still good to play.
+                            notifyListener(MEDIA2_INFO, MEDIA2_INFO_PLAY_AUDIO_ERROR, err);
+                        }
+                        mAudioDecoderError = true;
+                    } else {
+                        if (mAudioDecoderError || mSource->getFormat(true /* audio */) == NULL
+                                || mAudioSink == NULL || mAudioDecoder == NULL) {
+                            // When both audio and video have error, or this stream has only video
+                            // which has error, notify client of error.
+                            notifyListener(MEDIA2_ERROR, MEDIA2_ERROR_UNKNOWN, err);
+                        } else {
+                            // Only video track has error. Audio track could be still good to play.
+                            notifyListener(MEDIA2_INFO, MEDIA2_INFO_PLAY_VIDEO_ERROR, err);
+                        }
+                        mVideoDecoderError = true;
+                    }
+                }
+            } else {
+                ALOGV("Unhandled decoder notification %d '%c%c%c%c'.",
+                      what,
+                      what >> 24,
+                      (what >> 16) & 0xff,
+                      (what >> 8) & 0xff,
+                      what & 0xff);
+            }
+
+            break;
+        }
+
+        case kWhatRendererNotify:
+        {
+            int32_t requesterGeneration = mRendererGeneration - 1;
+            CHECK(msg->findInt32("generation", &requesterGeneration));
+            if (requesterGeneration != mRendererGeneration) {
+                ALOGV("got message from old renderer, generation(%d:%d)",
+                        requesterGeneration, mRendererGeneration);
+                return;
+            }
+
+            int32_t what;
+            CHECK(msg->findInt32("what", &what));
+
+            if (what == Renderer::kWhatEOS) {
+                int32_t audio;
+                CHECK(msg->findInt32("audio", &audio));
+
+                int32_t finalResult;
+                CHECK(msg->findInt32("finalResult", &finalResult));
+
+                if (audio) {
+                    mAudioEOS = true;
+                } else {
+                    mVideoEOS = true;
+                }
+
+                if (finalResult == ERROR_END_OF_STREAM) {
+                    ALOGV("reached %s EOS", audio ? "audio" : "video");
+                } else {
+                    ALOGE("%s track encountered an error (%d)",
+                         audio ? "audio" : "video", finalResult);
+
+                    notifyListener(
+                            MEDIA2_ERROR, MEDIA2_ERROR_UNKNOWN, finalResult);
+                }
+
+                if ((mAudioEOS || mAudioDecoder == NULL)
+                        && (mVideoEOS || mVideoDecoder == NULL)) {
+                    notifyListener(MEDIA2_PLAYBACK_COMPLETE, 0, 0);
+                }
+            } else if (what == Renderer::kWhatFlushComplete) {
+                int32_t audio;
+                CHECK(msg->findInt32("audio", &audio));
+
+                if (audio) {
+                    mAudioEOS = false;
+                } else {
+                    mVideoEOS = false;
+                }
+
+                ALOGV("renderer %s flush completed.", audio ? "audio" : "video");
+                if (audio && (mFlushingAudio == NONE || mFlushingAudio == FLUSHED
+                        || mFlushingAudio == SHUT_DOWN)) {
+                    // Flush has been handled by tear down.
+                    break;
+                }
+                handleFlushComplete(audio, false /* isDecoder */);
+                finishFlushIfPossible();
+            } else if (what == Renderer::kWhatVideoRenderingStart) {
+                notifyListener(MEDIA2_INFO, MEDIA2_INFO_RENDERING_START, 0);
+            } else if (what == Renderer::kWhatMediaRenderingStart) {
+                ALOGV("media rendering started");
+                notifyListener(MEDIA2_STARTED, 0, 0);
+            } else if (what == Renderer::kWhatAudioTearDown) {
+                int32_t reason;
+                CHECK(msg->findInt32("reason", &reason));
+                ALOGV("Tear down audio with reason %d.", reason);
+                if (reason == Renderer::kDueToTimeout && !(mPaused && mOffloadAudio)) {
+                    // TimeoutWhenPaused is only for offload mode.
+                    ALOGW("Receive a stale message for teardown.");
+                    break;
+                }
+                int64_t positionUs;
+                if (!msg->findInt64("positionUs", &positionUs)) {
+                    positionUs = mPreviousSeekTimeUs;
+                }
+
+                restartAudio(
+                        positionUs, reason == Renderer::kForceNonOffload /* forceNonOffload */,
+                        reason != Renderer::kDueToTimeout /* needsToCreateAudioDecoder */);
+            }
+            break;
+        }
+
+        case kWhatMoreDataQueued:
+        {
+            break;
+        }
+
+        case kWhatReset:
+        {
+            ALOGV("kWhatReset");
+
+            mResetting = true;
+            stopPlaybackTimer("kWhatReset");
+            stopRebufferingTimer(true);
+
+            mDeferredActions.push_back(
+                    new FlushDecoderAction(
+                        FLUSH_CMD_SHUTDOWN /* audio */,
+                        FLUSH_CMD_SHUTDOWN /* video */));
+
+            mDeferredActions.push_back(
+                    new SimpleAction(&NuPlayer2::performReset));
+
+            processDeferredActions();
+            break;
+        }
+
+        case kWhatNotifyTime:
+        {
+            ALOGV("kWhatNotifyTime");
+            int64_t timerUs;
+            CHECK(msg->findInt64("timerUs", &timerUs));
+
+            notifyListener(MEDIA2_NOTIFY_TIME, timerUs, 0);
+            break;
+        }
+
+        case kWhatSeek:
+        {
+            int64_t seekTimeUs;
+            int32_t mode;
+            int32_t needNotify;
+            CHECK(msg->findInt64("seekTimeUs", &seekTimeUs));
+            CHECK(msg->findInt32("mode", &mode));
+            CHECK(msg->findInt32("needNotify", &needNotify));
+
+            ALOGV("kWhatSeek seekTimeUs=%lld us, mode=%d, needNotify=%d",
+                    (long long)seekTimeUs, mode, needNotify);
+
+            if (!mStarted) {
+                // Seek before the player is started. In order to preview video, we
+                // need to start the player and pause it. This branch is taken only
+                // once if needed. After the player is started, any seek operation
+                // goes through the normal path.
+                // Audio-only cases are handled separately.
+                onStart(seekTimeUs, (MediaPlayer2SeekMode)mode);
+                if (mStarted) {
+                    onPause();
+                    mPausedByClient = true;
+                }
+                if (needNotify) {
+                    notifyDriverSeekComplete();
+                }
+                break;
+            }
+
+            mDeferredActions.push_back(
+                    new FlushDecoderAction(FLUSH_CMD_FLUSH /* audio */,
+                                           FLUSH_CMD_FLUSH /* video */));
+
+            mDeferredActions.push_back(
+                    new SeekAction(seekTimeUs, (MediaPlayer2SeekMode)mode));
+
+            // After a flush without shutdown, decoder is paused.
+            // Don't resume it until source seek is done, otherwise it could
+            // start pulling stale data too soon.
+            mDeferredActions.push_back(
+                    new ResumeDecoderAction(needNotify));
+
+            processDeferredActions();
+            break;
+        }
+
+        case kWhatPause:
+        {
+            onPause();
+            mPausedByClient = true;
+            break;
+        }
+
+        case kWhatSourceNotify:
+        {
+            onSourceNotify(msg);
+            break;
+        }
+
+        case kWhatClosedCaptionNotify:
+        {
+            onClosedCaptionNotify(msg);
+            break;
+        }
+
+        case kWhatPrepareDrm:
+        {
+            status_t status = onPrepareDrm(msg);
+
+            sp<AMessage> response = new AMessage;
+            response->setInt32("status", status);
+            sp<AReplyToken> replyID;
+            CHECK(msg->senderAwaitsResponse(&replyID));
+            response->postReply(replyID);
+            break;
+        }
+
+        case kWhatReleaseDrm:
+        {
+            status_t status = onReleaseDrm();
+
+            sp<AMessage> response = new AMessage;
+            response->setInt32("status", status);
+            sp<AReplyToken> replyID;
+            CHECK(msg->senderAwaitsResponse(&replyID));
+            response->postReply(replyID);
+            break;
+        }
+
+        default:
+            TRESPASS();
+            break;
+    }
+}
+
+void NuPlayer2::onResume() {
+    if (!mPaused || mResetting) {
+        ALOGD_IF(mResetting, "resetting, onResume discarded");
+        return;
+    }
+    mPaused = false;
+    if (mSource != NULL) {
+        mSource->resume();
+    } else {
+        ALOGW("resume called when source is gone or not set");
+    }
+    // |mAudioDecoder| may have been released due to the pause timeout, so re-create it if
+    // needed.
+    if (audioDecoderStillNeeded() && mAudioDecoder == NULL) {
+        instantiateDecoder(true /* audio */, &mAudioDecoder);
+    }
+    if (mRenderer != NULL) {
+        mRenderer->resume();
+    } else {
+        ALOGW("resume called when renderer is gone or not set");
+    }
+
+    startPlaybackTimer("onresume");
+}
+
+status_t NuPlayer2::onInstantiateSecureDecoders() {
+    status_t err;
+    if (!(mSourceFlags & Source::FLAG_SECURE)) {
+        return BAD_TYPE;
+    }
+
+    if (mRenderer != NULL) {
+        ALOGE("renderer should not be set when instantiating secure decoders");
+        return UNKNOWN_ERROR;
+    }
+
+    // TRICKY: We rely on mRenderer being null, so that decoder does not start requesting
+    // data on instantiation.
+    if (mSurface != NULL) {
+        err = instantiateDecoder(false, &mVideoDecoder);
+        if (err != OK) {
+            return err;
+        }
+    }
+
+    if (mAudioSink != NULL) {
+        err = instantiateDecoder(true, &mAudioDecoder);
+        if (err != OK) {
+            return err;
+        }
+    }
+    return OK;
+}
+
+void NuPlayer2::onStart(int64_t startPositionUs, MediaPlayer2SeekMode mode) {
+    ALOGV("onStart: mCrypto: %p", mCrypto.get());
+
+    if (!mSourceStarted) {
+        mSourceStarted = true;
+        mSource->start();
+    }
+    if (startPositionUs > 0) {
+        performSeek(startPositionUs, mode);
+        if (mSource->getFormat(false /* audio */) == NULL) {
+            return;
+        }
+    }
+
+    mOffloadAudio = false;
+    mAudioEOS = false;
+    mVideoEOS = false;
+    mStarted = true;
+    mPaused = false;
+
+    uint32_t flags = 0;
+
+    if (mSource->isRealTime()) {
+        flags |= Renderer::FLAG_REAL_TIME;
+    }
+
+    bool hasAudio = (mSource->getFormat(true /* audio */) != NULL);
+    bool hasVideo = (mSource->getFormat(false /* audio */) != NULL);
+    if (!hasAudio && !hasVideo) {
+        ALOGE("no metadata for either audio or video source");
+        mSource->stop();
+        mSourceStarted = false;
+        notifyListener(MEDIA2_ERROR, MEDIA2_ERROR_UNKNOWN, ERROR_MALFORMED);
+        return;
+    }
+    ALOGV_IF(!hasAudio, "no metadata for audio source");  // video only stream
+
+    sp<MetaData> audioMeta = mSource->getFormatMeta(true /* audio */);
+
+    audio_stream_type_t streamType = AUDIO_STREAM_MUSIC;
+    if (mAudioSink != NULL) {
+        streamType = mAudioSink->getAudioStreamType();
+    }
+
+    mOffloadAudio =
+        canOffloadStream(audioMeta, hasVideo, mSource->isStreaming(), streamType)
+                && (mPlaybackSettings.mSpeed == 1.f && mPlaybackSettings.mPitch == 1.f);
+
+    // Modular DRM: Disabling audio offload if the source is protected
+    if (mOffloadAudio && mIsDrmProtected) {
+        mOffloadAudio = false;
+        ALOGV("onStart: Disabling mOffloadAudio now that the source is protected.");
+    }
+
+    if (mOffloadAudio) {
+        flags |= Renderer::FLAG_OFFLOAD_AUDIO;
+    }
+
+    sp<AMessage> notify = new AMessage(kWhatRendererNotify, this);
+    ++mRendererGeneration;
+    notify->setInt32("generation", mRendererGeneration);
+    mRenderer = new Renderer(mAudioSink, mMediaClock, notify, flags);
+    mRendererLooper = new ALooper;
+    mRendererLooper->setName("NuPlayerRenderer");
+    mRendererLooper->start(false, false, ANDROID_PRIORITY_AUDIO);
+    mRendererLooper->registerHandler(mRenderer);
+
+    status_t err = mRenderer->setPlaybackSettings(mPlaybackSettings);
+    if (err != OK) {
+        mSource->stop();
+        mSourceStarted = false;
+        notifyListener(MEDIA2_ERROR, MEDIA2_ERROR_UNKNOWN, err);
+        return;
+    }
+
+    float rate = getFrameRate();
+    if (rate > 0) {
+        mRenderer->setVideoFrameRate(rate);
+    }
+
+    if (mVideoDecoder != NULL) {
+        mVideoDecoder->setRenderer(mRenderer);
+    }
+    if (mAudioDecoder != NULL) {
+        mAudioDecoder->setRenderer(mRenderer);
+    }
+
+    startPlaybackTimer("onstart");
+
+    postScanSources();
+}
+
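+// The playback and rebuffering timers accumulate wall-clock time (systemTime(), in ns)
+// between start/stop pairs and report it to NuPlayer2Driver in microseconds.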
+void NuPlayer2::startPlaybackTimer(const char *where) {
+    Mutex::Autolock autoLock(mPlayingTimeLock);
+    if (mLastStartedPlayingTimeNs == 0) {
+        mLastStartedPlayingTimeNs = systemTime();
+        ALOGV("startPlaybackTimer() time %20" PRId64 " (%s)",  mLastStartedPlayingTimeNs, where);
+    }
+}
+
+void NuPlayer2::stopPlaybackTimer(const char *where) {
+    Mutex::Autolock autoLock(mPlayingTimeLock);
+
+    ALOGV("stopPlaybackTimer()  time %20" PRId64 " (%s)", mLastStartedPlayingTimeNs, where);
+
+    if (mLastStartedPlayingTimeNs != 0) {
+        sp<NuPlayer2Driver> driver = mDriver.promote();
+        if (driver != NULL) {
+            int64_t now = systemTime();
+            int64_t played = now - mLastStartedPlayingTimeNs;
+            ALOGV("stopPlaybackTimer()  log  %20" PRId64 "", played);
+
+            if (played > 0) {
+                driver->notifyMorePlayingTimeUs((played+500)/1000);
+            }
+        }
+        mLastStartedPlayingTimeNs = 0;
+    }
+}
+
+void NuPlayer2::startRebufferingTimer() {
+    Mutex::Autolock autoLock(mPlayingTimeLock);
+    if (mLastStartedRebufferingTimeNs == 0) {
+        mLastStartedRebufferingTimeNs = systemTime();
+        ALOGV("startRebufferingTimer() time %20" PRId64 "",  mLastStartedRebufferingTimeNs);
+    }
+}
+
+void NuPlayer2::stopRebufferingTimer(bool exitingPlayback) {
+    Mutex::Autolock autoLock(mPlayingTimeLock);
+
+    ALOGV("stopRebufferTimer()  time %20" PRId64 " (exiting %d)", mLastStartedRebufferingTimeNs, exitingPlayback);
+
+    if (mLastStartedRebufferingTimeNs != 0) {
+        sp<NuPlayer2Driver> driver = mDriver.promote();
+        if (driver != NULL) {
+            int64_t now = systemTime();
+            int64_t rebuffered = now - mLastStartedRebufferingTimeNs;
+            ALOGV("stopRebufferingTimer()  log  %20" PRId64 "", rebuffered);
+
+            if (rebuffered > 0) {
+                driver->notifyMoreRebufferingTimeUs((rebuffered+500)/1000);
+                if (exitingPlayback) {
+                    driver->notifyRebufferingWhenExit(true);
+                }
+            }
+        }
+        mLastStartedRebufferingTimeNs = 0;
+    }
+}
+
+void NuPlayer2::onPause() {
+
+    stopPlaybackTimer("onPause");
+
+    if (mPaused) {
+        return;
+    }
+    mPaused = true;
+    if (mSource != NULL) {
+        mSource->pause();
+    } else {
+        ALOGW("pause called when source is gone or not set");
+    }
+    if (mRenderer != NULL) {
+        mRenderer->pause();
+    } else {
+        ALOGW("pause called when renderer is gone or not set");
+    }
+
+}
+
+bool NuPlayer2::audioDecoderStillNeeded() {
+    // The audio decoder is no longer needed if it has shut down or is shutting down.
+    return ((mFlushingAudio != SHUT_DOWN) && (mFlushingAudio != SHUTTING_DOWN_DECODER));
+}
+
+void NuPlayer2::handleFlushComplete(bool audio, bool isDecoder) {
+    // We wait for both the decoder flush and the renderer flush to complete
+    // before entering either the FLUSHED or the SHUTTING_DOWN_DECODER state.
+
+    mFlushComplete[audio][isDecoder] = true;
+    if (!mFlushComplete[audio][!isDecoder]) {
+        return;
+    }
+
+    FlushStatus *state = audio ? &mFlushingAudio : &mFlushingVideo;
+    switch (*state) {
+        case FLUSHING_DECODER:
+        {
+            *state = FLUSHED;
+            break;
+        }
+
+        case FLUSHING_DECODER_SHUTDOWN:
+        {
+            *state = SHUTTING_DOWN_DECODER;
+
+            ALOGV("initiating %s decoder shutdown", audio ? "audio" : "video");
+            getDecoder(audio)->initiateShutdown();
+            break;
+        }
+
+        default:
+            // Decoder flush completion should only occur while in a flushing state.
+            LOG_ALWAYS_FATAL_IF(isDecoder, "decoder flush in invalid state %d", *state);
+            break;
+    }
+}
+
+void NuPlayer2::finishFlushIfPossible() {
+    if (mFlushingAudio != NONE && mFlushingAudio != FLUSHED
+            && mFlushingAudio != SHUT_DOWN) {
+        return;
+    }
+
+    if (mFlushingVideo != NONE && mFlushingVideo != FLUSHED
+            && mFlushingVideo != SHUT_DOWN) {
+        return;
+    }
+
+    ALOGV("both audio and video are flushed now.");
+
+    mFlushingAudio = NONE;
+    mFlushingVideo = NONE;
+
+    clearFlushComplete();
+
+    processDeferredActions();
+}
+
+void NuPlayer2::postScanSources() {
+    if (mScanSourcesPending) {
+        return;
+    }
+
+    sp<AMessage> msg = new AMessage(kWhatScanSources, this);
+    msg->setInt32("generation", mScanSourcesGeneration);
+    msg->post();
+
+    mScanSourcesPending = true;
+}
+
+void NuPlayer2::tryOpenAudioSinkForOffload(
+        const sp<AMessage> &format, const sp<MetaData> &audioMeta, bool hasVideo) {
+    // Note: This is called early in NuPlayer2 to determine whether offloading
+    // is possible; otherwise the decoders call the renderer openAudioSink directly.
+
+    status_t err = mRenderer->openAudioSink(
+            format, true /* offloadOnly */, hasVideo,
+            AUDIO_OUTPUT_FLAG_NONE, &mOffloadAudio, mSource->isStreaming());
+    if (err != OK) {
+        // On any failure, turn off mOffloadAudio.
+        mOffloadAudio = false;
+    } else if (mOffloadAudio) {
+        sendMetaDataToHal(mAudioSink, audioMeta);
+    }
+}
+
+void NuPlayer2::closeAudioSink() {
+    mRenderer->closeAudioSink();
+}
+
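+// Tears down the current audio decoder, reconciles any in-flight audio flush
+// state, flushes the renderer queues, seeks back to the current position and,
+// if requested, recreates the audio decoder. When forceNonOffload is set,
+// audio offload is disabled on the renderer before the decoder is recreated.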
+void NuPlayer2::restartAudio(
+        int64_t currentPositionUs, bool forceNonOffload, bool needsToCreateAudioDecoder) {
+    if (mAudioDecoder != NULL) {
+        mAudioDecoder->pause();
+        mAudioDecoder.clear();
+        mAudioDecoderError = false;
+        ++mAudioDecoderGeneration;
+    }
+    if (mFlushingAudio == FLUSHING_DECODER) {
+        mFlushComplete[1 /* audio */][1 /* isDecoder */] = true;
+        mFlushingAudio = FLUSHED;
+        finishFlushIfPossible();
+    } else if (mFlushingAudio == FLUSHING_DECODER_SHUTDOWN
+            || mFlushingAudio == SHUTTING_DOWN_DECODER) {
+        mFlushComplete[1 /* audio */][1 /* isDecoder */] = true;
+        mFlushingAudio = SHUT_DOWN;
+        finishFlushIfPossible();
+        needsToCreateAudioDecoder = false;
+    }
+    if (mRenderer == NULL) {
+        return;
+    }
+    closeAudioSink();
+    mRenderer->flush(true /* audio */, false /* notifyComplete */);
+    if (mVideoDecoder != NULL) {
+        mRenderer->flush(false /* audio */, false /* notifyComplete */);
+    }
+
+    performSeek(currentPositionUs, MediaPlayer2SeekMode::SEEK_PREVIOUS_SYNC /* mode */);
+
+    if (forceNonOffload) {
+        mRenderer->signalDisableOffloadAudio();
+        mOffloadAudio = false;
+    }
+    if (needsToCreateAudioDecoder) {
+        instantiateDecoder(true /* audio */, &mAudioDecoder, !forceNonOffload);
+    }
+}
+
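+// Decides whether audio should be offloaded for the given format. Offload is
+// used only when the stream qualifies (canOffloadStream), playback speed and
+// pitch are both 1.0, and the source is not DRM-protected; the renderer is
+// switched accordingly and, for offload, the audio sink is opened early.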
+void NuPlayer2::determineAudioModeChange(const sp<AMessage> &audioFormat) {
+    if (mSource == NULL || mAudioSink == NULL) {
+        return;
+    }
+
+    if (mRenderer == NULL) {
+        ALOGW("No renderer can be used to determine audio mode. Use non-offload for safety.");
+        mOffloadAudio = false;
+        return;
+    }
+
+    sp<MetaData> audioMeta = mSource->getFormatMeta(true /* audio */);
+    sp<AMessage> videoFormat = mSource->getFormat(false /* audio */);
+    audio_stream_type_t streamType = mAudioSink->getAudioStreamType();
+    const bool hasVideo = (videoFormat != NULL);
+    bool canOffload = canOffloadStream(
+            audioMeta, hasVideo, mSource->isStreaming(), streamType)
+                    && (mPlaybackSettings.mSpeed == 1.f && mPlaybackSettings.mPitch == 1.f);
+
+    // Modular DRM: Disabling audio offload if the source is protected
+    if (canOffload && mIsDrmProtected) {
+        canOffload = false;
+        ALOGV("determineAudioModeChange: Disabling mOffloadAudio b/c the source is protected.");
+    }
+
+    if (canOffload) {
+        if (!mOffloadAudio) {
+            mRenderer->signalEnableOffloadAudio();
+        }
+        // open audio sink early under offload mode.
+        tryOpenAudioSinkForOffload(audioFormat, audioMeta, hasVideo);
+    } else {
+        if (mOffloadAudio) {
+            mRenderer->signalDisableOffloadAudio();
+            mOffloadAudio = false;
+        }
+    }
+}
+
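+// Creates and configures the audio or video decoder from the source format.
+// Offloaded audio uses a pass-through decoder, everything else a regular
+// Decoder. Video formats get closed-caption, secure/protected and
+// operating-rate hints; for DRM-protected content the crypto object is
+// attached to the format before configure().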
+status_t NuPlayer2::instantiateDecoder(
+        bool audio, sp<DecoderBase> *decoder, bool checkAudioModeChange) {
+    // The audio decoder could be cleared by tear down. If still in shut down
+    // process, no need to create a new audio decoder.
+    if (*decoder != NULL || (audio && mFlushingAudio == SHUT_DOWN)) {
+        return OK;
+    }
+
+    sp<AMessage> format = mSource->getFormat(audio);
+
+    if (format == NULL) {
+        return UNKNOWN_ERROR;
+    } else {
+        status_t err;
+        if (format->findInt32("err", &err) && err) {
+            return err;
+        }
+    }
+
+    format->setInt32("priority", 0 /* realtime */);
+
+    if (!audio) {
+        AString mime;
+        CHECK(format->findString("mime", &mime));
+
+        sp<AMessage> ccNotify = new AMessage(kWhatClosedCaptionNotify, this);
+        if (mCCDecoder == NULL) {
+            mCCDecoder = new CCDecoder(ccNotify);
+        }
+
+        if (mSourceFlags & Source::FLAG_SECURE) {
+            format->setInt32("secure", true);
+        }
+
+        if (mSourceFlags & Source::FLAG_PROTECTED) {
+            format->setInt32("protected", true);
+        }
+
+        float rate = getFrameRate();
+        if (rate > 0) {
+            format->setFloat("operating-rate", rate * mPlaybackSettings.mSpeed);
+        }
+    }
+
+    if (audio) {
+        sp<AMessage> notify = new AMessage(kWhatAudioNotify, this);
+        ++mAudioDecoderGeneration;
+        notify->setInt32("generation", mAudioDecoderGeneration);
+
+        if (checkAudioModeChange) {
+            determineAudioModeChange(format);
+        }
+        if (mOffloadAudio) {
+            mSource->setOffloadAudio(true /* offload */);
+
+            const bool hasVideo = (mSource->getFormat(false /*audio */) != NULL);
+            format->setInt32("has-video", hasVideo);
+            *decoder = new DecoderPassThrough(notify, mSource, mRenderer);
+            ALOGV("instantiateDecoder audio DecoderPassThrough  hasVideo: %d", hasVideo);
+        } else {
+            mSource->setOffloadAudio(false /* offload */);
+
+            *decoder = new Decoder(notify, mSource, mPID, mUID, mRenderer);
+            ALOGV("instantiateDecoder audio Decoder");
+        }
+        mAudioDecoderError = false;
+    } else {
+        sp<AMessage> notify = new AMessage(kWhatVideoNotify, this);
+        ++mVideoDecoderGeneration;
+        notify->setInt32("generation", mVideoDecoderGeneration);
+
+        *decoder = new Decoder(
+                notify, mSource, mPID, mUID, mRenderer, mSurface, mCCDecoder);
+        mVideoDecoderError = false;
+
+        // Enable FRC if high-quality AV sync is requested, even if we are not
+        // queuing directly to the display, as this also improves TextureView
+        // playback.
+        {
+            if (property_get_bool("persist.sys.media.avsync", false)) {
+                format->setInt32("auto-frc", 1);
+            }
+        }
+    }
+    (*decoder)->init();
+
+    // Modular DRM
+    if (mIsDrmProtected) {
+        format->setObject("crypto", mCrypto);
+        ALOGV("instantiateDecoder: mCrypto: %p isSecure: %d", mCrypto.get(),
+                (mSourceFlags & Source::FLAG_SECURE) != 0);
+    }
+
+    (*decoder)->configure(format);
+
+    if (!audio) {
+        sp<AMessage> params = new AMessage();
+        float rate = getFrameRate();
+        if (rate > 0) {
+            params->setFloat("frame-rate-total", rate);
+        }
+
+        sp<MetaData> fileMeta = getFileMeta();
+        if (fileMeta != NULL) {
+            int32_t videoTemporalLayerCount;
+            if (fileMeta->findInt32(kKeyTemporalLayerCount, &videoTemporalLayerCount)
+                    && videoTemporalLayerCount > 0) {
+                params->setInt32("temporal-layer-count", videoTemporalLayerCount);
+            }
+        }
+
+        if (params->countEntries() > 0) {
+            (*decoder)->setParameters(params);
+        }
+    }
+    return OK;
+}
+
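+// Computes the display dimensions reported to the listener: start from the
+// decoder's crop rectangle (or the input format's width/height), apply the
+// sample aspect ratio or an explicit display-width/height override, then swap
+// width and height for 90/270 degree rotations.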
+void NuPlayer2::updateVideoSize(
+        const sp<AMessage> &inputFormat,
+        const sp<AMessage> &outputFormat) {
+    if (inputFormat == NULL) {
+        ALOGW("Unknown video size, reporting 0x0!");
+        notifyListener(MEDIA2_SET_VIDEO_SIZE, 0, 0);
+        return;
+    }
+    int32_t err = OK;
+    inputFormat->findInt32("err", &err);
+    if (err == -EWOULDBLOCK) {
+        ALOGW("Video meta is not available yet!");
+        return;
+    }
+    if (err != OK) {
+        ALOGW("Something is wrong with video meta!");
+        return;
+    }
+
+    int32_t displayWidth, displayHeight;
+    if (outputFormat != NULL) {
+        int32_t width, height;
+        CHECK(outputFormat->findInt32("width", &width));
+        CHECK(outputFormat->findInt32("height", &height));
+
+        int32_t cropLeft, cropTop, cropRight, cropBottom;
+        CHECK(outputFormat->findRect(
+                    "crop",
+                    &cropLeft, &cropTop, &cropRight, &cropBottom));
+
+        displayWidth = cropRight - cropLeft + 1;
+        displayHeight = cropBottom - cropTop + 1;
+
+        ALOGV("Video output format changed to %d x %d "
+             "(crop: %d x %d @ (%d, %d))",
+             width, height,
+             displayWidth,
+             displayHeight,
+             cropLeft, cropTop);
+    } else {
+        CHECK(inputFormat->findInt32("width", &displayWidth));
+        CHECK(inputFormat->findInt32("height", &displayHeight));
+
+        ALOGV("Video input format %d x %d", displayWidth, displayHeight);
+    }
+
+    // Take into account sample aspect ratio if necessary:
+    int32_t sarWidth, sarHeight;
+    if (inputFormat->findInt32("sar-width", &sarWidth)
+            && inputFormat->findInt32("sar-height", &sarHeight)
+            && sarWidth > 0 && sarHeight > 0) {
+        ALOGV("Sample aspect ratio %d : %d", sarWidth, sarHeight);
+
+        displayWidth = (displayWidth * sarWidth) / sarHeight;
+
+        ALOGV("display dimensions %d x %d", displayWidth, displayHeight);
+    } else {
+        int32_t width, height;
+        if (inputFormat->findInt32("display-width", &width)
+                && inputFormat->findInt32("display-height", &height)
+                && width > 0 && height > 0
+                && displayWidth > 0 && displayHeight > 0) {
+            if (displayHeight * (int64_t)width / height > (int64_t)displayWidth) {
+                displayHeight = (int32_t)(displayWidth * (int64_t)height / width);
+            } else {
+                displayWidth = (int32_t)(displayHeight * (int64_t)width / height);
+            }
+            ALOGV("Video display width and height are overridden to %d x %d",
+                 displayWidth, displayHeight);
+        }
+    }
+
+    int32_t rotationDegrees;
+    if (!inputFormat->findInt32("rotation-degrees", &rotationDegrees)) {
+        rotationDegrees = 0;
+    }
+
+    if (rotationDegrees == 90 || rotationDegrees == 270) {
+        int32_t tmp = displayWidth;
+        displayWidth = displayHeight;
+        displayHeight = tmp;
+    }
+
+    notifyListener(
+            MEDIA2_SET_VIDEO_SIZE,
+            displayWidth,
+            displayHeight);
+}
+
+void NuPlayer2::notifyListener(int msg, int ext1, int ext2, const Parcel *in) {
+    if (mDriver == NULL) {
+        return;
+    }
+
+    sp<NuPlayer2Driver> driver = mDriver.promote();
+
+    if (driver == NULL) {
+        return;
+    }
+
+    driver->notifyListener(msg, ext1, ext2, in);
+}
+
+void NuPlayer2::flushDecoder(bool audio, bool needShutdown) {
+    ALOGV("[%s] flushDecoder needShutdown=%d",
+          audio ? "audio" : "video", needShutdown);
+
+    const sp<DecoderBase> &decoder = getDecoder(audio);
+    if (decoder == NULL) {
+        ALOGI("flushDecoder %s without decoder present",
+             audio ? "audio" : "video");
+        return;
+    }
+
+    // Make sure we don't continue to scan sources until we finish flushing.
+    ++mScanSourcesGeneration;
+    if (mScanSourcesPending) {
+        if (!needShutdown) {
+            mDeferredActions.push_back(
+                    new SimpleAction(&NuPlayer2::performScanSources));
+        }
+        mScanSourcesPending = false;
+    }
+
+    decoder->signalFlush();
+
+    FlushStatus newStatus =
+        needShutdown ? FLUSHING_DECODER_SHUTDOWN : FLUSHING_DECODER;
+
+    mFlushComplete[audio][false /* isDecoder */] = (mRenderer == NULL);
+    mFlushComplete[audio][true /* isDecoder */] = false;
+    if (audio) {
+        ALOGE_IF(mFlushingAudio != NONE,
+                "audio flushDecoder() is called in state %d", mFlushingAudio);
+        mFlushingAudio = newStatus;
+    } else {
+        ALOGE_IF(mFlushingVideo != NONE,
+                "video flushDecoder() is called in state %d", mFlushingVideo);
+        mFlushingVideo = newStatus;
+    }
+}
+
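+// Defers a flush-and-shutdown of the selected decoders followed by a source
+// rescan; the supplied reply message is posted once those deferred actions
+// have run.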
+void NuPlayer2::queueDecoderShutdown(
+        bool audio, bool video, const sp<AMessage> &reply) {
+    ALOGI("queueDecoderShutdown audio=%d, video=%d", audio, video);
+
+    mDeferredActions.push_back(
+            new FlushDecoderAction(
+                audio ? FLUSH_CMD_SHUTDOWN : FLUSH_CMD_NONE,
+                video ? FLUSH_CMD_SHUTDOWN : FLUSH_CMD_NONE));
+
+    mDeferredActions.push_back(
+            new SimpleAction(&NuPlayer2::performScanSources));
+
+    mDeferredActions.push_back(new PostMessageAction(reply));
+
+    processDeferredActions();
+}
+
+status_t NuPlayer2::setVideoScalingMode(int32_t mode) {
+    mVideoScalingMode = mode;
+    if (mSurface != NULL) {
+        status_t ret = native_window_set_scaling_mode(mSurface.get(), mVideoScalingMode);
+        if (ret != OK) {
+            ALOGE("Failed to set scaling mode (%d): %s",
+                -ret, strerror(-ret));
+            return ret;
+        }
+    }
+    return OK;
+}
+
+status_t NuPlayer2::getTrackInfo(Parcel* reply) const {
+    sp<AMessage> msg = new AMessage(kWhatGetTrackInfo, this);
+    msg->setPointer("reply", reply);
+
+    sp<AMessage> response;
+    status_t err = msg->postAndAwaitResponse(&response);
+    return err;
+}
+
+status_t NuPlayer2::getSelectedTrack(int32_t type, Parcel* reply) const {
+    sp<AMessage> msg = new AMessage(kWhatGetSelectedTrack, this);
+    msg->setPointer("reply", reply);
+    msg->setInt32("type", type);
+
+    sp<AMessage> response;
+    status_t err = msg->postAndAwaitResponse(&response);
+    if (err == OK && response != NULL) {
+        CHECK(response->findInt32("err", &err));
+    }
+    return err;
+}
+
+status_t NuPlayer2::selectTrack(size_t trackIndex, bool select, int64_t timeUs) {
+    sp<AMessage> msg = new AMessage(kWhatSelectTrack, this);
+    msg->setSize("trackIndex", trackIndex);
+    msg->setInt32("select", select);
+    msg->setInt64("timeUs", timeUs);
+
+    sp<AMessage> response;
+    status_t err = msg->postAndAwaitResponse(&response);
+
+    if (err != OK) {
+        return err;
+    }
+
+    if (!response->findInt32("err", &err)) {
+        err = OK;
+    }
+
+    return err;
+}
+
+status_t NuPlayer2::getCurrentPosition(int64_t *mediaUs) {
+    sp<Renderer> renderer = mRenderer;
+    if (renderer == NULL) {
+        return NO_INIT;
+    }
+
+    return renderer->getCurrentPosition(mediaUs);
+}
+
+void NuPlayer2::getStats(Vector<sp<AMessage> > *mTrackStats) {
+    CHECK(mTrackStats != NULL);
+
+    mTrackStats->clear();
+    if (mVideoDecoder != NULL) {
+        mTrackStats->push_back(mVideoDecoder->getStats());
+    }
+    if (mAudioDecoder != NULL) {
+        mTrackStats->push_back(mAudioDecoder->getStats());
+    }
+}
+
+sp<MetaData> NuPlayer2::getFileMeta() {
+    return mSource->getFileFormatMeta();
+}
+
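+// Returns the video frame rate from the video format meta, falling back to
+// the file-level meta. Returns 0 when there is no video format meta at all,
+// and -1 when meta exists but no frame rate is recorded; callers treat
+// non-positive values as "unknown".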
+float NuPlayer2::getFrameRate() {
+    sp<MetaData> meta = mSource->getFormatMeta(false /* audio */);
+    if (meta == NULL) {
+        return 0;
+    }
+    int32_t rate;
+    if (!meta->findInt32(kKeyFrameRate, &rate)) {
+        // fall back to try file meta
+        sp<MetaData> fileMeta = getFileMeta();
+        if (fileMeta == NULL) {
+            ALOGW("source has video meta but not file meta");
+            return -1;
+        }
+        int32_t fileMetaRate;
+        if (!fileMeta->findInt32(kKeyFrameRate, &fileMetaRate)) {
+            return -1;
+        }
+        return fileMetaRate;
+    }
+    return rate;
+}
+
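+// Duration polling uses the generation-counter pattern used throughout this
+// file: each kWhatPollDuration message carries the generation it was posted
+// with, and cancelPollDuration() simply bumps the counter so any in-flight
+// poll messages can be recognized as stale and dropped by the handler.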
+void NuPlayer2::schedulePollDuration() {
+    sp<AMessage> msg = new AMessage(kWhatPollDuration, this);
+    msg->setInt32("generation", mPollDurationGeneration);
+    msg->post();
+}
+
+void NuPlayer2::cancelPollDuration() {
+    ++mPollDurationGeneration;
+}
+
+void NuPlayer2::processDeferredActions() {
+    while (!mDeferredActions.empty()) {
+        // We won't execute any deferred actions until we're no longer in
+        // an intermediate state, i.e. one or more decoders are currently
+        // flushing or shutting down.
+
+        if (mFlushingAudio != NONE || mFlushingVideo != NONE) {
+            // We're currently flushing, postpone the reset until that's
+            // completed.
+
+            ALOGV("postponing action mFlushingAudio=%d, mFlushingVideo=%d",
+                  mFlushingAudio, mFlushingVideo);
+
+            break;
+        }
+
+        sp<Action> action = *mDeferredActions.begin();
+        mDeferredActions.erase(mDeferredActions.begin());
+
+        action->execute(this);
+    }
+}
+
+void NuPlayer2::performSeek(int64_t seekTimeUs, MediaPlayer2SeekMode mode) {
+    ALOGV("performSeek seekTimeUs=%lld us (%.2f secs), mode=%d",
+          (long long)seekTimeUs, seekTimeUs / 1E6, mode);
+
+    if (mSource == NULL) {
+        // This happens when reset occurs right before the loop mode
+        // asynchronously seeks to the start of the stream.
+        LOG_ALWAYS_FATAL_IF(mAudioDecoder != NULL || mVideoDecoder != NULL,
+                "mSource is NULL and decoders not NULL audio(%p) video(%p)",
+                mAudioDecoder.get(), mVideoDecoder.get());
+        return;
+    }
+    mPreviousSeekTimeUs = seekTimeUs;
+    mSource->seekTo(seekTimeUs, mode);
+    ++mTimedTextGeneration;
+
+    // everything's flushed, continue playback.
+}
+
+void NuPlayer2::performDecoderFlush(FlushCommand audio, FlushCommand video) {
+    ALOGV("performDecoderFlush audio=%d, video=%d", audio, video);
+
+    if ((audio == FLUSH_CMD_NONE || mAudioDecoder == NULL)
+            && (video == FLUSH_CMD_NONE || mVideoDecoder == NULL)) {
+        return;
+    }
+
+    if (audio != FLUSH_CMD_NONE && mAudioDecoder != NULL) {
+        flushDecoder(true /* audio */, (audio == FLUSH_CMD_SHUTDOWN));
+    }
+
+    if (video != FLUSH_CMD_NONE && mVideoDecoder != NULL) {
+        flushDecoder(false /* audio */, (video == FLUSH_CMD_SHUTDOWN));
+    }
+}
+
+void NuPlayer2::performReset() {
+    ALOGV("performReset");
+
+    CHECK(mAudioDecoder == NULL);
+    CHECK(mVideoDecoder == NULL);
+
+    stopPlaybackTimer("performReset");
+    stopRebufferingTimer(true);
+
+    cancelPollDuration();
+
+    ++mScanSourcesGeneration;
+    mScanSourcesPending = false;
+
+    if (mRendererLooper != NULL) {
+        if (mRenderer != NULL) {
+            mRendererLooper->unregisterHandler(mRenderer->id());
+        }
+        mRendererLooper->stop();
+        mRendererLooper.clear();
+    }
+    mRenderer.clear();
+    ++mRendererGeneration;
+
+    if (mSource != NULL) {
+        mSource->stop();
+
+        Mutex::Autolock autoLock(mSourceLock);
+        mSource.clear();
+    }
+
+    if (mDriver != NULL) {
+        sp<NuPlayer2Driver> driver = mDriver.promote();
+        if (driver != NULL) {
+            driver->notifyResetComplete();
+        }
+    }
+
+    mStarted = false;
+    mPrepared = false;
+    mResetting = false;
+    mSourceStarted = false;
+
+    // Modular DRM
+    if (mCrypto != NULL) {
+        // decoders will be flushed before this so their mCrypto would go away on their own
+        // TODO change to ALOGV
+        ALOGD("performReset mCrypto: %p", mCrypto.get());
+        mCrypto.clear();
+    }
+    mIsDrmProtected = false;
+}
+
+void NuPlayer2::performScanSources() {
+    ALOGV("performScanSources");
+
+    if (!mStarted) {
+        return;
+    }
+
+    if (mAudioDecoder == NULL || mVideoDecoder == NULL) {
+        postScanSources();
+    }
+}
+
+void NuPlayer2::performSetSurface(const sp<Surface> &surface) {
+    ALOGV("performSetSurface");
+
+    mSurface = surface;
+
+    // XXX - ignore error from setVideoScalingMode for now
+    setVideoScalingMode(mVideoScalingMode);
+
+    if (mDriver != NULL) {
+        sp<NuPlayer2Driver> driver = mDriver.promote();
+        if (driver != NULL) {
+            driver->notifySetSurfaceComplete();
+        }
+    }
+}
+
+void NuPlayer2::performResumeDecoders(bool needNotify) {
+    if (needNotify) {
+        mResumePending = true;
+        if (mVideoDecoder == NULL) {
+            // if audio-only, we can notify seek complete now,
+            // as the resume operation will be relatively fast.
+            finishResume();
+        }
+    }
+
+    if (mVideoDecoder != NULL) {
+        // When there is continuous seek, MediaPlayer will cache the seek
+        // position, and send down new seek request when previous seek is
+        // complete. Let's wait for at least one video output frame before
+        // notifying seek complete, so that the video thumbnail gets updated
+        // when seekbar is dragged.
+        mVideoDecoder->signalResume(needNotify);
+    }
+
+    if (mAudioDecoder != NULL) {
+        mAudioDecoder->signalResume(false /* needNotify */);
+    }
+}
+
+void NuPlayer2::finishResume() {
+    if (mResumePending) {
+        mResumePending = false;
+        notifyDriverSeekComplete();
+    }
+}
+
+void NuPlayer2::notifyDriverSeekComplete() {
+    if (mDriver != NULL) {
+        sp<NuPlayer2Driver> driver = mDriver.promote();
+        if (driver != NULL) {
+            driver->notifySeekComplete();
+        }
+    }
+}
+
+void NuPlayer2::onSourceNotify(const sp<AMessage> &msg) {
+    int32_t what;
+    CHECK(msg->findInt32("what", &what));
+
+    switch (what) {
+        case Source::kWhatInstantiateSecureDecoders:
+        {
+            if (mSource == NULL) {
+                // This is a stale notification from a source that was
+                // asynchronously preparing when the client called reset().
+                // We handled the reset, the source is gone.
+                break;
+            }
+
+            sp<AMessage> reply;
+            CHECK(msg->findMessage("reply", &reply));
+            status_t err = onInstantiateSecureDecoders();
+            reply->setInt32("err", err);
+            reply->post();
+            break;
+        }
+
+        case Source::kWhatPrepared:
+        {
+            ALOGV("NuPlayer2::onSourceNotify Source::kWhatPrepared source: %p", mSource.get());
+            if (mSource == NULL) {
+                // This is a stale notification from a source that was
+                // asynchronously preparing when the client called reset().
+                // We handled the reset, the source is gone.
+                break;
+            }
+
+            int32_t err;
+            CHECK(msg->findInt32("err", &err));
+
+            if (err != OK) {
+                // shut down potential secure codecs in case client never calls reset
+                mDeferredActions.push_back(
+                        new FlushDecoderAction(FLUSH_CMD_SHUTDOWN /* audio */,
+                                               FLUSH_CMD_SHUTDOWN /* video */));
+                processDeferredActions();
+            } else {
+                mPrepared = true;
+            }
+
+            sp<NuPlayer2Driver> driver = mDriver.promote();
+            if (driver != NULL) {
+                // notify duration first, so that it's definitely set when
+                // the app receives the "prepare complete" callback.
+                int64_t durationUs;
+                if (mSource->getDuration(&durationUs) == OK) {
+                    driver->notifyDuration(durationUs);
+                }
+                driver->notifyPrepareCompleted(err);
+            }
+
+            break;
+        }
+
+        // Modular DRM
+        case Source::kWhatDrmInfo:
+        {
+            Parcel parcel;
+            sp<ABuffer> drmInfo;
+            CHECK(msg->findBuffer("drmInfo", &drmInfo));
+            parcel.setData(drmInfo->data(), drmInfo->size());
+
+            ALOGV("onSourceNotify() kWhatDrmInfo MEDIA2_DRM_INFO drmInfo: %p  parcel size: %zu",
+                    drmInfo.get(), parcel.dataSize());
+
+            notifyListener(MEDIA2_DRM_INFO, 0 /* ext1 */, 0 /* ext2 */, &parcel);
+
+            break;
+        }
+
+        case Source::kWhatFlagsChanged:
+        {
+            uint32_t flags;
+            CHECK(msg->findInt32("flags", (int32_t *)&flags));
+
+            sp<NuPlayer2Driver> driver = mDriver.promote();
+            if (driver != NULL) {
+
+                ALOGV("onSourceNotify() kWhatFlagsChanged  FLAG_CAN_PAUSE: %d  "
+                        "FLAG_CAN_SEEK_BACKWARD: %d \n\t\t\t\t FLAG_CAN_SEEK_FORWARD: %d  "
+                        "FLAG_CAN_SEEK: %d  FLAG_DYNAMIC_DURATION: %d \n"
+                        "\t\t\t\t FLAG_SECURE: %d  FLAG_PROTECTED: %d",
+                        (flags & Source::FLAG_CAN_PAUSE) != 0,
+                        (flags & Source::FLAG_CAN_SEEK_BACKWARD) != 0,
+                        (flags & Source::FLAG_CAN_SEEK_FORWARD) != 0,
+                        (flags & Source::FLAG_CAN_SEEK) != 0,
+                        (flags & Source::FLAG_DYNAMIC_DURATION) != 0,
+                        (flags & Source::FLAG_SECURE) != 0,
+                        (flags & Source::FLAG_PROTECTED) != 0);
+
+                if ((flags & NuPlayer2::Source::FLAG_CAN_SEEK) == 0) {
+                    driver->notifyListener(
+                            MEDIA2_INFO, MEDIA2_INFO_NOT_SEEKABLE, 0);
+                }
+                driver->notifyFlagsChanged(flags);
+            }
+
+            if ((mSourceFlags & Source::FLAG_DYNAMIC_DURATION)
+                    && (!(flags & Source::FLAG_DYNAMIC_DURATION))) {
+                cancelPollDuration();
+            } else if (!(mSourceFlags & Source::FLAG_DYNAMIC_DURATION)
+                    && (flags & Source::FLAG_DYNAMIC_DURATION)
+                    && (mAudioDecoder != NULL || mVideoDecoder != NULL)) {
+                schedulePollDuration();
+            }
+
+            mSourceFlags = flags;
+            break;
+        }
+
+        case Source::kWhatVideoSizeChanged:
+        {
+            sp<AMessage> format;
+            CHECK(msg->findMessage("format", &format));
+
+            updateVideoSize(format);
+            break;
+        }
+
+        case Source::kWhatBufferingUpdate:
+        {
+            int32_t percentage;
+            CHECK(msg->findInt32("percentage", &percentage));
+
+            notifyListener(MEDIA2_BUFFERING_UPDATE, percentage, 0);
+            break;
+        }
+
+        case Source::kWhatPauseOnBufferingStart:
+        {
+            // ignore if not playing
+            if (mStarted) {
+                ALOGI("buffer low, pausing...");
+
+                startRebufferingTimer();
+                mPausedForBuffering = true;
+                onPause();
+            }
+            notifyListener(MEDIA2_INFO, MEDIA2_INFO_BUFFERING_START, 0);
+            break;
+        }
+
+        case Source::kWhatResumeOnBufferingEnd:
+        {
+            // ignore if not playing
+            if (mStarted) {
+                ALOGI("buffer ready, resuming...");
+
+                stopRebufferingTimer(false);
+                mPausedForBuffering = false;
+
+                // do not resume yet if the client has not unpaused
+                if (!mPausedByClient) {
+                    onResume();
+                }
+            }
+            notifyListener(MEDIA2_INFO, MEDIA2_INFO_BUFFERING_END, 0);
+            break;
+        }
+
+        case Source::kWhatCacheStats:
+        {
+            int32_t kbps;
+            CHECK(msg->findInt32("bandwidth", &kbps));
+
+            notifyListener(MEDIA2_INFO, MEDIA2_INFO_NETWORK_BANDWIDTH, kbps);
+            break;
+        }
+
+        case Source::kWhatSubtitleData:
+        {
+            sp<ABuffer> buffer;
+            CHECK(msg->findBuffer("buffer", &buffer));
+
+            sendSubtitleData(buffer, 0 /* baseIndex */);
+            break;
+        }
+
+        case Source::kWhatTimedMetaData:
+        {
+            sp<ABuffer> buffer;
+            if (!msg->findBuffer("buffer", &buffer)) {
+                notifyListener(MEDIA2_INFO, MEDIA2_INFO_METADATA_UPDATE, 0);
+            } else {
+                sendTimedMetaData(buffer);
+            }
+            break;
+        }
+
+        case Source::kWhatTimedTextData:
+        {
+            int32_t generation;
+            if (msg->findInt32("generation", &generation)
+                    && generation != mTimedTextGeneration) {
+                break;
+            }
+
+            sp<ABuffer> buffer;
+            CHECK(msg->findBuffer("buffer", &buffer));
+
+            sp<NuPlayer2Driver> driver = mDriver.promote();
+            if (driver == NULL) {
+                break;
+            }
+
+            int posMs;
+            int64_t timeUs, posUs;
+            driver->getCurrentPosition(&posMs);
+            posUs = (int64_t) posMs * 1000ll;
+            CHECK(buffer->meta()->findInt64("timeUs", &timeUs));
+
+            if (posUs < timeUs) {
+                if (!msg->findInt32("generation", &generation)) {
+                    msg->setInt32("generation", mTimedTextGeneration);
+                }
+                msg->post(timeUs - posUs);
+            } else {
+                sendTimedTextData(buffer);
+            }
+            break;
+        }
+
+        case Source::kWhatQueueDecoderShutdown:
+        {
+            int32_t audio, video;
+            CHECK(msg->findInt32("audio", &audio));
+            CHECK(msg->findInt32("video", &video));
+
+            sp<AMessage> reply;
+            CHECK(msg->findMessage("reply", &reply));
+
+            queueDecoderShutdown(audio, video, reply);
+            break;
+        }
+
+        case Source::kWhatDrmNoLicense:
+        {
+            notifyListener(MEDIA2_ERROR, MEDIA2_ERROR_UNKNOWN, ERROR_DRM_NO_LICENSE);
+            break;
+        }
+
+        default:
+            TRESPASS();
+    }
+}
+
+void NuPlayer2::onClosedCaptionNotify(const sp<AMessage> &msg) {
+    int32_t what;
+    CHECK(msg->findInt32("what", &what));
+
+    switch (what) {
+        case NuPlayer2::CCDecoder::kWhatClosedCaptionData:
+        {
+            sp<ABuffer> buffer;
+            CHECK(msg->findBuffer("buffer", &buffer));
+
+            size_t inbandTracks = 0;
+            if (mSource != NULL) {
+                inbandTracks = mSource->getTrackCount();
+            }
+
+            sendSubtitleData(buffer, inbandTracks);
+            break;
+        }
+
+        case NuPlayer2::CCDecoder::kWhatTrackAdded:
+        {
+            notifyListener(MEDIA2_INFO, MEDIA2_INFO_METADATA_UPDATE, 0);
+
+            break;
+        }
+
+        default:
+            TRESPASS();
+    }
+
+}
+
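+// Parcel layout for MEDIA2_SUBTITLE_DATA: track index (offset by baseIndex so
+// closed-caption tracks follow the in-band tracks), start time, duration, the
+// payload size written twice, then the raw payload bytes.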
+void NuPlayer2::sendSubtitleData(const sp<ABuffer> &buffer, int32_t baseIndex) {
+    int32_t trackIndex;
+    int64_t timeUs, durationUs;
+    CHECK(buffer->meta()->findInt32("trackIndex", &trackIndex));
+    CHECK(buffer->meta()->findInt64("timeUs", &timeUs));
+    CHECK(buffer->meta()->findInt64("durationUs", &durationUs));
+
+    Parcel in;
+    in.writeInt32(trackIndex + baseIndex);
+    in.writeInt64(timeUs);
+    in.writeInt64(durationUs);
+    in.writeInt32(buffer->size());
+    in.writeInt32(buffer->size());
+    in.write(buffer->data(), buffer->size());
+
+    notifyListener(MEDIA2_SUBTITLE_DATA, 0, 0, &in);
+}
+
+void NuPlayer2::sendTimedMetaData(const sp<ABuffer> &buffer) {
+    int64_t timeUs;
+    CHECK(buffer->meta()->findInt64("timeUs", &timeUs));
+
+    Parcel in;
+    in.writeInt64(timeUs);
+    in.writeInt32(buffer->size());
+    in.writeInt32(buffer->size());
+    in.write(buffer->data(), buffer->size());
+
+    notifyListener(MEDIA2_META_DATA, 0, 0, &in);
+}
+
+void NuPlayer2::sendTimedTextData(const sp<ABuffer> &buffer) {
+    const void *data;
+    size_t size = 0;
+    int64_t timeUs;
+    int32_t flag = TextDescriptions::IN_BAND_TEXT_3GPP;
+
+    AString mime;
+    CHECK(buffer->meta()->findString("mime", &mime));
+    CHECK(strcasecmp(mime.c_str(), MEDIA_MIMETYPE_TEXT_3GPP) == 0);
+
+    data = buffer->data();
+    size = buffer->size();
+
+    Parcel parcel;
+    if (size > 0) {
+        CHECK(buffer->meta()->findInt64("timeUs", &timeUs));
+        int32_t global = 0;
+        if (buffer->meta()->findInt32("global", &global) && global) {
+            flag |= TextDescriptions::GLOBAL_DESCRIPTIONS;
+        } else {
+            flag |= TextDescriptions::LOCAL_DESCRIPTIONS;
+        }
+        TextDescriptions::getParcelOfDescriptions(
+                (const uint8_t *)data, size, flag, timeUs / 1000, &parcel);
+    }
+
+    if ((parcel.dataSize() > 0)) {
+        notifyListener(MEDIA2_TIMED_TEXT, 0, 0, &parcel);
+    } else {  // send an empty timed text
+        notifyListener(MEDIA2_TIMED_TEXT, 0, 0);
+    }
+}
+
+const char *NuPlayer2::getDataSourceType() {
+    switch (mDataSourceType) {
+        case DATA_SOURCE_TYPE_HTTP_LIVE:
+            return "HTTPLive";
+
+        case DATA_SOURCE_TYPE_RTSP:
+            return "RTSP";
+
+        case DATA_SOURCE_TYPE_GENERIC_URL:
+            return "GenURL";
+
+        case DATA_SOURCE_TYPE_GENERIC_FD:
+            return "GenFD";
+
+        case DATA_SOURCE_TYPE_MEDIA:
+            return "Media";
+
+        case DATA_SOURCE_TYPE_STREAM:
+            return "Stream";
+
+        case DATA_SOURCE_TYPE_NONE:
+        default:
+            return "None";
+    }
+}
+
+// Modular DRM begin
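+//
+// prepareDrm()/releaseDrm() are synchronous from the caller's point of view
+// but are serialized through the handler. onPrepareDrm() obtains an
+// AMediaCryptoWrapper from the source and keeps it in mCrypto, which
+// instantiateDecoder() later attaches to the decoder format; onReleaseDrm()
+// notifies the source, has both decoders drop their crypto references, then
+// clears mCrypto.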
+status_t NuPlayer2::prepareDrm(const uint8_t uuid[16], const Vector<uint8_t> &drmSessionId)
+{
+    ALOGV("prepareDrm ");
+
+    // Passing to the looper anyway; called in a pre-config prepared state so no race on mCrypto
+    sp<AMessage> msg = new AMessage(kWhatPrepareDrm, this);
+    // synchronous call so just passing the address but with local copies of "const" args
+    uint8_t UUID[16];
+    memcpy(UUID, uuid, sizeof(UUID));
+    Vector<uint8_t> sessionId = drmSessionId;
+    msg->setPointer("uuid", (void*)UUID);
+    msg->setPointer("drmSessionId", (void*)&sessionId);
+
+    sp<AMessage> response;
+    status_t status = msg->postAndAwaitResponse(&response);
+
+    if (status == OK && response != NULL) {
+        CHECK(response->findInt32("status", &status));
+        ALOGV("prepareDrm ret: %d ", status);
+    } else {
+        ALOGE("prepareDrm err: %d", status);
+    }
+
+    return status;
+}
+
+status_t NuPlayer2::releaseDrm()
+{
+    ALOGV("releaseDrm ");
+
+    sp<AMessage> msg = new AMessage(kWhatReleaseDrm, this);
+
+    sp<AMessage> response;
+    status_t status = msg->postAndAwaitResponse(&response);
+
+    if (status == OK && response != NULL) {
+        CHECK(response->findInt32("status", &status));
+        ALOGV("releaseDrm ret: %d ", status);
+    } else {
+        ALOGE("releaseDrm err: %d", status);
+    }
+
+    return status;
+}
+
+status_t NuPlayer2::onPrepareDrm(const sp<AMessage> &msg)
+{
+    // TODO change to ALOGV
+    ALOGD("onPrepareDrm ");
+
+    status_t status = INVALID_OPERATION;
+    if (mSource == NULL) {
+        ALOGE("onPrepareDrm: No source. onPrepareDrm failed with %d.", status);
+        return status;
+    }
+
+    uint8_t *uuid;
+    Vector<uint8_t> *drmSessionId;
+    CHECK(msg->findPointer("uuid", (void**)&uuid));
+    CHECK(msg->findPointer("drmSessionId", (void**)&drmSessionId));
+
+    status = OK;
+    sp<AMediaCryptoWrapper> crypto = NULL;
+
+    status = mSource->prepareDrm(uuid, *drmSessionId, &crypto);
+    if (crypto == NULL) {
+        ALOGE("onPrepareDrm: mSource->prepareDrm failed. status: %d", status);
+        return status;
+    }
+    ALOGV("onPrepareDrm: mSource->prepareDrm succeeded");
+
+    if (mCrypto != NULL) {
+        ALOGE("onPrepareDrm: Unexpected. Already having mCrypto: %p", mCrypto.get());
+        mCrypto.clear();
+    }
+
+    mCrypto = crypto;
+    mIsDrmProtected = true;
+    // TODO change to ALOGV
+    ALOGD("onPrepareDrm: mCrypto: %p", mCrypto.get());
+
+    return status;
+}
+
+status_t NuPlayer2::onReleaseDrm()
+{
+    // TODO change to ALOGV
+    ALOGD("onReleaseDrm ");
+
+    if (!mIsDrmProtected) {
+        ALOGW("onReleaseDrm: Unexpected. mIsDrmProtected is already false.");
+    }
+
+    mIsDrmProtected = false;
+
+    status_t status;
+    if (mCrypto != NULL) {
+        // notifying the source first before removing crypto from codec
+        if (mSource != NULL) {
+            mSource->releaseDrm();
+        }
+
+        status = OK;
+        // first making sure the codecs have released their crypto reference
+        const sp<DecoderBase> &videoDecoder = getDecoder(false/*audio*/);
+        if (videoDecoder != NULL) {
+            status = videoDecoder->releaseCrypto();
+            ALOGV("onReleaseDrm: video decoder ret: %d", status);
+        }
+
+        const sp<DecoderBase> &audioDecoder = getDecoder(true/*audio*/);
+        if (audioDecoder != NULL) {
+            status_t status_audio = audioDecoder->releaseCrypto();
+            if (status == OK) {   // otherwise, returning the first error
+                status = status_audio;
+            }
+            ALOGV("onReleaseDrm: audio decoder ret: %d", status_audio);
+        }
+
+        // TODO change to ALOGV
+        ALOGD("onReleaseDrm: mCrypto: %p", mCrypto.get());
+        mCrypto.clear();
+    } else {   // mCrypto == NULL
+        ALOGE("onReleaseDrm: Unexpected. There is no crypto.");
+        status = INVALID_OPERATION;
+    }
+
+    return status;
+}
+// Modular DRM end
+////////////////////////////////////////////////////////////////////////////////
+
+sp<AMessage> NuPlayer2::Source::getFormat(bool audio) {
+    sp<MetaData> meta = getFormatMeta(audio);
+
+    if (meta == NULL) {
+        return NULL;
+    }
+
+    sp<AMessage> msg = new AMessage;
+
+    if (convertMetaDataToMessage(meta, &msg) == OK) {
+        return msg;
+    }
+    return NULL;
+}
+
+void NuPlayer2::Source::notifyFlagsChanged(uint32_t flags) {
+    sp<AMessage> notify = dupNotify();
+    notify->setInt32("what", kWhatFlagsChanged);
+    notify->setInt32("flags", flags);
+    notify->post();
+}
+
+void NuPlayer2::Source::notifyVideoSizeChanged(const sp<AMessage> &format) {
+    sp<AMessage> notify = dupNotify();
+    notify->setInt32("what", kWhatVideoSizeChanged);
+    notify->setMessage("format", format);
+    notify->post();
+}
+
+void NuPlayer2::Source::notifyPrepared(status_t err) {
+    ALOGV("Source::notifyPrepared %d", err);
+    sp<AMessage> notify = dupNotify();
+    notify->setInt32("what", kWhatPrepared);
+    notify->setInt32("err", err);
+    notify->post();
+}
+
+void NuPlayer2::Source::notifyDrmInfo(const sp<ABuffer> &drmInfoBuffer)
+{
+    ALOGV("Source::notifyDrmInfo");
+
+    sp<AMessage> notify = dupNotify();
+    notify->setInt32("what", kWhatDrmInfo);
+    notify->setBuffer("drmInfo", drmInfoBuffer);
+
+    notify->post();
+}
+
+void NuPlayer2::Source::notifyInstantiateSecureDecoders(const sp<AMessage> &reply) {
+    sp<AMessage> notify = dupNotify();
+    notify->setInt32("what", kWhatInstantiateSecureDecoders);
+    notify->setMessage("reply", reply);
+    notify->post();
+}
+
+void NuPlayer2::Source::onMessageReceived(const sp<AMessage> & /* msg */) {
+    TRESPASS();
+}
+
+}  // namespace android
diff --git a/media/libmedia/nuplayer2/NuPlayer2.h b/media/libmedia/nuplayer2/NuPlayer2.h
new file mode 100644
index 0000000..6dc6442
--- /dev/null
+++ b/media/libmedia/nuplayer2/NuPlayer2.h
@@ -0,0 +1,346 @@
+/*
+ * Copyright 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef NU_PLAYER2_H_
+
+#define NU_PLAYER2_H_
+
+#include <media/AudioResamplerPublic.h>
+#include <media/MediaPlayer2Interface.h>
+#include <media/stagefright/foundation/AHandler.h>
+
+namespace android {
+
+struct ABuffer;
+struct AMediaCryptoWrapper;
+struct AMessage;
+struct AudioPlaybackRate;
+struct AVSyncSettings;
+class IDataSource;
+struct MediaClock;
+struct MediaHTTPService;
+class MetaData;
+struct NuPlayer2Driver;
+
+struct NuPlayer2 : public AHandler {
+    explicit NuPlayer2(pid_t pid, const sp<MediaClock> &mediaClock);
+
+    void setUID(uid_t uid);
+
+    void setDriver(const wp<NuPlayer2Driver> &driver);
+
+    void setDataSourceAsync(const sp<IStreamSource> &source);
+
+    void setDataSourceAsync(
+            const sp<MediaHTTPService> &httpService,
+            const char *url,
+            const KeyedVector<String8, String8> *headers);
+
+    void setDataSourceAsync(int fd, int64_t offset, int64_t length);
+
+    void setDataSourceAsync(const sp<DataSource> &source);
+
+    status_t getBufferingSettings(BufferingSettings* buffering /* nonnull */);
+    status_t setBufferingSettings(const BufferingSettings& buffering);
+
+    void prepareAsync();
+
+    void setVideoSurfaceTextureAsync(
+            const sp<IGraphicBufferProducer> &bufferProducer);
+
+    void setAudioSink(const sp<MediaPlayer2Base::AudioSink> &sink);
+    status_t setPlaybackSettings(const AudioPlaybackRate &rate);
+    status_t getPlaybackSettings(AudioPlaybackRate *rate /* nonnull */);
+    status_t setSyncSettings(const AVSyncSettings &sync, float videoFpsHint);
+    status_t getSyncSettings(AVSyncSettings *sync /* nonnull */, float *videoFps /* nonnull */);
+
+    void start();
+
+    void pause();
+
+    // Will notify the driver through "notifyResetComplete" once finished.
+    void resetAsync();
+
+    // Request a notification when specified media time is reached.
+    status_t notifyAt(int64_t mediaTimeUs);
+
+    // Will notify the driver through "notifySeekComplete" once finished
+    // and needNotify is true.
+    void seekToAsync(
+            int64_t seekTimeUs,
+            MediaPlayer2SeekMode mode = MediaPlayer2SeekMode::SEEK_PREVIOUS_SYNC,
+            bool needNotify = false);
+
+    status_t setVideoScalingMode(int32_t mode);
+    status_t getTrackInfo(Parcel* reply) const;
+    status_t getSelectedTrack(int32_t type, Parcel* reply) const;
+    status_t selectTrack(size_t trackIndex, bool select, int64_t timeUs);
+    status_t getCurrentPosition(int64_t *mediaUs);
+    void getStats(Vector<sp<AMessage> > *mTrackStats);
+
+    sp<MetaData> getFileMeta();
+    float getFrameRate();
+
+    // Modular DRM
+    status_t prepareDrm(const uint8_t uuid[16], const Vector<uint8_t> &drmSessionId);
+    status_t releaseDrm();
+
+    const char *getDataSourceType();
+
+protected:
+    virtual ~NuPlayer2();
+
+    virtual void onMessageReceived(const sp<AMessage> &msg);
+
+public:
+    struct StreamListener;
+    struct Source;
+
+private:
+    struct Decoder;
+    struct DecoderBase;
+    struct DecoderPassThrough;
+    struct CCDecoder;
+    struct GenericSource;
+    struct HTTPLiveSource;
+    struct Renderer;
+    struct RTSPSource;
+    struct StreamingSource;
+    struct Action;
+    struct SeekAction;
+    struct SetSurfaceAction;
+    struct ResumeDecoderAction;
+    struct FlushDecoderAction;
+    struct PostMessageAction;
+    struct SimpleAction;
+
+    enum {
+        kWhatSetDataSource              = '=DaS',
+        kWhatPrepare                    = 'prep',
+        kWhatSetVideoSurface            = '=VSu',
+        kWhatSetAudioSink               = '=AuS',
+        kWhatMoreDataQueued             = 'more',
+        kWhatConfigPlayback             = 'cfPB',
+        kWhatConfigSync                 = 'cfSy',
+        kWhatGetPlaybackSettings        = 'gPbS',
+        kWhatGetSyncSettings            = 'gSyS',
+        kWhatStart                      = 'strt',
+        kWhatScanSources                = 'scan',
+        kWhatVideoNotify                = 'vidN',
+        kWhatAudioNotify                = 'audN',
+        kWhatClosedCaptionNotify        = 'capN',
+        kWhatRendererNotify             = 'renN',
+        kWhatReset                      = 'rset',
+        kWhatNotifyTime                 = 'nfyT',
+        kWhatSeek                       = 'seek',
+        kWhatPause                      = 'paus',
+        kWhatResume                     = 'rsme',
+        kWhatPollDuration               = 'polD',
+        kWhatSourceNotify               = 'srcN',
+        kWhatGetTrackInfo               = 'gTrI',
+        kWhatGetSelectedTrack           = 'gSel',
+        kWhatSelectTrack                = 'selT',
+        kWhatGetBufferingSettings       = 'gBus',
+        kWhatSetBufferingSettings       = 'sBuS',
+        kWhatPrepareDrm                 = 'pDrm',
+        kWhatReleaseDrm                 = 'rDrm',
+    };
+
+    wp<NuPlayer2Driver> mDriver;
+    bool mUIDValid;
+    uid_t mUID;
+    pid_t mPID;
+    const sp<MediaClock> mMediaClock;
+    Mutex mSourceLock;  // guard |mSource|.
+    sp<Source> mSource;
+    uint32_t mSourceFlags;
+    sp<Surface> mSurface;
+    sp<MediaPlayer2Base::AudioSink> mAudioSink;
+    sp<DecoderBase> mVideoDecoder;
+    bool mOffloadAudio;
+    sp<DecoderBase> mAudioDecoder;
+    sp<CCDecoder> mCCDecoder;
+    sp<Renderer> mRenderer;
+    sp<ALooper> mRendererLooper;
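+    // Generation counters for async notifications; each is bumped when the
+    // corresponding component is torn down or recreated so that stale
+    // messages carrying an old generation can be ignored.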
+    int32_t mAudioDecoderGeneration;
+    int32_t mVideoDecoderGeneration;
+    int32_t mRendererGeneration;
+
+    Mutex mPlayingTimeLock;
+    int64_t mLastStartedPlayingTimeNs;
+    void stopPlaybackTimer(const char *where);
+    void startPlaybackTimer(const char *where);
+
+    int64_t mLastStartedRebufferingTimeNs;
+    void startRebufferingTimer();
+    void stopRebufferingTimer(bool exitingPlayback);
+
+    int64_t mPreviousSeekTimeUs;
+
+    List<sp<Action> > mDeferredActions;
+
+    bool mAudioEOS;
+    bool mVideoEOS;
+
+    bool mScanSourcesPending;
+    int32_t mScanSourcesGeneration;
+
+    int32_t mPollDurationGeneration;
+    int32_t mTimedTextGeneration;
+
+    enum FlushStatus {
+        NONE,
+        FLUSHING_DECODER,
+        FLUSHING_DECODER_SHUTDOWN,
+        SHUTTING_DOWN_DECODER,
+        FLUSHED,
+        SHUT_DOWN,
+    };
+
+    enum FlushCommand {
+        FLUSH_CMD_NONE,
+        FLUSH_CMD_FLUSH,
+        FLUSH_CMD_SHUTDOWN,
+    };
+
+    // Status of flush responses from the decoder and renderer,
+    // indexed as mFlushComplete[audio][isDecoder].
+    bool mFlushComplete[2][2];
+
+    FlushStatus mFlushingAudio;
+    FlushStatus mFlushingVideo;
+
+    // True while a decoder resume (after seek) is pending; the driver is
+    // notified of seek completion when this is cleared in finishResume().
+    bool mResumePending;
+
+    int32_t mVideoScalingMode;
+
+    AudioPlaybackRate mPlaybackSettings;
+    AVSyncSettings mSyncSettings;
+    float mVideoFpsHint;
+    bool mStarted;
+    bool mPrepared;
+    bool mResetting;
+    bool mSourceStarted;
+    bool mAudioDecoderError;
+    bool mVideoDecoderError;
+
+    // Actual pause state, either as requested by client or due to buffering.
+    bool mPaused;
+
+    // Pause state as requested by client. Note that if mPausedByClient is
+    // true, mPaused is always true; if mPausedByClient is false, mPaused could
+    // still become true, when we pause internally due to buffering.
+    bool mPausedByClient;
+
+    // Pause state as requested by source (internally) due to buffering
+    bool mPausedForBuffering;
+
+    // Modular DRM
+    sp<AMediaCryptoWrapper> mCrypto;
+    bool mIsDrmProtected;
+
+    typedef enum {
+        DATA_SOURCE_TYPE_NONE,
+        DATA_SOURCE_TYPE_HTTP_LIVE,
+        DATA_SOURCE_TYPE_RTSP,
+        DATA_SOURCE_TYPE_GENERIC_URL,
+        DATA_SOURCE_TYPE_GENERIC_FD,
+        DATA_SOURCE_TYPE_MEDIA,
+        DATA_SOURCE_TYPE_STREAM,
+    } DATA_SOURCE_TYPE;
+
+    std::atomic<DATA_SOURCE_TYPE> mDataSourceType;
+
+    inline const sp<DecoderBase> &getDecoder(bool audio) {
+        return audio ? mAudioDecoder : mVideoDecoder;
+    }
+
+    inline void clearFlushComplete() {
+        mFlushComplete[0][0] = false;
+        mFlushComplete[0][1] = false;
+        mFlushComplete[1][0] = false;
+        mFlushComplete[1][1] = false;
+    }
+
+    void tryOpenAudioSinkForOffload(
+            const sp<AMessage> &format, const sp<MetaData> &audioMeta, bool hasVideo);
+    void closeAudioSink();
+    void restartAudio(
+            int64_t currentPositionUs, bool forceNonOffload, bool needsToCreateAudioDecoder);
+    void determineAudioModeChange(const sp<AMessage> &audioFormat);
+
+    status_t instantiateDecoder(
+            bool audio, sp<DecoderBase> *decoder, bool checkAudioModeChange = true);
+
+    status_t onInstantiateSecureDecoders();
+
+    void updateVideoSize(
+            const sp<AMessage> &inputFormat,
+            const sp<AMessage> &outputFormat = NULL);
+
+    void notifyListener(int msg, int ext1, int ext2, const Parcel *in = NULL);
+
+    void handleFlushComplete(bool audio, bool isDecoder);
+    void finishFlushIfPossible();
+
+    void onStart(
+            int64_t startPositionUs = -1,
+            MediaPlayer2SeekMode mode = MediaPlayer2SeekMode::SEEK_PREVIOUS_SYNC);
+    void onResume();
+    void onPause();
+
+    bool audioDecoderStillNeeded();
+
+    void flushDecoder(bool audio, bool needShutdown);
+
+    void finishResume();
+    void notifyDriverSeekComplete();
+
+    void postScanSources();
+
+    void schedulePollDuration();
+    void cancelPollDuration();
+
+    void processDeferredActions();
+
+    void performSeek(int64_t seekTimeUs, MediaPlayer2SeekMode mode);
+    void performDecoderFlush(FlushCommand audio, FlushCommand video);
+    void performReset();
+    void performScanSources();
+    void performSetSurface(const sp<Surface> &wrapper);
+    void performResumeDecoders(bool needNotify);
+
+    void onSourceNotify(const sp<AMessage> &msg);
+    void onClosedCaptionNotify(const sp<AMessage> &msg);
+
+    void queueDecoderShutdown(
+            bool audio, bool video, const sp<AMessage> &reply);
+
+    void sendSubtitleData(const sp<ABuffer> &buffer, int32_t baseIndex);
+    void sendTimedMetaData(const sp<ABuffer> &buffer);
+    void sendTimedTextData(const sp<ABuffer> &buffer);
+
+    void writeTrackInfo(Parcel* reply, const sp<AMessage>& format) const;
+
+    status_t onPrepareDrm(const sp<AMessage> &msg);
+    status_t onReleaseDrm();
+
+    DISALLOW_EVIL_CONSTRUCTORS(NuPlayer2);
+};
+
+}  // namespace android
+
+#endif  // NU_PLAYER2_H_
diff --git a/media/libmedia/nuplayer2/NuPlayer2CCDecoder.cpp b/media/libmedia/nuplayer2/NuPlayer2CCDecoder.cpp
new file mode 100644
index 0000000..e4afd5b
--- /dev/null
+++ b/media/libmedia/nuplayer2/NuPlayer2CCDecoder.cpp
@@ -0,0 +1,579 @@
+/*
+ * Copyright 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "NuPlayer2CCDecoder"
+#include <utils/Log.h>
+#include <inttypes.h>
+
+#include "NuPlayer2CCDecoder.h"
+
+#include <media/stagefright/foundation/ABitReader.h>
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/foundation/avc_utils.h>
+#include <media/stagefright/MediaDefs.h>
+
+namespace android {
+
+// In CEA-708B, the maximum bandwidth of CC is set to 9600bps.
+static const size_t kMaxBandwithSizeBytes = 9600 / 8;
+
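+// Holds one closed-caption byte pair: mType is the cc_type value (0 for the
+// field carrying CC1/CC2, 1 for the field carrying CC3/CC4) and mData1/mData2
+// are the two data bytes.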
+struct CCData {
+    CCData(uint8_t type, uint8_t data1, uint8_t data2)
+        : mType(type), mData1(data1), mData2(data2) {
+    }
+    bool getChannel(size_t *channel) const {
+        if (mData1 >= 0x10 && mData1 <= 0x1f) {
+            *channel = (mData1 >= 0x18 ? 1 : 0) + (mType ? 2 : 0);
+            return true;
+        }
+        return false;
+    }
+
+    uint8_t mType;
+    uint8_t mData1;
+    uint8_t mData2;
+};
+
+static bool isNullPad(CCData *cc) {
+    return cc->mData1 < 0x10 && cc->mData2 < 0x10;
+}
+
+static void dumpBytePair(const sp<ABuffer> &ccBuf) __attribute__ ((unused));
+static void dumpBytePair(const sp<ABuffer> &ccBuf) {
+    size_t offset = 0;
+    AString out;
+
+    while (offset < ccBuf->size()) {
+        char tmp[128];
+
+        CCData *cc = (CCData *) (ccBuf->data() + offset);
+
+        if (isNullPad(cc)) {
+            // 1 null pad or XDS metadata, ignore
+            offset += sizeof(CCData);
+            continue;
+        }
+
+        if (cc->mData1 >= 0x20 && cc->mData1 <= 0x7f) {
+            // 2 basic chars
+            snprintf(tmp, sizeof(tmp), "[%d]Basic: %c %c", cc->mType, cc->mData1, cc->mData2);
+        } else if ((cc->mData1 == 0x11 || cc->mData1 == 0x19)
+                 && cc->mData2 >= 0x30 && cc->mData2 <= 0x3f) {
+            // 1 special char
+            snprintf(tmp, sizeof(tmp), "[%d]Special: %02x %02x", cc->mType, cc->mData1, cc->mData2);
+        } else if ((cc->mData1 == 0x12 || cc->mData1 == 0x1A)
+                 && cc->mData2 >= 0x20 && cc->mData2 <= 0x3f){
+            // 1 Spanish/French char
+            snprintf(tmp, sizeof(tmp), "[%d]Spanish: %02x %02x", cc->mType, cc->mData1, cc->mData2);
+        } else if ((cc->mData1 == 0x13 || cc->mData1 == 0x1B)
+                 && cc->mData2 >= 0x20 && cc->mData2 <= 0x3f){
+            // 1 Portuguese/German/Danish char
+            snprintf(tmp, sizeof(tmp), "[%d]German: %02x %02x", cc->mType, cc->mData1, cc->mData2);
+        } else if ((cc->mData1 == 0x11 || cc->mData1 == 0x19)
+                 && cc->mData2 >= 0x20 && cc->mData2 <= 0x2f){
+            // Mid-Row Codes (Table 69)
+            snprintf(tmp, sizeof(tmp), "[%d]Mid-row: %02x %02x", cc->mType, cc->mData1, cc->mData2);
+        } else if (((cc->mData1 == 0x14 || cc->mData1 == 0x1c)
+                  && cc->mData2 >= 0x20 && cc->mData2 <= 0x2f)
+                  ||
+                   ((cc->mData1 == 0x17 || cc->mData1 == 0x1f)
+                  && cc->mData2 >= 0x21 && cc->mData2 <= 0x23)){
+            // Misc Control Codes (Table 70)
+            snprintf(tmp, sizeof(tmp), "[%d]Ctrl: %02x %02x", cc->mType, cc->mData1, cc->mData2);
+        } else if ((cc->mData1 & 0x70) == 0x10
+                && (cc->mData2 & 0x40) == 0x40
+                && ((cc->mData1 & 0x07) || !(cc->mData2 & 0x20)) ) {
+            // Preamble Address Codes (Table 71)
+            snprintf(tmp, sizeof(tmp), "[%d]PAC: %02x %02x", cc->mType, cc->mData1, cc->mData2);
+        } else {
+            snprintf(tmp, sizeof(tmp), "[%d]Invalid: %02x %02x", cc->mType, cc->mData1, cc->mData2);
+        }
+
+        if (out.size() > 0) {
+            out.append(", ");
+        }
+
+        out.append(tmp);
+
+        offset += sizeof(CCData);
+    }
+
+    ALOGI("%s", out.c_str());
+}
+
+NuPlayer2::CCDecoder::CCDecoder(const sp<AMessage> &notify)
+    : mNotify(notify),
+      mSelectedTrack(-1),
+      mDTVCCPacket(new ABuffer(kMaxBandwidthSizeBytes)) {
+    mDTVCCPacket->setRange(0, 0);
+
+    // In CEA-608, packets with cc_type 0 (field 1) carry channels CC1 and CC2, and packets
+    // with cc_type 1 (field 2) carry channels CC3 and CC4. This array tracks the channel
+    // currently being transmitted for each cc_type value.
+    mLine21Channels[0] = 0; // CC1
+    mLine21Channels[1] = 2; // CC3
+}
+
+size_t NuPlayer2::CCDecoder::getTrackCount() const {
+    return mTracks.size();
+}
+
+sp<AMessage> NuPlayer2::CCDecoder::getTrackInfo(size_t index) const {
+    if (!isTrackValid(index)) {
+        return NULL;
+    }
+
+    sp<AMessage> format = new AMessage();
+
+    CCTrack track = mTracks[index];
+
+    format->setInt32("type", MEDIA_TRACK_TYPE_SUBTITLE);
+    format->setString("language", "und");
+
+    switch (track.mTrackType) {
+        case kTrackTypeCEA608:
+            format->setString("mime", MEDIA_MIMETYPE_TEXT_CEA_608);
+            break;
+        case kTrackTypeCEA708:
+            format->setString("mime", MEDIA_MIMETYPE_TEXT_CEA_708);
+            break;
+        default:
+            ALOGE("Unknown track type: %d", track.mTrackType);
+            return NULL;
+    }
+
+    // For CEA-608, CC1 (field 0, channel 0) is the auto-select default.
+    bool isDefaultAuto = track.mTrackType == kTrackTypeCEA608
+            && track.mTrackChannel == 0;
+    // For CEA-708, Primary Caption Service.
+    bool isDefaultOnly = track.mTrackType == kTrackTypeCEA708
+            && track.mTrackChannel == 1;
+    format->setInt32("auto", isDefaultAuto);
+    format->setInt32("default", isDefaultAuto || isDefaultOnly);
+    format->setInt32("forced", 0);
+
+    return format;
+}
+
+status_t NuPlayer2::CCDecoder::selectTrack(size_t index, bool select) {
+    if (!isTrackValid(index)) {
+        return BAD_VALUE;
+    }
+
+    if (select) {
+        if (mSelectedTrack == (ssize_t)index) {
+            ALOGE("track %zu already selected", index);
+            return BAD_VALUE;
+        }
+        ALOGV("selected track %zu", index);
+        mSelectedTrack = index;
+    } else {
+        if (mSelectedTrack != (ssize_t)index) {
+            ALOGE("track %zu is not selected", index);
+            return BAD_VALUE;
+        }
+        ALOGV("unselected track %zu", index);
+        mSelectedTrack = -1;
+    }
+
+    // Clear the previous track payloads
+    mCCMap.clear();
+
+    return OK;
+}
+
+bool NuPlayer2::CCDecoder::isSelected() const {
+    return mSelectedTrack >= 0 && mSelectedTrack < (int32_t)getTrackCount();
+}
+
+bool NuPlayer2::CCDecoder::isTrackValid(size_t index) const {
+    return index < getTrackCount();
+}
+
+// returns true if a new CC track is found
+bool NuPlayer2::CCDecoder::extractFromSEI(const sp<ABuffer> &accessUnit) {
+    sp<ABuffer> sei;
+    if (!accessUnit->meta()->findBuffer("sei", &sei) || sei == NULL) {
+        return false;
+    }
+
+    int64_t timeUs;
+    CHECK(accessUnit->meta()->findInt64("timeUs", &timeUs));
+
+    bool trackAdded = false;
+
+    const NALPosition *nal = (NALPosition *)sei->data();
+
+    for (size_t i = 0; i < sei->size() / sizeof(NALPosition); ++i, ++nal) {
+        trackAdded |= parseSEINalUnit(
+                timeUs, accessUnit->data() + nal->nalOffset, nal->nalSize);
+    }
+
+    return trackAdded;
+}
+
+// returns true if a new CC track is found
+bool NuPlayer2::CCDecoder::parseSEINalUnit(int64_t timeUs, const uint8_t *data, size_t size) {
+    unsigned nalType = data[0] & 0x1f;
+
+    // the buffer should only have SEI in it
+    if (nalType != 6) {
+        return false;
+    }
+
+    bool trackAdded = false;
+    NALBitReader br(data + 1, size - 1);
+
+    // sei_message()
+    while (br.atLeastNumBitsLeft(16)) { // at least 16-bit for sei_message()
+        uint32_t payload_type = 0;
+        size_t payload_size = 0;
+        uint8_t last_byte;
+
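+        // payload_type and payload_size use the H.264 ff_byte coding (7.3.2.3.1): every
+        // 0xFF byte adds 255 and the value continues until a byte below 0xFF terminates it.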
+        do {
+            last_byte = br.getBits(8);
+            payload_type += last_byte;
+        } while (last_byte == 0xFF);
+
+        do {
+            last_byte = br.getBits(8);
+            payload_size += last_byte;
+        } while (last_byte == 0xFF);
+
+        if (payload_size > SIZE_MAX / 8
+                || !br.atLeastNumBitsLeft(payload_size * 8)) {
+            ALOGV("Malformed SEI payload");
+            break;
+        }
+
+        // sei_payload()
+        if (payload_type == 4) {
+            bool isCC = false;
+            if (payload_size > 1 + 2 + 4 + 1) {
+                // user_data_registered_itu_t_t35()
+
+                // ATSC A/72: 6.4.2
+                uint8_t itu_t_t35_country_code = br.getBits(8);
+                uint16_t itu_t_t35_provider_code = br.getBits(16);
+                uint32_t user_identifier = br.getBits(32);
+                uint8_t user_data_type_code = br.getBits(8);
+
+                payload_size -= 1 + 2 + 4 + 1;
+
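+                // ITU-T T.35 country code 0xB5 (USA), provider code 0x0031 (ATSC),
+                // user_identifier 'GA94' and user_data_type_code 0x3 identify ATSC A/53
+                // closed caption data (MPEG_cc_data) carried in this SEI payload.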
+                isCC = itu_t_t35_country_code == 0xB5
+                        && itu_t_t35_provider_code == 0x0031
+                        && user_identifier == 'GA94'
+                        && user_data_type_code == 0x3;
+            }
+
+            if (isCC && payload_size > 2) {
+                trackAdded |= parseMPEGCCData(timeUs, br.data(), br.numBitsLeft() / 8);
+            } else {
+                ALOGV("Malformed SEI payload type 4");
+            }
+        } else {
+            ALOGV("Unsupported SEI payload type %d", payload_type);
+        }
+
+        // skipping remaining bits of this payload
+        br.skipBits(payload_size * 8);
+    }
+
+    return trackAdded;
+}
+
+// returns true if a new CC track is found
+bool NuPlayer2::CCDecoder::extractFromMPEGUserData(const sp<ABuffer> &accessUnit) {
+    sp<ABuffer> mpegUserData;
+    if (!accessUnit->meta()->findBuffer("mpegUserData", &mpegUserData)
+            || mpegUserData == NULL) {
+        return false;
+    }
+
+    int64_t timeUs;
+    CHECK(accessUnit->meta()->findInt64("timeUs", &timeUs));
+
+    bool trackAdded = false;
+
+    const size_t *userData = (size_t *)mpegUserData->data();
+
+    for (size_t i = 0; i < mpegUserData->size() / sizeof(size_t); ++i) {
+        trackAdded |= parseMPEGUserDataUnit(
+                timeUs, accessUnit->data() + userData[i], accessUnit->size() - userData[i]);
+    }
+
+    return trackAdded;
+}
+
+// returns true if a new CC track is found
+bool NuPlayer2::CCDecoder::parseMPEGUserDataUnit(int64_t timeUs, const uint8_t *data, size_t size) {
+    if (size < 9) {
+        // Need the 4-byte user_data start code, the 4-byte user_identifier and the
+        // 1-byte user_data_type_code before any cc payload.
+        return false;
+    }
+
+    ABitReader br(data + 4, 5);
+
+    uint32_t user_identifier = br.getBits(32);
+    uint8_t user_data_type = br.getBits(8);
+
+    if (user_identifier == 'GA94' && user_data_type == 0x3) {
+        return parseMPEGCCData(timeUs, data + 9, size - 9);
+    }
+
+    return false;
+}
+
+// returns true if a new CC track is found
+bool NuPlayer2::CCDecoder::parseMPEGCCData(int64_t timeUs, const uint8_t *data, size_t size) {
+    bool trackAdded = false;
+
+    // MPEG_cc_data()
+    // ATSC A/53 Part 4: 6.2.3.1
+    ABitReader br(data, size);
+
+    if (br.numBitsLeft() <= 16) {
+        return false;
+    }
+
+    br.skipBits(1);
+    bool process_cc_data_flag = br.getBits(1);
+    br.skipBits(1);
+    size_t cc_count = br.getBits(5);
+    br.skipBits(8);
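+    // The fields skipped above (per CEA-708 cc_data(): process_em_data_flag,
+    // additional_data_flag and em_data) are not needed here.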
+
+    if (!process_cc_data_flag || 3 * 8 * cc_count >= br.numBitsLeft()) {
+        return false;
+    }
+
+    sp<ABuffer> line21CCBuf = NULL;
+
+    for (size_t i = 0; i < cc_count; ++i) {
+        br.skipBits(5);
+        bool cc_valid = br.getBits(1);
+        uint8_t cc_type = br.getBits(2);
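+        // cc_type 0/1 carry a CEA-608 byte pair for field 1/2; cc_type 3 starts a new
+        // DTVCC (CEA-708) packet and cc_type 2 continues the packet being assembled.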
+
+        if (cc_valid) {
+            if (cc_type == 3) {
+                if (mDTVCCPacket->size() > 0) {
+                    trackAdded |= parseDTVCCPacket(
+                            timeUs, mDTVCCPacket->data(), mDTVCCPacket->size());
+                    mDTVCCPacket->setRange(0, 0);
+                }
+                memcpy(mDTVCCPacket->data() + mDTVCCPacket->size(), br.data(), 2);
+                mDTVCCPacket->setRange(0, mDTVCCPacket->size() + 2);
+                br.skipBits(16);
+            } else if (mDTVCCPacket->size() > 0 && cc_type == 2) {
+                memcpy(mDTVCCPacket->data() + mDTVCCPacket->size(), br.data(), 2);
+                mDTVCCPacket->setRange(0, mDTVCCPacket->size() + 2);
+                br.skipBits(16);
+            } else if (cc_type == 0 || cc_type == 1) {
+                uint8_t cc_data_1 = br.getBits(8) & 0x7f;
+                uint8_t cc_data_2 = br.getBits(8) & 0x7f;
+
+                CCData cc(cc_type, cc_data_1, cc_data_2);
+
+                if (isNullPad(&cc)) {
+                    continue;
+                }
+
+                size_t channel;
+                if (cc.getChannel(&channel)) {
+                    mLine21Channels[cc_type] = channel;
+
+                    // create a new track if it does not exist.
+                    getTrackIndex(kTrackTypeCEA608, channel, &trackAdded);
+                }
+
+                if (isSelected() && mTracks[mSelectedTrack].mTrackType == kTrackTypeCEA608
+                        && mTracks[mSelectedTrack].mTrackChannel == mLine21Channels[cc_type]) {
+                    if (line21CCBuf == NULL) {
+                        line21CCBuf = new ABuffer((cc_count - i) * sizeof(CCData));
+                        line21CCBuf->setRange(0, 0);
+                    }
+                    memcpy(line21CCBuf->data() + line21CCBuf->size(), &cc, sizeof(cc));
+                    line21CCBuf->setRange(0, line21CCBuf->size() + sizeof(CCData));
+                }
+            } else {
+                br.skipBits(16);
+            }
+        } else {
+            if ((cc_type == 3 || cc_type == 2) && mDTVCCPacket->size() > 0) {
+                trackAdded |= parseDTVCCPacket(timeUs, mDTVCCPacket->data(), mDTVCCPacket->size());
+                mDTVCCPacket->setRange(0, 0);
+            }
+            br.skipBits(16);
+        }
+    }
+
+    if (isSelected() && mTracks[mSelectedTrack].mTrackType == kTrackTypeCEA608
+            && line21CCBuf != NULL && line21CCBuf->size() > 0) {
+        mCCMap.add(timeUs, line21CCBuf);
+    }
+
+    return trackAdded;
+}
+
+// returns true if a new CC track is found
+bool NuPlayer2::CCDecoder::parseDTVCCPacket(int64_t timeUs, const uint8_t *data, size_t size) {
+    // CEA-708B 5 DTVCC Packet Layer.
+    ABitReader br(data, size);
+    br.skipBits(2);
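+    // The two bits skipped are sequence_number. A packet_size_code of 0 means 64, and the
+    // total packet length in bytes (including this header byte) equals packet_size_code * 2.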
+
+    size_t packet_size = br.getBits(6);
+    if (packet_size == 0) packet_size = 64;
+    packet_size *= 2;
+
+    if (size != packet_size) {
+        return false;
+    }
+
+    bool trackAdded = false;
+
+    while (br.numBitsLeft() >= 16) {
+        // CEA-708B Figure 5 and 6.
+        uint8_t service_number = br.getBits(3);
+        size_t block_size = br.getBits(5);
+
+        // CEA-708B: service_number 7 in the standard service block header signals an
+        // extended header; the real service number (7..63) follows in the next byte.
+        if (service_number == 7) {
+            br.skipBits(2);
+            service_number = br.getBits(6);
+
+            if (service_number < 7) {
+                return trackAdded;
+            }
+        }
+
+        if (br.numBitsLeft() < block_size * 8) {
+            return trackAdded;
+        }
+
+        if (block_size > 0) {
+            size_t trackIndex = getTrackIndex(kTrackTypeCEA708, service_number, &trackAdded);
+            if (mSelectedTrack == (ssize_t)trackIndex) {
+                sp<ABuffer> ccPacket = new ABuffer(block_size);
+                memcpy(ccPacket->data(), br.data(), block_size);
+                mCCMap.add(timeUs, ccPacket);
+            }
+        }
+        br.skipBits(block_size * 8);
+    }
+
+    return trackAdded;
+}
+
+// Returns the track index for the given type and channel.
+// If the track does not exist, creates a new one and sets *trackAdded to true.
+size_t NuPlayer2::CCDecoder::getTrackIndex(
+        int32_t trackType, size_t channel, bool *trackAdded) {
+    CCTrack track(trackType, channel);
+    ssize_t index = mTrackIndices.indexOfKey(track);
+
+    if (index < 0) {
+        // A new track is added.
+        index = mTracks.size();
+        mTrackIndices.add(track, index);
+        mTracks.add(track);
+        *trackAdded = true;
+        return index;
+    }
+
+    return mTrackIndices.valueAt(index);
+}
+
+void NuPlayer2::CCDecoder::decode(const sp<ABuffer> &accessUnit) {
+    if (extractFromMPEGUserData(accessUnit) || extractFromSEI(accessUnit)) {
+        sp<AMessage> msg = mNotify->dup();
+        msg->setInt32("what", kWhatTrackAdded);
+        msg->post();
+    }
+    // TODO: extract CC from other sources
+}
+
+void NuPlayer2::CCDecoder::display(int64_t timeUs) {
+    if (!isSelected()) {
+        return;
+    }
+
+    ssize_t index = mCCMap.indexOfKey(timeUs);
+    if (index < 0) {
+        ALOGV("cc for timestamp %" PRId64 " not found", timeUs);
+        return;
+    }
+
+    sp<ABuffer> ccBuf;
+
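+    // If earlier entries were never displayed (their frames were dropped or rendered
+    // without a display() call), merge them with the current entry so the caption byte
+    // pairs are still delivered in order.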
+    if (index == 0) {
+        ccBuf = mCCMap.valueAt(index);
+    } else {
+        size_t size = 0;
+
+        for (ssize_t i = 0; i <= index; ++i) {
+            size += mCCMap.valueAt(i)->size();
+        }
+
+        ccBuf = new ABuffer(size);
+        ccBuf->setRange(0, 0);
+
+        for (ssize_t i = 0; i <= index; ++i) {
+            sp<ABuffer> buf = mCCMap.valueAt(i);
+            memcpy(ccBuf->data() + ccBuf->size(), buf->data(), buf->size());
+            ccBuf->setRange(0, ccBuf->size() + buf->size());
+        }
+    }
+
+    if (ccBuf->size() > 0) {
+#if 0
+        dumpBytePair(ccBuf);
+#endif
+
+        ccBuf->meta()->setInt32("trackIndex", mSelectedTrack);
+        ccBuf->meta()->setInt64("timeUs", timeUs);
+        ccBuf->meta()->setInt64("durationUs", 0ll);
+
+        sp<AMessage> msg = mNotify->dup();
+        msg->setInt32("what", kWhatClosedCaptionData);
+        msg->setBuffer("buffer", ccBuf);
+        msg->post();
+    }
+
+    // remove all entries before timeUs
+    mCCMap.removeItemsAt(0, index + 1);
+}
+
+void NuPlayer2::CCDecoder::flush() {
+    mCCMap.clear();
+    mDTVCCPacket->setRange(0, 0);
+}
+
+int32_t NuPlayer2::CCDecoder::CCTrack::compare(const NuPlayer2::CCDecoder::CCTrack& rhs) const {
+    int32_t cmp = mTrackType - rhs.mTrackType;
+    if (cmp != 0) return cmp;
+    return mTrackChannel - rhs.mTrackChannel;
+}
+
+bool NuPlayer2::CCDecoder::CCTrack::operator<(const NuPlayer2::CCDecoder::CCTrack& rhs) const {
+    return compare(rhs) < 0;
+}
+
+bool NuPlayer2::CCDecoder::CCTrack::operator==(const NuPlayer2::CCDecoder::CCTrack& rhs) const {
+    return compare(rhs) == 0;
+}
+
+bool NuPlayer2::CCDecoder::CCTrack::operator!=(const NuPlayer2::CCDecoder::CCTrack& rhs) const {
+    return compare(rhs) != 0;
+}
+
+}  // namespace android
+
diff --git a/media/libmedia/nuplayer2/NuPlayer2CCDecoder.h b/media/libmedia/nuplayer2/NuPlayer2CCDecoder.h
new file mode 100644
index 0000000..57d5ea2
--- /dev/null
+++ b/media/libmedia/nuplayer2/NuPlayer2CCDecoder.h
@@ -0,0 +1,97 @@
+/*
+ * Copyright 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef NUPLAYER2_CCDECODER_H_
+
+#define NUPLAYER2_CCDECODER_H_
+
+#include "NuPlayer2.h"
+
+namespace android {
+
+struct NuPlayer2::CCDecoder : public RefBase {
+    enum {
+        kWhatClosedCaptionData,
+        kWhatTrackAdded,
+    };
+
+    enum {
+        kTrackTypeCEA608,
+        kTrackTypeCEA708,
+    };
+
+    explicit CCDecoder(const sp<AMessage> &notify);
+
+    size_t getTrackCount() const;
+    sp<AMessage> getTrackInfo(size_t index) const;
+    status_t selectTrack(size_t index, bool select);
+    bool isSelected() const;
+    void decode(const sp<ABuffer> &accessUnit);
+    void display(int64_t timeUs);
+    void flush();
+
+private:
+    // CC track identifier.
+    struct CCTrack {
+        CCTrack() : mTrackType(0), mTrackChannel(0) { }
+
+        CCTrack(const int32_t trackType, const size_t trackChannel)
+            : mTrackType(trackType), mTrackChannel(trackChannel) { }
+
+        int32_t mTrackType;
+        size_t mTrackChannel;
+
+        // CCTracks are ordered so they can serve as keys in the track-to-index map,
+        // which is used to look up the matching track when CC data arrives.
+        int32_t compare(const NuPlayer2::CCDecoder::CCTrack& rhs) const;
+        inline bool operator<(const NuPlayer2::CCDecoder::CCTrack& rhs) const;
+        inline bool operator==(const NuPlayer2::CCDecoder::CCTrack& rhs) const;
+        inline bool operator!=(const NuPlayer2::CCDecoder::CCTrack& rhs) const;
+    };
+
+    sp<AMessage> mNotify;
+    KeyedVector<int64_t, sp<ABuffer> > mCCMap;
+    ssize_t mSelectedTrack;
+    KeyedVector<CCTrack, size_t> mTrackIndices;
+    Vector<CCTrack> mTracks;
+
+    // CEA-608 closed caption
+    size_t mLine21Channels[2]; // The current channels of NTSC_CC_FIELD_{1, 2}
+
+    // CEA-708 closed caption
+    sp<ABuffer> mDTVCCPacket;
+
+    bool isTrackValid(size_t index) const;
+    size_t getTrackIndex(int32_t trackType, size_t channel, bool *trackAdded);
+
+    // Extract from H.264 SEIs
+    bool extractFromSEI(const sp<ABuffer> &accessUnit);
+    bool parseSEINalUnit(int64_t timeUs, const uint8_t *data, size_t size);
+
+    // Extract from MPEG user data
+    bool extractFromMPEGUserData(const sp<ABuffer> &accessUnit);
+    bool parseMPEGUserDataUnit(int64_t timeUs, const uint8_t *data, size_t size);
+
+    // Extract CC tracks from MPEG_cc_data
+    bool parseMPEGCCData(int64_t timeUs, const uint8_t *data, size_t size);
+    bool parseDTVCCPacket(int64_t timeUs, const uint8_t *data, size_t size);
+
+    DISALLOW_EVIL_CONSTRUCTORS(CCDecoder);
+};
+
+}  // namespace android
+
+#endif  // NUPLAYER2_CCDECODER_H_
diff --git a/media/libmedia/nuplayer2/NuPlayer2Decoder.cpp b/media/libmedia/nuplayer2/NuPlayer2Decoder.cpp
new file mode 100644
index 0000000..18207b0
--- /dev/null
+++ b/media/libmedia/nuplayer2/NuPlayer2Decoder.cpp
@@ -0,0 +1,1283 @@
+/*
+ * Copyright 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "NuPlayer2Decoder"
+#include <utils/Log.h>
+#include <inttypes.h>
+
+#include <algorithm>
+
+#include "NdkWrapper.h"
+#include "NuPlayer2CCDecoder.h"
+#include "NuPlayer2Decoder.h"
+#include "NuPlayer2Drm.h"
+#include "NuPlayer2Renderer.h"
+#include "NuPlayer2Source.h"
+
+#include <cutils/properties.h>
+#include <media/MediaCodecBuffer.h>
+#include <media/NdkMediaCodec.h>
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/foundation/avc_utils.h>
+#include <media/stagefright/MediaBuffer.h>
+#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/MediaErrors.h>
+#include <media/stagefright/SurfaceUtils.h>
+#include <gui/Surface.h>
+
+#include "ATSParser.h"
+
+namespace android {
+
+static float kDisplayRefreshingRate = 60.f; // TODO: get this from the display
+
+// The default total video frame rate of a stream when that info is not available from
+// the source.
+static float kDefaultVideoFrameRateTotal = 30.f;
+
+static inline bool getAudioDeepBufferSetting() {
+    return property_get_bool("media.stagefright.audio.deep", false /* default_value */);
+}
+
+NuPlayer2::Decoder::Decoder(
+        const sp<AMessage> &notify,
+        const sp<Source> &source,
+        pid_t pid,
+        uid_t uid,
+        const sp<Renderer> &renderer,
+        const sp<Surface> &surface,
+        const sp<CCDecoder> &ccDecoder)
+    : DecoderBase(notify),
+      mSurface(surface),
+      mSource(source),
+      mRenderer(renderer),
+      mCCDecoder(ccDecoder),
+      mPid(pid),
+      mUid(uid),
+      mSkipRenderingUntilMediaTimeUs(-1ll),
+      mNumFramesTotal(0ll),
+      mNumInputFramesDropped(0ll),
+      mNumOutputFramesDropped(0ll),
+      mVideoWidth(0),
+      mVideoHeight(0),
+      mIsAudio(true),
+      mIsVideoAVC(false),
+      mIsSecure(false),
+      mIsEncrypted(false),
+      mIsEncryptedObservedEarlier(false),
+      mFormatChangePending(false),
+      mTimeChangePending(false),
+      mFrameRateTotal(kDefaultVideoFrameRateTotal),
+      mPlaybackSpeed(1.0f),
+      mNumVideoTemporalLayerTotal(1), // decode all layers
+      mNumVideoTemporalLayerAllowed(1),
+      mCurrentMaxVideoTemporalLayerId(0),
+      mResumePending(false),
+      mComponentName("decoder") {
+    mVideoTemporalLayerAggregateFps[0] = mFrameRateTotal;
+}
+
+NuPlayer2::Decoder::~Decoder() {
+    // Stop the looper first, since mCodec may still be accessed on mDecoderLooper's thread.
+    stopLooper();
+    if (mCodec != NULL) {
+        mCodec->release();
+    }
+    releaseAndResetMediaBuffers();
+}
+
+sp<AMessage> NuPlayer2::Decoder::getStats() const {
+    mStats->setInt64("frames-total", mNumFramesTotal);
+    mStats->setInt64("frames-dropped-input", mNumInputFramesDropped);
+    mStats->setInt64("frames-dropped-output", mNumOutputFramesDropped);
+    return mStats;
+}
+
+status_t NuPlayer2::Decoder::setVideoSurface(const sp<Surface> &surface) {
+    if (surface == NULL || ADebug::isExperimentEnabled("legacy-setsurface")) {
+        return BAD_VALUE;
+    }
+
+    sp<AMessage> msg = new AMessage(kWhatSetVideoSurface, this);
+
+    msg->setObject("surface", surface);
+    sp<AMessage> response;
+    status_t err = msg->postAndAwaitResponse(&response);
+    if (err == OK && response != NULL) {
+        CHECK(response->findInt32("err", &err));
+    }
+    return err;
+}
+
+void NuPlayer2::Decoder::onMessageReceived(const sp<AMessage> &msg) {
+    ALOGV("[%s] onMessage: %s", mComponentName.c_str(), msg->debugString().c_str());
+
+    switch (msg->what()) {
+        case kWhatCodecNotify:
+        {
+            int32_t cbID;
+            CHECK(msg->findInt32("callbackID", &cbID));
+
+            ALOGV("[%s] kWhatCodecNotify: cbID = %d, paused = %d",
+                    mIsAudio ? "audio" : "video", cbID, mPaused);
+
+            if (mPaused) {
+                break;
+            }
+
+            switch (cbID) {
+                case AMediaCodecWrapper::CB_INPUT_AVAILABLE:
+                {
+                    int32_t index;
+                    CHECK(msg->findInt32("index", &index));
+
+                    handleAnInputBuffer(index);
+                    break;
+                }
+
+                case AMediaCodecWrapper::CB_OUTPUT_AVAILABLE:
+                {
+                    int32_t index;
+                    size_t offset;
+                    size_t size;
+                    int64_t timeUs;
+                    int32_t flags;
+
+                    CHECK(msg->findInt32("index", &index));
+                    CHECK(msg->findSize("offset", &offset));
+                    CHECK(msg->findSize("size", &size));
+                    CHECK(msg->findInt64("timeUs", &timeUs));
+                    CHECK(msg->findInt32("flags", &flags));
+
+                    handleAnOutputBuffer(index, offset, size, timeUs, flags);
+                    break;
+                }
+
+                case AMediaCodecWrapper::CB_OUTPUT_FORMAT_CHANGED:
+                {
+                    sp<AMessage> format;
+                    CHECK(msg->findMessage("format", &format));
+
+                    handleOutputFormatChange(format);
+                    break;
+                }
+
+                case AMediaCodecWrapper::CB_ERROR:
+                {
+                    status_t err;
+                    CHECK(msg->findInt32("err", &err));
+                    ALOGE("Decoder (%s) reported error : 0x%x",
+                            mIsAudio ? "audio" : "video", err);
+
+                    handleError(err);
+                    break;
+                }
+
+                default:
+                {
+                    TRESPASS();
+                    break;
+                }
+            }
+
+            break;
+        }
+
+        case kWhatRenderBuffer:
+        {
+            if (!isStaleReply(msg)) {
+                onRenderBuffer(msg);
+            }
+            break;
+        }
+
+        case kWhatAudioOutputFormatChanged:
+        {
+            if (!isStaleReply(msg)) {
+                status_t err;
+                if (msg->findInt32("err", &err) && err != OK) {
+                    ALOGE("Renderer reported 0x%x when changing audio output format", err);
+                    handleError(err);
+                }
+            }
+            break;
+        }
+
+        case kWhatSetVideoSurface:
+        {
+            sp<AReplyToken> replyID;
+            CHECK(msg->senderAwaitsResponse(&replyID));
+
+            sp<RefBase> obj;
+            CHECK(msg->findObject("surface", &obj));
+            sp<Surface> surface = static_cast<Surface *>(obj.get()); // non-null
+            int32_t err = INVALID_OPERATION;
+            // NOTE: in practice mSurface is always non-null, but checking here for completeness
+            if (mCodec != NULL && mSurface != NULL) {
+                // TODO: once AwesomePlayer is removed, remove this automatic connecting
+                // to the surface by MediaPlayerService.
+                //
+                // at this point MediaPlayerService::client has already connected to the
+                // surface, which MediaCodec does not expect
+                err = nativeWindowDisconnect(surface.get(), "kWhatSetVideoSurface(surface)");
+                if (err == OK) {
+                    err = mCodec->setOutputSurface(surface);
+                    ALOGI_IF(err, "codec setOutputSurface returned: %d", err);
+                    if (err == OK) {
+                        // reconnect to the old surface as MPS::Client will expect to
+                        // be able to disconnect from it.
+                        (void)nativeWindowConnect(mSurface.get(), "kWhatSetVideoSurface(mSurface)");
+                        mSurface = surface;
+                    }
+                }
+                if (err != OK) {
+                    // reconnect to the new surface on error as MPS::Client will expect to
+                    // be able to disconnect from it.
+                    (void)nativeWindowConnect(surface.get(), "kWhatSetVideoSurface(err)");
+                }
+            }
+
+            sp<AMessage> response = new AMessage;
+            response->setInt32("err", err);
+            response->postReply(replyID);
+            break;
+        }
+
+        case kWhatDrmReleaseCrypto:
+        {
+            ALOGV("kWhatDrmReleaseCrypto");
+            onReleaseCrypto(msg);
+            break;
+        }
+
+        default:
+            DecoderBase::onMessageReceived(msg);
+            break;
+    }
+}
+
+void NuPlayer2::Decoder::onConfigure(const sp<AMessage> &format) {
+    ALOGV("[%s] onConfigure (format=%s)", mComponentName.c_str(), format->debugString().c_str());
+    CHECK(mCodec == NULL);
+
+    mFormatChangePending = false;
+    mTimeChangePending = false;
+
+    ++mBufferGeneration;
+
+    AString mime;
+    CHECK(format->findString("mime", &mime));
+
+    mIsAudio = !strncasecmp("audio/", mime.c_str(), 6);
+    mIsVideoAVC = !strcasecmp(MEDIA_MIMETYPE_VIDEO_AVC, mime.c_str());
+
+    mComponentName = mime;
+    mComponentName.append(" decoder");
+    ALOGV("[%s] onConfigure (surface=%p)", mComponentName.c_str(), mSurface.get());
+
+    mCodec = AMediaCodecWrapper::CreateDecoderByType(mime);
+    int32_t secure = 0;
+    if (format->findInt32("secure", &secure) && secure != 0) {
+        if (mCodec != NULL) {
+            if (mCodec->getName(&mComponentName) == OK) {
+                mComponentName.append(".secure");
+                mCodec->release();
+                ALOGI("[%s] creating", mComponentName.c_str());
+                mCodec = AMediaCodecWrapper::CreateCodecByName(mComponentName);
+            } else {
+                mCodec = NULL;
+            }
+        }
+    }
+    if (mCodec == NULL) {
+        ALOGE("Failed to create %s%s decoder",
+                (secure ? "secure " : ""), mime.c_str());
+        handleError(NO_INIT);
+        return;
+    }
+    mIsSecure = secure;
+
+    mCodec->getName(&mComponentName);
+
+    status_t err;
+    if (mSurface != NULL) {
+        // disconnect from surface as MediaCodec will reconnect
+        err = nativeWindowDisconnect(mSurface.get(), "onConfigure");
+        // We treat this as a warning, as this is a preparatory step.
+        // Codec will try to connect to the surface, which is where
+        // any error signaling will occur.
+        ALOGW_IF(err != OK, "failed to disconnect from surface: %d", err);
+    }
+
+    // Modular DRM
+    sp<RefBase> objCrypto;
+    format->findObject("crypto", &objCrypto);
+    sp<AMediaCryptoWrapper> crypto = static_cast<AMediaCryptoWrapper *>(objCrypto.get());
+    // non-encrypted source won't have a crypto
+    mIsEncrypted = (crypto != NULL);
+    // configure is called once; still using OR in case the behavior changes.
+    mIsEncryptedObservedEarlier = mIsEncryptedObservedEarlier || mIsEncrypted;
+    ALOGV("onConfigure mCrypto: %p, mIsSecure: %d", crypto.get(), mIsSecure);
+
+    err = mCodec->configure(
+            AMediaFormatWrapper::Create(format),
+            mSurface,
+            crypto,
+            0 /* flags */);
+
+    if (err != OK) {
+        ALOGE("Failed to configure [%s] decoder (err=%d)", mComponentName.c_str(), err);
+        mCodec->release();
+        mCodec.clear();
+        handleError(err);
+        return;
+    }
+    rememberCodecSpecificData(format);
+
+    // the following should work in configured state
+    sp<AMediaFormatWrapper> outputFormat = mCodec->getOutputFormat();
+    if (outputFormat == NULL) {
+        handleError(INVALID_OPERATION);
+        return;
+    }
+    mInputFormat = mCodec->getInputFormat();
+    if (mInputFormat == NULL) {
+        handleError(INVALID_OPERATION);
+        return;
+    }
+
+    mStats->setString("mime", mime.c_str());
+    mStats->setString("component-name", mComponentName.c_str());
+
+    if (!mIsAudio) {
+        int32_t width, height;
+        if (outputFormat->getInt32("width", &width)
+                && outputFormat->getInt32("height", &height)) {
+            mStats->setInt32("width", width);
+            mStats->setInt32("height", height);
+        }
+    }
+
+    sp<AMessage> reply = new AMessage(kWhatCodecNotify, this);
+    mCodec->setCallback(reply);
+
+    err = mCodec->start();
+    if (err != OK) {
+        ALOGE("Failed to start [%s] decoder (err=%d)", mComponentName.c_str(), err);
+        mCodec->release();
+        mCodec.clear();
+        handleError(err);
+        return;
+    }
+
+    releaseAndResetMediaBuffers();
+
+    mPaused = false;
+    mResumePending = false;
+}
+
+void NuPlayer2::Decoder::onSetParameters(const sp<AMessage> &params) {
+    bool needAdjustLayers = false;
+    float frameRateTotal;
+    if (params->findFloat("frame-rate-total", &frameRateTotal)
+            && mFrameRateTotal != frameRateTotal) {
+        needAdjustLayers = true;
+        mFrameRateTotal = frameRateTotal;
+    }
+
+    int32_t numVideoTemporalLayerTotal;
+    if (params->findInt32("temporal-layer-count", &numVideoTemporalLayerTotal)
+            && numVideoTemporalLayerTotal >= 0
+            && numVideoTemporalLayerTotal <= kMaxNumVideoTemporalLayers
+            && mNumVideoTemporalLayerTotal != numVideoTemporalLayerTotal) {
+        needAdjustLayers = true;
+        mNumVideoTemporalLayerTotal = std::max(numVideoTemporalLayerTotal, 1);
+    }
+
+    if (needAdjustLayers && mNumVideoTemporalLayerTotal > 1) {
+        // TODO: For now, layer fps is calculated for some specific architectures.
+        // But it really should be extracted from the stream.
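+        // Assume each additional temporal layer doubles the frame rate: layer 0 runs at
+        // mFrameRateTotal / 2^(N-1) fps and mVideoTemporalLayerAggregateFps[i] is the
+        // combined rate of layers 0..i.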
+        mVideoTemporalLayerAggregateFps[0] =
+            mFrameRateTotal / (float)(1ll << (mNumVideoTemporalLayerTotal - 1));
+        for (int32_t i = 1; i < mNumVideoTemporalLayerTotal; ++i) {
+            mVideoTemporalLayerAggregateFps[i] =
+                mFrameRateTotal / (float)(1ll << (mNumVideoTemporalLayerTotal - i))
+                + mVideoTemporalLayerAggregateFps[i - 1];
+        }
+    }
+
+    float playbackSpeed;
+    if (params->findFloat("playback-speed", &playbackSpeed)
+            && mPlaybackSpeed != playbackSpeed) {
+        needAdjustLayers = true;
+        mPlaybackSpeed = playbackSpeed;
+    }
+
+    if (needAdjustLayers) {
+        float decodeFrameRate = mFrameRateTotal;
+        // enable temporal layering optimization only if we know the layering depth
+        if (mNumVideoTemporalLayerTotal > 1) {
+            int32_t layerId;
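+            // Pick the deepest layer whose aggregate frame rate, scaled by the playback
+            // speed, still fits under roughly 90% of the display refresh rate.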
+            for (layerId = 0; layerId < mNumVideoTemporalLayerTotal - 1; ++layerId) {
+                if (mVideoTemporalLayerAggregateFps[layerId] * mPlaybackSpeed
+                        >= kDisplayRefreshingRate * 0.9) {
+                    break;
+                }
+            }
+            mNumVideoTemporalLayerAllowed = layerId + 1;
+            decodeFrameRate = mVideoTemporalLayerAggregateFps[layerId];
+        }
+        ALOGV("onSetParameters: allowed layers=%d, decodeFps=%g",
+                mNumVideoTemporalLayerAllowed, decodeFrameRate);
+
+        if (mCodec == NULL) {
+            ALOGW("onSetParameters called before codec is created.");
+            return;
+        }
+
+        sp<AMediaFormatWrapper> codecParams = new AMediaFormatWrapper();
+        codecParams->setFloat("operating-rate", decodeFrameRate * mPlaybackSpeed);
+        mCodec->setParameters(codecParams);
+    }
+}
+
+void NuPlayer2::Decoder::onSetRenderer(const sp<Renderer> &renderer) {
+    mRenderer = renderer;
+}
+
+void NuPlayer2::Decoder::onResume(bool notifyComplete) {
+    mPaused = false;
+
+    if (notifyComplete) {
+        mResumePending = true;
+    }
+
+    if (mCodec == NULL) {
+        ALOGE("[%s] onResume without a valid codec", mComponentName.c_str());
+        handleError(NO_INIT);
+        return;
+    }
+    mCodec->start();
+}
+
+void NuPlayer2::Decoder::doFlush(bool notifyComplete) {
+    if (mCCDecoder != NULL) {
+        mCCDecoder->flush();
+    }
+
+    if (mRenderer != NULL) {
+        mRenderer->flush(mIsAudio, notifyComplete);
+        mRenderer->signalTimeDiscontinuity();
+    }
+
+    status_t err = OK;
+    if (mCodec != NULL) {
+        err = mCodec->flush();
+        mCSDsToSubmit = mCSDsForCurrentFormat; // copy operator
+        ++mBufferGeneration;
+    }
+
+    if (err != OK) {
+        ALOGE("failed to flush [%s] (err=%d)", mComponentName.c_str(), err);
+        handleError(err);
+        // finish with posting kWhatFlushCompleted.
+        // we attempt to release the buffers even if flush fails.
+    }
+    releaseAndResetMediaBuffers();
+    mPaused = true;
+}
+
+
+void NuPlayer2::Decoder::onFlush() {
+    doFlush(true);
+
+    if (isDiscontinuityPending()) {
+        // This could happen if the client starts seeking/shutdown
+        // after we queued an EOS for discontinuities.
+        // We can consider discontinuity handled.
+        finishHandleDiscontinuity(false /* flushOnTimeChange */);
+    }
+
+    sp<AMessage> notify = mNotify->dup();
+    notify->setInt32("what", kWhatFlushCompleted);
+    notify->post();
+}
+
+void NuPlayer2::Decoder::onShutdown(bool notifyComplete) {
+    status_t err = OK;
+
+    // if there is a pending resume request, notify complete now
+    notifyResumeCompleteIfNecessary();
+
+    if (mCodec != NULL) {
+        err = mCodec->release();
+        mCodec = NULL;
+        ++mBufferGeneration;
+
+        if (mSurface != NULL) {
+            // reconnect to surface as MediaCodec disconnected from it
+            status_t error = nativeWindowConnect(mSurface.get(), "onShutdown");
+            ALOGW_IF(error != NO_ERROR,
+                    "[%s] failed to connect to native window, error=%d",
+                    mComponentName.c_str(), error);
+        }
+        mComponentName = "decoder";
+    }
+
+    releaseAndResetMediaBuffers();
+
+    if (err != OK) {
+        ALOGE("failed to release [%s] (err=%d)", mComponentName.c_str(), err);
+        handleError(err);
+        // finish with posting kWhatShutdownCompleted.
+    }
+
+    if (notifyComplete) {
+        sp<AMessage> notify = mNotify->dup();
+        notify->setInt32("what", kWhatShutdownCompleted);
+        notify->post();
+        mPaused = true;
+    }
+}
+
+/*
+ * returns true if we should request more data
+ */
+bool NuPlayer2::Decoder::doRequestBuffers() {
+    if (isDiscontinuityPending()) {
+        return false;
+    }
+    status_t err = OK;
+    while (err == OK && !mDequeuedInputBuffers.empty()) {
+        size_t bufferIx = *mDequeuedInputBuffers.begin();
+        sp<AMessage> msg = new AMessage();
+        msg->setSize("buffer-ix", bufferIx);
+        err = fetchInputData(msg);
+        if (err != OK && err != ERROR_END_OF_STREAM) {
+            // EOS (ERROR_END_OF_STREAM) is not treated as an error here; it falls through
+            // so an EOS buffer still gets queued to the codec.
+            break;
+        }
+        mDequeuedInputBuffers.erase(mDequeuedInputBuffers.begin());
+
+        if (!mPendingInputMessages.empty()
+                || !onInputBufferFetched(msg)) {
+            mPendingInputMessages.push_back(msg);
+        }
+    }
+
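+    // Keep polling only if the source temporarily has no data (-EWOULDBLOCK) and is still
+    // able to feed more TS data; any other error, including EOS, stops the requests.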
+    return err == -EWOULDBLOCK
+            && mSource->feedMoreTSData() == OK;
+}
+
+void NuPlayer2::Decoder::handleError(int32_t err)
+{
+    // We cannot release the codec immediately because the renderer may still hold its
+    // buffers. Signal the error to the player so it can shut down and release the decoder
+    // after flushing, and bump the buffer generation to discard messages that are now stale.
+
+    ++mBufferGeneration;
+
+    sp<AMessage> notify = mNotify->dup();
+    notify->setInt32("what", kWhatError);
+    notify->setInt32("err", err);
+    notify->post();
+}
+
+status_t NuPlayer2::Decoder::releaseCrypto()
+{
+    ALOGV("releaseCrypto");
+
+    sp<AMessage> msg = new AMessage(kWhatDrmReleaseCrypto, this);
+
+    sp<AMessage> response;
+    status_t status = msg->postAndAwaitResponse(&response);
+    if (status == OK && response != NULL) {
+        CHECK(response->findInt32("status", &status));
+        ALOGV("releaseCrypto ret: %d ", status);
+    } else {
+        ALOGE("releaseCrypto err: %d", status);
+    }
+
+    return status;
+}
+
+void NuPlayer2::Decoder::onReleaseCrypto(const sp<AMessage>& msg)
+{
+    status_t status = INVALID_OPERATION;
+    if (mCodec != NULL) {
+        status = mCodec->releaseCrypto();
+    } else {
+        // Return OK if the codec has already been released.
+        status = OK;
+        ALOGE("onReleaseCrypto No mCodec. err: %d", status);
+    }
+
+    sp<AMessage> response = new AMessage;
+    response->setInt32("status", status);
+    // Clearing the state as it's tied to crypto. mIsEncryptedObservedEarlier is sticky though
+    // and lasts for the lifetime of this codec. See its use in fetchInputData.
+    mIsEncrypted = false;
+
+    sp<AReplyToken> replyID;
+    CHECK(msg->senderAwaitsResponse(&replyID));
+    response->postReply(replyID);
+}
+
+bool NuPlayer2::Decoder::handleAnInputBuffer(size_t index) {
+    if (isDiscontinuityPending()) {
+        return false;
+    }
+
+    if (mCodec == NULL) {
+        ALOGE("[%s] handleAnInputBuffer without a valid codec", mComponentName.c_str());
+        handleError(NO_INIT);
+        return false;
+    }
+
+    size_t bufferSize = 0;
+    uint8_t *bufferBase = mCodec->getInputBuffer(index, &bufferSize);
+
+    if (bufferBase == NULL) {
+        ALOGE("[%s] handleAnInputBuffer, failed to get input buffer", mComponentName.c_str());
+        handleError(UNKNOWN_ERROR);
+        return false;
+    }
+
+    sp<MediaCodecBuffer> buffer =
+        new MediaCodecBuffer(NULL /* format */, new ABuffer(bufferBase, bufferSize));
+
+    if (index >= mInputBuffers.size()) {
+        for (size_t i = mInputBuffers.size(); i <= index; ++i) {
+            mInputBuffers.add();
+            mMediaBuffers.add();
+            mInputBufferIsDequeued.add();
+            mMediaBuffers.editItemAt(i) = NULL;
+            mInputBufferIsDequeued.editItemAt(i) = false;
+        }
+    }
+    mInputBuffers.editItemAt(index) = buffer;
+
+    //CHECK_LT(bufferIx, mInputBuffers.size());
+
+    if (mMediaBuffers[index] != NULL) {
+        mMediaBuffers[index]->release();
+        mMediaBuffers.editItemAt(index) = NULL;
+    }
+    mInputBufferIsDequeued.editItemAt(index) = true;
+
+    if (!mCSDsToSubmit.isEmpty()) {
+        sp<AMessage> msg = new AMessage();
+        msg->setSize("buffer-ix", index);
+
+        sp<ABuffer> buffer = mCSDsToSubmit.itemAt(0);
+        ALOGI("[%s] resubmitting CSD", mComponentName.c_str());
+        msg->setBuffer("buffer", buffer);
+        mCSDsToSubmit.removeAt(0);
+        if (!onInputBufferFetched(msg)) {
+            handleError(UNKNOWN_ERROR);
+            return false;
+        }
+        return true;
+    }
+
+    while (!mPendingInputMessages.empty()) {
+        sp<AMessage> msg = *mPendingInputMessages.begin();
+        if (!onInputBufferFetched(msg)) {
+            break;
+        }
+        mPendingInputMessages.erase(mPendingInputMessages.begin());
+    }
+
+    if (!mInputBufferIsDequeued.editItemAt(index)) {
+        return true;
+    }
+
+    mDequeuedInputBuffers.push_back(index);
+
+    onRequestInputBuffers();
+    return true;
+}
+
+bool NuPlayer2::Decoder::handleAnOutputBuffer(
+        size_t index,
+        size_t offset,
+        size_t size,
+        int64_t timeUs,
+        int32_t flags) {
+    if (mCodec == NULL) {
+        ALOGE("[%s] handleAnOutputBuffer without a valid codec", mComponentName.c_str());
+        handleError(NO_INIT);
+        return false;
+    }
+
+//    CHECK_LT(bufferIx, mOutputBuffers.size());
+
+    size_t bufferSize = 0;
+    uint8_t *bufferBase = mCodec->getOutputBuffer(index, &bufferSize);
+
+    if (bufferBase == NULL) {
+        ALOGE("[%s] handleAnOutputBuffer, failed to get output buffer", mComponentName.c_str());
+        handleError(UNKNOWN_ERROR);
+        return false;
+    }
+
+    sp<MediaCodecBuffer> buffer =
+        new MediaCodecBuffer(NULL /* format */, new ABuffer(bufferBase, bufferSize));
+
+    if (index >= mOutputBuffers.size()) {
+        for (size_t i = mOutputBuffers.size(); i <= index; ++i) {
+            mOutputBuffers.add();
+        }
+    }
+
+    mOutputBuffers.editItemAt(index) = buffer;
+
+    buffer->setRange(offset, size);
+    buffer->meta()->clear();
+    buffer->meta()->setInt64("timeUs", timeUs);
+
+    bool eos = flags & AMEDIACODEC_BUFFER_FLAG_END_OF_STREAM;
+    // we do not expect CODECCONFIG or SYNCFRAME for decoder
+
+    sp<AMessage> reply = new AMessage(kWhatRenderBuffer, this);
+    reply->setSize("buffer-ix", index);
+    reply->setInt32("generation", mBufferGeneration);
+
+    if (eos) {
+        ALOGI("[%s] saw output EOS", mIsAudio ? "audio" : "video");
+
+        buffer->meta()->setInt32("eos", true);
+        reply->setInt32("eos", true);
+    }
+
+    if (mSkipRenderingUntilMediaTimeUs >= 0) {
+        if (timeUs < mSkipRenderingUntilMediaTimeUs) {
+            ALOGV("[%s] dropping buffer at time %lld as requested.",
+                     mComponentName.c_str(), (long long)timeUs);
+
+            reply->post();
+            if (eos) {
+                notifyResumeCompleteIfNecessary();
+                if (mRenderer != NULL && !isDiscontinuityPending()) {
+                    mRenderer->queueEOS(mIsAudio, ERROR_END_OF_STREAM);
+                }
+            }
+            return true;
+        }
+
+        mSkipRenderingUntilMediaTimeUs = -1;
+    }
+
+    mNumFramesTotal += !mIsAudio;
+
+    // wait until 1st frame comes out to signal resume complete
+    notifyResumeCompleteIfNecessary();
+
+    if (mRenderer != NULL) {
+        // send the buffer to renderer.
+        mRenderer->queueBuffer(mIsAudio, buffer, reply);
+        if (eos && !isDiscontinuityPending()) {
+            mRenderer->queueEOS(mIsAudio, ERROR_END_OF_STREAM);
+        }
+    }
+
+    return true;
+}
+
+void NuPlayer2::Decoder::handleOutputFormatChange(const sp<AMessage> &format) {
+    if (!mIsAudio) {
+        int32_t width, height;
+        if (format->findInt32("width", &width)
+                && format->findInt32("height", &height)) {
+            mStats->setInt32("width", width);
+            mStats->setInt32("height", height);
+        }
+        sp<AMessage> notify = mNotify->dup();
+        notify->setInt32("what", kWhatVideoSizeChanged);
+        notify->setMessage("format", format);
+        notify->post();
+    } else if (mRenderer != NULL) {
+        uint32_t flags;
+        int64_t durationUs;
+        bool hasVideo = (mSource->getFormat(false /* audio */) != NULL);
+        if (getAudioDeepBufferSetting() // override regardless of source duration
+                || (mSource->getDuration(&durationUs) == OK
+                        && durationUs > AUDIO_SINK_MIN_DEEP_BUFFER_DURATION_US)) {
+            flags = AUDIO_OUTPUT_FLAG_DEEP_BUFFER;
+        } else {
+            flags = AUDIO_OUTPUT_FLAG_NONE;
+        }
+
+        sp<AMessage> reply = new AMessage(kWhatAudioOutputFormatChanged, this);
+        reply->setInt32("generation", mBufferGeneration);
+        mRenderer->changeAudioFormat(
+                format, false /* offloadOnly */, hasVideo,
+                flags, mSource->isStreaming(), reply);
+    }
+}
+
+void NuPlayer2::Decoder::releaseAndResetMediaBuffers() {
+    for (size_t i = 0; i < mMediaBuffers.size(); i++) {
+        if (mMediaBuffers[i] != NULL) {
+            mMediaBuffers[i]->release();
+            mMediaBuffers.editItemAt(i) = NULL;
+        }
+    }
+    mMediaBuffers.resize(mInputBuffers.size());
+    for (size_t i = 0; i < mMediaBuffers.size(); i++) {
+        mMediaBuffers.editItemAt(i) = NULL;
+    }
+    mInputBufferIsDequeued.clear();
+    mInputBufferIsDequeued.resize(mInputBuffers.size());
+    for (size_t i = 0; i < mInputBufferIsDequeued.size(); i++) {
+        mInputBufferIsDequeued.editItemAt(i) = false;
+    }
+
+    mPendingInputMessages.clear();
+    mDequeuedInputBuffers.clear();
+    mSkipRenderingUntilMediaTimeUs = -1;
+}
+
+bool NuPlayer2::Decoder::isStaleReply(const sp<AMessage> &msg) {
+    int32_t generation;
+    CHECK(msg->findInt32("generation", &generation));
+    return generation != mBufferGeneration;
+}
+
+status_t NuPlayer2::Decoder::fetchInputData(sp<AMessage> &reply) {
+    sp<ABuffer> accessUnit;
+    bool dropAccessUnit = true;
+    do {
+        status_t err = mSource->dequeueAccessUnit(mIsAudio, &accessUnit);
+
+        if (err == -EWOULDBLOCK) {
+            return err;
+        } else if (err != OK) {
+            if (err == INFO_DISCONTINUITY) {
+                int32_t type;
+                CHECK(accessUnit->meta()->findInt32("discontinuity", &type));
+
+                bool formatChange =
+                    (mIsAudio &&
+                     (type & ATSParser::DISCONTINUITY_AUDIO_FORMAT))
+                    || (!mIsAudio &&
+                            (type & ATSParser::DISCONTINUITY_VIDEO_FORMAT));
+
+                bool timeChange = (type & ATSParser::DISCONTINUITY_TIME) != 0;
+
+                ALOGI("%s discontinuity (format=%d, time=%d)",
+                        mIsAudio ? "audio" : "video", formatChange, timeChange);
+
+                bool seamlessFormatChange = false;
+                sp<AMessage> newFormat = mSource->getFormat(mIsAudio);
+                if (formatChange) {
+                    seamlessFormatChange =
+                        supportsSeamlessFormatChange(newFormat);
+                    // treat seamless format change separately
+                    formatChange = !seamlessFormatChange;
+                }
+
+                // For format or time change, return EOS to queue EOS input,
+                // then wait for EOS on output.
+                if (formatChange /* not seamless */) {
+                    mFormatChangePending = true;
+                    err = ERROR_END_OF_STREAM;
+                } else if (timeChange) {
+                    rememberCodecSpecificData(newFormat);
+                    mTimeChangePending = true;
+                    err = ERROR_END_OF_STREAM;
+                } else if (seamlessFormatChange) {
+                    // reuse existing decoder and don't flush
+                    rememberCodecSpecificData(newFormat);
+                    continue;
+                } else {
+                    // This stream is unaffected by the discontinuity
+                    return -EWOULDBLOCK;
+                }
+            }
+
+            // reply should only be returned without a buffer set
+            // when there is an error (including EOS)
+            CHECK(err != OK);
+
+            reply->setInt32("err", err);
+            return ERROR_END_OF_STREAM;
+        }
+
+        dropAccessUnit = false;
+        if (!mIsAudio && !mIsEncrypted) {
+            // Extra safeguard if higher-level behavior changes. Otherwise, not required now.
+            // Preventing the buffer from being processed (and sent to codec) if this is a later
+            // round of playback but this time without prepareDrm. Or if there is a race between
+            // stop (which is not blocking) and releaseDrm allowing buffers being processed after
+            // Crypto has been released (GenericSource currently prevents this race though).
+            // Particularly doing this check before IsAVCReferenceFrame call to prevent parsing
+            // of encrypted data.
+            if (mIsEncryptedObservedEarlier) {
+                ALOGE("fetchInputData: mismatched mIsEncrypted/mIsEncryptedObservedEarlier (0/1)");
+
+                return INVALID_OPERATION;
+            }
+
+            int32_t layerId = 0;
+            bool haveLayerId = accessUnit->meta()->findInt32("temporal-layer-id", &layerId);
+            if (mRenderer->getVideoLateByUs() > 100000ll
+                    && mIsVideoAVC
+                    && !IsAVCReferenceFrame(accessUnit)) {
+                dropAccessUnit = true;
+            } else if (haveLayerId && mNumVideoTemporalLayerTotal > 1) {
+                // Add only one layer each time.
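+                // Admit a frame only if its layer id is at most one above the highest layer
+                // seen so far and below the allowed layer count; an IDR in layer 0 lifts the
+                // ramp-up restriction so any allowed layer is admitted again.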
+                if (layerId > mCurrentMaxVideoTemporalLayerId + 1
+                        || layerId >= mNumVideoTemporalLayerAllowed) {
+                    dropAccessUnit = true;
+                    ALOGV("dropping layer(%d), speed=%g, allowed layer count=%d, max layerId=%d",
+                            layerId, mPlaybackSpeed, mNumVideoTemporalLayerAllowed,
+                            mCurrentMaxVideoTemporalLayerId);
+                } else if (layerId > mCurrentMaxVideoTemporalLayerId) {
+                    mCurrentMaxVideoTemporalLayerId = layerId;
+                } else if (layerId == 0 && mNumVideoTemporalLayerTotal > 1
+                        && IsIDR(accessUnit->data(), accessUnit->size())) {
+                    mCurrentMaxVideoTemporalLayerId = mNumVideoTemporalLayerTotal - 1;
+                }
+            }
+            if (dropAccessUnit) {
+                if (layerId <= mCurrentMaxVideoTemporalLayerId && layerId > 0) {
+                    mCurrentMaxVideoTemporalLayerId = layerId - 1;
+                }
+                ++mNumInputFramesDropped;
+            }
+        }
+    } while (dropAccessUnit);
+
+    // ALOGV("returned a valid buffer of %s data", mIsAudio ? "mIsAudio" : "video");
+#if 0
+    int64_t mediaTimeUs;
+    CHECK(accessUnit->meta()->findInt64("timeUs", &mediaTimeUs));
+    ALOGV("[%s] feeding input buffer at media time %.3f",
+         mIsAudio ? "audio" : "video",
+         mediaTimeUs / 1E6);
+#endif
+
+    if (mCCDecoder != NULL) {
+        mCCDecoder->decode(accessUnit);
+    }
+
+    reply->setBuffer("buffer", accessUnit);
+
+    return OK;
+}
+
+bool NuPlayer2::Decoder::onInputBufferFetched(const sp<AMessage> &msg) {
+    if (mCodec == NULL) {
+        ALOGE("[%s] onInputBufferFetched without a valid codec", mComponentName.c_str());
+        handleError(NO_INIT);
+        return false;
+    }
+
+    size_t bufferIx;
+    CHECK(msg->findSize("buffer-ix", &bufferIx));
+    CHECK_LT(bufferIx, mInputBuffers.size());
+    sp<MediaCodecBuffer> codecBuffer = mInputBuffers[bufferIx];
+
+    sp<ABuffer> buffer;
+    bool hasBuffer = msg->findBuffer("buffer", &buffer);
+    bool needsCopy = true;
+
+    if (buffer == NULL /* includes !hasBuffer */) {
+        int32_t streamErr = ERROR_END_OF_STREAM;
+        CHECK(msg->findInt32("err", &streamErr) || !hasBuffer);
+
+        CHECK(streamErr != OK);
+
+        // attempt to queue EOS
+        status_t err = mCodec->queueInputBuffer(
+                bufferIx,
+                0,
+                0,
+                0,
+                AMEDIACODEC_BUFFER_FLAG_END_OF_STREAM);
+        if (err == OK) {
+            mInputBufferIsDequeued.editItemAt(bufferIx) = false;
+        } else if (streamErr == ERROR_END_OF_STREAM) {
+            streamErr = err;
+            // err will not be ERROR_END_OF_STREAM
+        }
+
+        if (streamErr != ERROR_END_OF_STREAM) {
+            ALOGE("Stream error for [%s] (err=%d), EOS %s queued",
+                    mComponentName.c_str(),
+                    streamErr,
+                    err == OK ? "successfully" : "unsuccessfully");
+            handleError(streamErr);
+        }
+    } else {
+        sp<AMessage> extra;
+        if (buffer->meta()->findMessage("extra", &extra) && extra != NULL) {
+            int64_t resumeAtMediaTimeUs;
+            if (extra->findInt64(
+                        "resume-at-mediaTimeUs", &resumeAtMediaTimeUs)) {
+                ALOGI("[%s] suppressing rendering until %lld us",
+                        mComponentName.c_str(), (long long)resumeAtMediaTimeUs);
+                mSkipRenderingUntilMediaTimeUs = resumeAtMediaTimeUs;
+            }
+        }
+
+        int64_t timeUs = 0;
+        uint32_t flags = 0;
+        CHECK(buffer->meta()->findInt64("timeUs", &timeUs));
+
+        int32_t eos, csd;
+        // we do not expect SYNCFRAME for decoder
+        if (buffer->meta()->findInt32("eos", &eos) && eos) {
+            flags |= AMEDIACODEC_BUFFER_FLAG_END_OF_STREAM;
+        } else if (buffer->meta()->findInt32("csd", &csd) && csd) {
+            flags |= AMEDIACODEC_BUFFER_FLAG_CODEC_CONFIG;
+        }
+
+        // Modular DRM
+        MediaBuffer *mediaBuf = NULL;
+        sp<AMediaCodecCryptoInfoWrapper> cryptInfo;
+
+        // copy into codec buffer
+        if (needsCopy) {
+            if (buffer->size() > codecBuffer->capacity()) {
+                handleError(ERROR_BUFFER_TOO_SMALL);
+                mDequeuedInputBuffers.push_back(bufferIx);
+                return false;
+            }
+
+            if (buffer->data() != NULL) {
+                codecBuffer->setRange(0, buffer->size());
+                memcpy(codecBuffer->data(), buffer->data(), buffer->size());
+            } else { // No buffer->data()
+                // Modular DRM
+                mediaBuf = (MediaBuffer*)buffer->getMediaBufferBase();
+                if (mediaBuf != NULL) {
+                    codecBuffer->setRange(0, mediaBuf->size());
+                    memcpy(codecBuffer->data(), mediaBuf->data(), mediaBuf->size());
+
+                    sp<MetaData> meta_data = mediaBuf->meta_data();
+                    cryptInfo = AMediaCodecCryptoInfoWrapper::Create(meta_data);
+
+                    // since getMediaBufferBase() has incremented the refCount
+                    mediaBuf->release();
+                } else { // No mediaBuf
+                    ALOGE("onInputBufferFetched: buffer->data()/mediaBuf are NULL for %p",
+                            buffer.get());
+                    handleError(UNKNOWN_ERROR);
+                    return false;
+                }
+            } // buffer->data()
+        } // needsCopy
+
+        status_t err;
+        if (cryptInfo != NULL) {
+            err = mCodec->queueSecureInputBuffer(
+                    bufferIx,
+                    codecBuffer->offset(),
+                    cryptInfo,
+                    timeUs,
+                    flags);
+            // synchronous call so done with cryptInfo here
+        } else {
+            err = mCodec->queueInputBuffer(
+                    bufferIx,
+                    codecBuffer->offset(),
+                    codecBuffer->size(),
+                    timeUs,
+                    flags);
+        } // no cryptInfo
+
+        if (err != OK) {
+            ALOGE("onInputBufferFetched: queue%sInputBuffer failed for [%s] (err=%d)",
+                    (cryptInfo != NULL ? "Secure" : ""),
+                    mComponentName.c_str(), err);
+            handleError(err);
+        } else {
+            mInputBufferIsDequeued.editItemAt(bufferIx) = false;
+        }
+
+    }   // buffer != NULL
+    return true;
+}
+
+void NuPlayer2::Decoder::onRenderBuffer(const sp<AMessage> &msg) {
+    status_t err;
+    int32_t render;
+    size_t bufferIx;
+    int32_t eos;
+    CHECK(msg->findSize("buffer-ix", &bufferIx));
+
+    if (!mIsAudio) {
+        int64_t timeUs;
+        sp<MediaCodecBuffer> buffer = mOutputBuffers[bufferIx];
+        buffer->meta()->findInt64("timeUs", &timeUs);
+
+        if (mCCDecoder != NULL && mCCDecoder->isSelected()) {
+            mCCDecoder->display(timeUs);
+        }
+    }
+
+    if (mCodec == NULL) {
+        err = NO_INIT;
+    } else if (msg->findInt32("render", &render) && render) {
+        int64_t timestampNs;
+        CHECK(msg->findInt64("timestampNs", &timestampNs));
+        err = mCodec->releaseOutputBufferAtTime(bufferIx, timestampNs);
+    } else {
+        mNumOutputFramesDropped += !mIsAudio;
+        err = mCodec->releaseOutputBuffer(bufferIx, false /* render */);
+    }
+    if (err != OK) {
+        ALOGE("failed to release output buffer for [%s] (err=%d)",
+                mComponentName.c_str(), err);
+        handleError(err);
+    }
+    if (msg->findInt32("eos", &eos) && eos
+            && isDiscontinuityPending()) {
+        finishHandleDiscontinuity(true /* flushOnTimeChange */);
+    }
+}
+
+bool NuPlayer2::Decoder::isDiscontinuityPending() const {
+    return mFormatChangePending || mTimeChangePending;
+}
+
+void NuPlayer2::Decoder::finishHandleDiscontinuity(bool flushOnTimeChange) {
+    ALOGV("finishHandleDiscontinuity: format %d, time %d, flush %d",
+            mFormatChangePending, mTimeChangePending, flushOnTimeChange);
+
+    // If we have format change, pause and wait to be killed;
+    // If we have time change only, flush and restart fetching.
+
+    if (mFormatChangePending) {
+        mPaused = true;
+    } else if (mTimeChangePending) {
+        if (flushOnTimeChange) {
+            doFlush(false /* notifyComplete */);
+            signalResume(false /* notifyComplete */);
+        }
+    }
+
+    // Notify NuPlayer2 to either shutdown decoder, or rescan sources
+    sp<AMessage> msg = mNotify->dup();
+    msg->setInt32("what", kWhatInputDiscontinuity);
+    msg->setInt32("formatChange", mFormatChangePending);
+    msg->post();
+
+    mFormatChangePending = false;
+    mTimeChangePending = false;
+}
+
+bool NuPlayer2::Decoder::supportsSeamlessAudioFormatChange(
+        const sp<AMessage> &targetFormat) const {
+    if (targetFormat == NULL) {
+        return true;
+    }
+
+    AString mime;
+    if (!targetFormat->findString("mime", &mime)) {
+        return false;
+    }
+
+    if (!strcasecmp(mime.c_str(), MEDIA_MIMETYPE_AUDIO_AAC)) {
+        // field-by-field comparison
+        const char * keys[] = { "channel-count", "sample-rate", "is-adts" };
+        for (unsigned int i = 0; i < sizeof(keys) / sizeof(keys[0]); i++) {
+            int32_t oldVal, newVal;
+            if (!mInputFormat->getInt32(keys[i], &oldVal) ||
+                    !targetFormat->findInt32(keys[i], &newVal) ||
+                    oldVal != newVal) {
+                return false;
+            }
+        }
+
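+        // A seamless audio change additionally requires an identical csd-0 buffer.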
+        sp<ABuffer> newBuf;
+        uint8_t *oldBufData = NULL;
+        size_t oldBufSize = 0;
+        if (mInputFormat->getBuffer("csd-0", (void**)&oldBufData, &oldBufSize) &&
+                targetFormat->findBuffer("csd-0", &newBuf)) {
+            if (oldBufSize != newBuf->size()) {
+                return false;
+            }
+            return !memcmp(oldBufData, newBuf->data(), oldBufSize);
+        }
+    }
+    return false;
+}
+
+bool NuPlayer2::Decoder::supportsSeamlessFormatChange(const sp<AMessage> &targetFormat) const {
+    if (mInputFormat == NULL) {
+        return false;
+    }
+
+    if (targetFormat == NULL) {
+        return true;
+    }
+
+    AString oldMime, newMime;
+    if (!mInputFormat->getString("mime", &oldMime)
+            || !targetFormat->findString("mime", &newMime)
+            || !(oldMime == newMime)) {
+        return false;
+    }
+
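+    // Audio: compare AAC stream parameters; video: require an adaptive-playback capable codec.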
+    bool audio = !strncasecmp(oldMime.c_str(), "audio/", strlen("audio/"));
+    bool seamless;
+    if (audio) {
+        seamless = supportsSeamlessAudioFormatChange(targetFormat);
+    } else {
+        int32_t isAdaptive;
+        seamless = (mCodec != NULL &&
+                mInputFormat->getInt32("adaptive-playback", &isAdaptive) &&
+                isAdaptive);
+    }
+
+    ALOGV("%s seamless support for %s", seamless ? "yes" : "no", oldMime.c_str());
+    return seamless;
+}
+
+void NuPlayer2::Decoder::rememberCodecSpecificData(const sp<AMessage> &format) {
+    if (format == NULL) {
+        return;
+    }
+    mCSDsForCurrentFormat.clear();
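+    // Collect csd-0, csd-1, ... until the first missing entry.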
+    for (int32_t i = 0; ; ++i) {
+        AString tag = "csd-";
+        tag.append(i);
+        sp<ABuffer> buffer;
+        if (!format->findBuffer(tag.c_str(), &buffer)) {
+            break;
+        }
+        mCSDsForCurrentFormat.push(buffer);
+    }
+}
+
+void NuPlayer2::Decoder::notifyResumeCompleteIfNecessary() {
+    if (mResumePending) {
+        mResumePending = false;
+
+        sp<AMessage> notify = mNotify->dup();
+        notify->setInt32("what", kWhatResumeCompleted);
+        notify->post();
+    }
+}
+
+}  // namespace android
+
diff --git a/media/libmedia/nuplayer2/NuPlayer2Decoder.h b/media/libmedia/nuplayer2/NuPlayer2Decoder.h
new file mode 100644
index 0000000..58f70e7
--- /dev/null
+++ b/media/libmedia/nuplayer2/NuPlayer2Decoder.h
@@ -0,0 +1,150 @@
+/*
+ * Copyright 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef NUPLAYER2_DECODER_H_
+#define NUPLAYER2_DECODER_H_
+
+#include "NuPlayer2.h"
+
+#include "NuPlayer2DecoderBase.h"
+
+namespace android {
+
+class MediaCodecBuffer;
+
+struct AMediaCodecWrapper;
+struct AMediaFormatWrapper;
+
+struct NuPlayer2::Decoder : public DecoderBase {
+    Decoder(const sp<AMessage> &notify,
+            const sp<Source> &source,
+            pid_t pid,
+            uid_t uid,
+            const sp<Renderer> &renderer = NULL,
+            const sp<Surface> &surface = NULL,
+            const sp<CCDecoder> &ccDecoder = NULL);
+
+    virtual sp<AMessage> getStats() const;
+
+    // sets the output surface of video decoders.
+    virtual status_t setVideoSurface(const sp<Surface> &surface);
+
+    virtual status_t releaseCrypto();
+
+protected:
+    virtual ~Decoder();
+
+    virtual void onMessageReceived(const sp<AMessage> &msg);
+
+    virtual void onConfigure(const sp<AMessage> &format);
+    virtual void onSetParameters(const sp<AMessage> &params);
+    virtual void onSetRenderer(const sp<Renderer> &renderer);
+    virtual void onResume(bool notifyComplete);
+    virtual void onFlush();
+    virtual void onShutdown(bool notifyComplete);
+    virtual bool doRequestBuffers();
+
+private:
+    enum {
+        kWhatCodecNotify         = 'cdcN',
+        kWhatRenderBuffer        = 'rndr',
+        kWhatSetVideoSurface     = 'sSur',
+        kWhatAudioOutputFormatChanged = 'aofc',
+        kWhatDrmReleaseCrypto    = 'rDrm',
+    };
+
+    enum {
+        kMaxNumVideoTemporalLayers = 32,
+    };
+
+    sp<Surface> mSurface;
+
+    sp<Source> mSource;
+    sp<Renderer> mRenderer;
+    sp<CCDecoder> mCCDecoder;
+
+    sp<AMediaFormatWrapper> mInputFormat;
+    sp<AMediaCodecWrapper> mCodec;
+
+    List<sp<AMessage> > mPendingInputMessages;
+
+    Vector<sp<MediaCodecBuffer> > mInputBuffers;
+    Vector<sp<MediaCodecBuffer> > mOutputBuffers;
+    Vector<sp<ABuffer> > mCSDsForCurrentFormat;
+    Vector<sp<ABuffer> > mCSDsToSubmit;
+    Vector<bool> mInputBufferIsDequeued;
+    Vector<MediaBuffer *> mMediaBuffers;
+    Vector<size_t> mDequeuedInputBuffers;
+
+    const pid_t mPid;
+    const uid_t mUid;
+    int64_t mSkipRenderingUntilMediaTimeUs;
+    int64_t mNumFramesTotal;
+    int64_t mNumInputFramesDropped;
+    int64_t mNumOutputFramesDropped;
+    int32_t mVideoWidth;
+    int32_t mVideoHeight;
+    bool mIsAudio;
+    bool mIsVideoAVC;
+    bool mIsSecure;
+    bool mIsEncrypted;
+    bool mIsEncryptedObservedEarlier;
+    bool mFormatChangePending;
+    bool mTimeChangePending;
+    float mFrameRateTotal;
+    float mPlaybackSpeed;
+    int32_t mNumVideoTemporalLayerTotal;
+    int32_t mNumVideoTemporalLayerAllowed;
+    int32_t mCurrentMaxVideoTemporalLayerId;
+    float mVideoTemporalLayerAggregateFps[kMaxNumVideoTemporalLayers];
+
+    bool mResumePending;
+    AString mComponentName;
+
+    void handleError(int32_t err);
+    bool handleAnInputBuffer(size_t index);
+    bool handleAnOutputBuffer(
+            size_t index,
+            size_t offset,
+            size_t size,
+            int64_t timeUs,
+            int32_t flags);
+    void handleOutputFormatChange(const sp<AMessage> &format);
+
+    void releaseAndResetMediaBuffers();
+    bool isStaleReply(const sp<AMessage> &msg);
+
+    void doFlush(bool notifyComplete);
+    status_t fetchInputData(sp<AMessage> &reply);
+    bool onInputBufferFetched(const sp<AMessage> &msg);
+    void onRenderBuffer(const sp<AMessage> &msg);
+
+    bool supportsSeamlessFormatChange(const sp<AMessage> &to) const;
+    bool supportsSeamlessAudioFormatChange(const sp<AMessage> &targetFormat) const;
+    void rememberCodecSpecificData(const sp<AMessage> &format);
+    bool isDiscontinuityPending() const;
+    void finishHandleDiscontinuity(bool flushOnTimeChange);
+
+    void notifyResumeCompleteIfNecessary();
+
+    void onReleaseCrypto(const sp<AMessage>& msg);
+
+    DISALLOW_EVIL_CONSTRUCTORS(Decoder);
+};
+
+}  // namespace android
+
+#endif  // NUPLAYER2_DECODER_H_
diff --git a/media/libmedia/nuplayer2/NuPlayer2DecoderBase.cpp b/media/libmedia/nuplayer2/NuPlayer2DecoderBase.cpp
new file mode 100644
index 0000000..4d80912
--- /dev/null
+++ b/media/libmedia/nuplayer2/NuPlayer2DecoderBase.cpp
@@ -0,0 +1,214 @@
+/*
+ * Copyright 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "NuPlayer2DecoderBase"
+#include <utils/Log.h>
+#include <inttypes.h>
+
+#include "NuPlayer2DecoderBase.h"
+
+#include "NuPlayer2Renderer.h"
+
+#include <media/MediaCodecBuffer.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AMessage.h>
+
+namespace android {
+
+NuPlayer2::DecoderBase::DecoderBase(const sp<AMessage> &notify)
+    :  mNotify(notify),
+       mBufferGeneration(0),
+       mPaused(false),
+       mStats(new AMessage),
+       mRequestInputBuffersPending(false) {
+    // Every decoder has its own looper because MediaCodec operations
+    // are blocking, but NuPlayer2 needs asynchronous operations.
+    mDecoderLooper = new ALooper;
+    mDecoderLooper->setName("NPDecoder");
+    mDecoderLooper->start(false, false, ANDROID_PRIORITY_AUDIO);
+}
+
+NuPlayer2::DecoderBase::~DecoderBase() {
+    stopLooper();
+}
+
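+// Posts |msg| to its target handler and blocks for the reply; if the reply carries
+// an "err" field, that error is returned, otherwise OK.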
+static
+status_t PostAndAwaitResponse(
+        const sp<AMessage> &msg, sp<AMessage> *response) {
+    status_t err = msg->postAndAwaitResponse(response);
+
+    if (err != OK) {
+        return err;
+    }
+
+    if (!(*response)->findInt32("err", &err)) {
+        err = OK;
+    }
+
+    return err;
+}
+
+void NuPlayer2::DecoderBase::configure(const sp<AMessage> &format) {
+    sp<AMessage> msg = new AMessage(kWhatConfigure, this);
+    msg->setMessage("format", format);
+    msg->post();
+}
+
+void NuPlayer2::DecoderBase::init() {
+    mDecoderLooper->registerHandler(this);
+}
+
+void NuPlayer2::DecoderBase::stopLooper() {
+    mDecoderLooper->unregisterHandler(id());
+    mDecoderLooper->stop();
+}
+
+void NuPlayer2::DecoderBase::setParameters(const sp<AMessage> &params) {
+    sp<AMessage> msg = new AMessage(kWhatSetParameters, this);
+    msg->setMessage("params", params);
+    msg->post();
+}
+
+void NuPlayer2::DecoderBase::setRenderer(const sp<Renderer> &renderer) {
+    sp<AMessage> msg = new AMessage(kWhatSetRenderer, this);
+    msg->setObject("renderer", renderer);
+    msg->post();
+}
+
+void NuPlayer2::DecoderBase::pause() {
+    sp<AMessage> msg = new AMessage(kWhatPause, this);
+
+    sp<AMessage> response;
+    PostAndAwaitResponse(msg, &response);
+}
+
+void NuPlayer2::DecoderBase::signalFlush() {
+    (new AMessage(kWhatFlush, this))->post();
+}
+
+void NuPlayer2::DecoderBase::signalResume(bool notifyComplete) {
+    sp<AMessage> msg = new AMessage(kWhatResume, this);
+    msg->setInt32("notifyComplete", notifyComplete);
+    msg->post();
+}
+
+void NuPlayer2::DecoderBase::initiateShutdown() {
+    (new AMessage(kWhatShutdown, this))->post();
+}
+
+void NuPlayer2::DecoderBase::onRequestInputBuffers() {
+    if (mRequestInputBuffersPending) {
+        return;
+    }
+
+    // doRequestBuffers() returns true if we should request more data
+    if (doRequestBuffers()) {
+        mRequestInputBuffersPending = true;
+
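+        // Poll again in 10 ms in case the source can provide more data by then.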
+        sp<AMessage> msg = new AMessage(kWhatRequestInputBuffers, this);
+        msg->post(10 * 1000ll);
+    }
+}
+
+void NuPlayer2::DecoderBase::onMessageReceived(const sp<AMessage> &msg) {
+
+    switch (msg->what()) {
+        case kWhatConfigure:
+        {
+            sp<AMessage> format;
+            CHECK(msg->findMessage("format", &format));
+            onConfigure(format);
+            break;
+        }
+
+        case kWhatSetParameters:
+        {
+            sp<AMessage> params;
+            CHECK(msg->findMessage("params", &params));
+            onSetParameters(params);
+            break;
+        }
+
+        case kWhatSetRenderer:
+        {
+            sp<RefBase> obj;
+            CHECK(msg->findObject("renderer", &obj));
+            onSetRenderer(static_cast<Renderer *>(obj.get()));
+            break;
+        }
+
+        case kWhatPause:
+        {
+            sp<AReplyToken> replyID;
+            CHECK(msg->senderAwaitsResponse(&replyID));
+
+            mPaused = true;
+
+            (new AMessage)->postReply(replyID);
+            break;
+        }
+
+        case kWhatRequestInputBuffers:
+        {
+            mRequestInputBuffersPending = false;
+            onRequestInputBuffers();
+            break;
+        }
+
+        case kWhatFlush:
+        {
+            onFlush();
+            break;
+        }
+
+        case kWhatResume:
+        {
+            int32_t notifyComplete;
+            CHECK(msg->findInt32("notifyComplete", &notifyComplete));
+
+            onResume(notifyComplete);
+            break;
+        }
+
+        case kWhatShutdown:
+        {
+            onShutdown(true);
+            break;
+        }
+
+        default:
+            TRESPASS();
+            break;
+    }
+}
+
+void NuPlayer2::DecoderBase::handleError(int32_t err)
+{
+    // We cannot immediately release the codec due to buffers still outstanding
+    // in the renderer.  We signal to the player the error so it can shutdown/release the
+    // decoder after flushing and increment the generation to discard unnecessary messages.
+
+    ++mBufferGeneration;
+
+    sp<AMessage> notify = mNotify->dup();
+    notify->setInt32("what", kWhatError);
+    notify->setInt32("err", err);
+    notify->post();
+}
+
+}  // namespace android
+
diff --git a/media/libmedia/nuplayer2/NuPlayer2DecoderBase.h b/media/libmedia/nuplayer2/NuPlayer2DecoderBase.h
new file mode 100644
index 0000000..2819dae
--- /dev/null
+++ b/media/libmedia/nuplayer2/NuPlayer2DecoderBase.h
@@ -0,0 +1,111 @@
+/*
+ * Copyright 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef NUPLAYER2_DECODER_BASE_H_
+
+#define NUPLAYER2_DECODER_BASE_H_
+
+#include "NuPlayer2.h"
+
+#include <media/stagefright/foundation/AHandler.h>
+
+namespace android {
+
+struct ABuffer;
+struct MediaCodec;
+class MediaBuffer;
+class MediaCodecBuffer;
+class Surface;
+
+struct NuPlayer2::DecoderBase : public AHandler {
+    explicit DecoderBase(const sp<AMessage> &notify);
+
+    void configure(const sp<AMessage> &format);
+    void init();
+    void setParameters(const sp<AMessage> &params);
+
+    // Synchronous call to ensure decoder will not request or send out data.
+    void pause();
+
+    void setRenderer(const sp<Renderer> &renderer);
+    virtual status_t setVideoSurface(const sp<Surface> &) { return INVALID_OPERATION; }
+
+    void signalFlush();
+    void signalResume(bool notifyComplete);
+    void initiateShutdown();
+
+    virtual sp<AMessage> getStats() const {
+        return mStats;
+    }
+
+    virtual status_t releaseCrypto() {
+        return INVALID_OPERATION;
+    }
+
+    enum {
+        kWhatInputDiscontinuity  = 'inDi',
+        kWhatVideoSizeChanged    = 'viSC',
+        kWhatFlushCompleted      = 'flsC',
+        kWhatShutdownCompleted   = 'shDC',
+        kWhatResumeCompleted     = 'resC',
+        kWhatEOS                 = 'eos ',
+        kWhatError               = 'err ',
+    };
+
+protected:
+
+    virtual ~DecoderBase();
+
+    void stopLooper();
+
+    virtual void onMessageReceived(const sp<AMessage> &msg);
+
+    virtual void onConfigure(const sp<AMessage> &format) = 0;
+    virtual void onSetParameters(const sp<AMessage> &params) = 0;
+    virtual void onSetRenderer(const sp<Renderer> &renderer) = 0;
+    virtual void onResume(bool notifyComplete) = 0;
+    virtual void onFlush() = 0;
+    virtual void onShutdown(bool notifyComplete) = 0;
+
+    void onRequestInputBuffers();
+    virtual bool doRequestBuffers() = 0;
+    virtual void handleError(int32_t err);
+
+    sp<AMessage> mNotify;
+    int32_t mBufferGeneration;
+    bool mPaused;
+    sp<AMessage> mStats;
+
+private:
+    enum {
+        kWhatConfigure           = 'conf',
+        kWhatSetParameters       = 'setP',
+        kWhatSetRenderer         = 'setR',
+        kWhatPause               = 'paus',
+        kWhatRequestInputBuffers = 'reqB',
+        kWhatFlush               = 'flus',
+        kWhatShutdown            = 'shuD',
+    };
+
+    sp<ALooper> mDecoderLooper;
+    bool mRequestInputBuffersPending;
+
+    DISALLOW_EVIL_CONSTRUCTORS(DecoderBase);
+};
+
+}  // namespace android
+
+#endif  // NUPLAYER2_DECODER_BASE_H_
diff --git a/media/libmedia/nuplayer2/NuPlayer2DecoderPassThrough.cpp b/media/libmedia/nuplayer2/NuPlayer2DecoderPassThrough.cpp
new file mode 100644
index 0000000..0e0c1d8
--- /dev/null
+++ b/media/libmedia/nuplayer2/NuPlayer2DecoderPassThrough.cpp
@@ -0,0 +1,434 @@
+/*
+ * Copyright 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "NuPlayer2DecoderPassThrough"
+#include <utils/Log.h>
+#include <inttypes.h>
+
+#include "NuPlayer2DecoderPassThrough.h"
+
+#include "NuPlayer2Renderer.h"
+#include "NuPlayer2Source.h"
+
+#include <media/MediaCodecBuffer.h>
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/MediaErrors.h>
+
+#include "ATSParser.h"
+
+namespace android {
+
+// TODO optimize buffer size for power consumption
+// The offload read buffer size is 32 KB but 24 KB uses less power.
+static const size_t kAggregateBufferSizeBytes = 24 * 1024;
+static const size_t kMaxCachedBytes = 200000;
+
+NuPlayer2::DecoderPassThrough::DecoderPassThrough(
+        const sp<AMessage> &notify,
+        const sp<Source> &source,
+        const sp<Renderer> &renderer)
+    : DecoderBase(notify),
+      mSource(source),
+      mRenderer(renderer),
+      mSkipRenderingUntilMediaTimeUs(-1ll),
+      mReachedEOS(true),
+      mPendingAudioErr(OK),
+      mPendingBuffersToDrain(0),
+      mCachedBytes(0),
+      mComponentName("pass through decoder") {
+    ALOGW_IF(renderer == NULL, "expect a non-NULL renderer");
+}
+
+NuPlayer2::DecoderPassThrough::~DecoderPassThrough() {
+}
+
+void NuPlayer2::DecoderPassThrough::onConfigure(const sp<AMessage> &format) {
+    ALOGV("[%s] onConfigure", mComponentName.c_str());
+    mCachedBytes = 0;
+    mPendingBuffersToDrain = 0;
+    mReachedEOS = false;
+    ++mBufferGeneration;
+
+    onRequestInputBuffers();
+
+    int32_t hasVideo = 0;
+    format->findInt32("has-video", &hasVideo);
+
+    // The audio sink is already opened before the PassThrough decoder is created.
+    // Opening it again is only relevant if the decoder is instantiated again after
+    // shutdown and the format is different.
+    status_t err = mRenderer->openAudioSink(
+            format, true /* offloadOnly */, hasVideo,
+            AUDIO_OUTPUT_FLAG_NONE /* flags */, NULL /* isOffloaded */, mSource->isStreaming());
+    if (err != OK) {
+        handleError(err);
+    }
+}
+
+void NuPlayer2::DecoderPassThrough::onSetParameters(const sp<AMessage> &/*params*/) {
+    ALOGW("onSetParameters() called unexpectedly");
+}
+
+void NuPlayer2::DecoderPassThrough::onSetRenderer(
+        const sp<Renderer> &renderer) {
+    // renderer can't be changed during offloading
+    ALOGW_IF(renderer != mRenderer,
+            "ignoring request to change renderer");
+}
+
+bool NuPlayer2::DecoderPassThrough::isStaleReply(const sp<AMessage> &msg) {
+    int32_t generation;
+    CHECK(msg->findInt32("generation", &generation));
+    return generation != mBufferGeneration;
+}
+
+bool NuPlayer2::DecoderPassThrough::isDoneFetching() const {
+    ALOGV("[%s] mCachedBytes = %zu, mReachedEOS = %d mPaused = %d",
+            mComponentName.c_str(), mCachedBytes, mReachedEOS, mPaused);
+
+    return mCachedBytes >= kMaxCachedBytes || mReachedEOS || mPaused;
+}
+
+/*
+ * returns true if we should request more data
+ */
+bool NuPlayer2::DecoderPassThrough::doRequestBuffers() {
+    status_t err = OK;
+    while (!isDoneFetching()) {
+        sp<AMessage> msg = new AMessage();
+
+        err = fetchInputData(msg);
+        if (err != OK) {
+            break;
+        }
+
+        onInputBufferFetched(msg);
+    }
+
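+    // Request another round only if the source would have blocked and can still be fed.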
+    return err == -EWOULDBLOCK
+            && mSource->feedMoreTSData() == OK;
+}
+
+status_t NuPlayer2::DecoderPassThrough::dequeueAccessUnit(sp<ABuffer> *accessUnit) {
+    status_t err;
+
+    // Did we save an accessUnit earlier because of a discontinuity?
+    if (mPendingAudioAccessUnit != NULL) {
+        *accessUnit = mPendingAudioAccessUnit;
+        mPendingAudioAccessUnit.clear();
+        err = mPendingAudioErr;
+        ALOGV("feedDecoderInputData() use mPendingAudioAccessUnit");
+    } else {
+        err = mSource->dequeueAccessUnit(true /* audio */, accessUnit);
+    }
+
+    if (err == INFO_DISCONTINUITY || err == ERROR_END_OF_STREAM) {
+        if (mAggregateBuffer != NULL) {
+            // We already have some data so save this for later.
+            mPendingAudioErr = err;
+            mPendingAudioAccessUnit = *accessUnit;
+            (*accessUnit).clear();
+            ALOGD("return aggregated buffer and save err(=%d) for later", err);
+            err = OK;
+        }
+    }
+
+    return err;
+}
+
+sp<ABuffer> NuPlayer2::DecoderPassThrough::aggregateBuffer(
+        const sp<ABuffer> &accessUnit) {
+    sp<ABuffer> aggregate;
+
+    if (accessUnit == NULL) {
+        // accessUnit is saved to mPendingAudioAccessUnit
+        // return current mAggregateBuffer
+        aggregate = mAggregateBuffer;
+        mAggregateBuffer.clear();
+        return aggregate;
+    }
+
+    size_t smallSize = accessUnit->size();
+    if ((mAggregateBuffer == NULL)
+            // Don't bother if only room for a few small buffers.
+            && (smallSize < (kAggregateBufferSizeBytes / 3))) {
+        // Create a larger buffer for combining smaller buffers from the extractor.
+        mAggregateBuffer = new ABuffer(kAggregateBufferSizeBytes);
+        mAggregateBuffer->setRange(0, 0); // start empty
+    }
+
+    if (mAggregateBuffer != NULL) {
+        int64_t timeUs;
+        int64_t dummy;
+        bool smallTimestampValid = accessUnit->meta()->findInt64("timeUs", &timeUs);
+        bool bigTimestampValid = mAggregateBuffer->meta()->findInt64("timeUs", &dummy);
+        // Will the smaller buffer fit?
+        size_t bigSize = mAggregateBuffer->size();
+        size_t roomLeft = mAggregateBuffer->capacity() - bigSize;
+        // Should we save this small buffer for the next big buffer?
+        // If the first small buffer did not have a timestamp then save
+        // any buffer that does have a timestamp until the next big buffer.
+        if ((smallSize > roomLeft)
+            || (!bigTimestampValid && (bigSize > 0) && smallTimestampValid)) {
+            mPendingAudioErr = OK;
+            mPendingAudioAccessUnit = accessUnit;
+            aggregate = mAggregateBuffer;
+            mAggregateBuffer.clear();
+        } else {
+            // Grab time from first small buffer if available.
+            if ((bigSize == 0) && smallTimestampValid) {
+                mAggregateBuffer->meta()->setInt64("timeUs", timeUs);
+            }
+            // Append small buffer to the bigger buffer.
+            memcpy(mAggregateBuffer->base() + bigSize, accessUnit->data(), smallSize);
+            bigSize += smallSize;
+            mAggregateBuffer->setRange(0, bigSize);
+
+            ALOGV("feedDecoderInputData() smallSize = %zu, bigSize = %zu, capacity = %zu",
+                    smallSize, bigSize, mAggregateBuffer->capacity());
+        }
+    } else {
+        // decided not to aggregate
+        aggregate = accessUnit;
+    }
+
+    return aggregate;
+}
+
+status_t NuPlayer2::DecoderPassThrough::fetchInputData(sp<AMessage> &reply) {
+    sp<ABuffer> accessUnit;
+
+    do {
+        status_t err = dequeueAccessUnit(&accessUnit);
+
+        if (err == -EWOULDBLOCK) {
+            // Flush out the aggregate buffer to try to avoid underrun.
+            accessUnit = aggregateBuffer(NULL /* accessUnit */);
+            if (accessUnit != NULL) {
+                break;
+            }
+            return err;
+        } else if (err != OK) {
+            if (err == INFO_DISCONTINUITY) {
+                int32_t type;
+                CHECK(accessUnit->meta()->findInt32("discontinuity", &type));
+
+                bool formatChange =
+                        (type & ATSParser::DISCONTINUITY_AUDIO_FORMAT) != 0;
+
+                bool timeChange =
+                        (type & ATSParser::DISCONTINUITY_TIME) != 0;
+
+                ALOGI("audio discontinuity (formatChange=%d, time=%d)",
+                        formatChange, timeChange);
+
+                if (formatChange || timeChange) {
+                    sp<AMessage> msg = mNotify->dup();
+                    msg->setInt32("what", kWhatInputDiscontinuity);
+                    // will perform seamless format change,
+                    // only notify NuPlayer2 to scan sources
+                    msg->setInt32("formatChange", false);
+                    msg->post();
+                }
+
+                if (timeChange) {
+                    doFlush(false /* notifyComplete */);
+                    err = OK;
+                } else if (formatChange) {
+                    // do seamless format change
+                    err = OK;
+                } else {
+                    // This stream is unaffected by the discontinuity
+                    return -EWOULDBLOCK;
+                }
+            }
+
+            reply->setInt32("err", err);
+            return OK;
+        }
+
+        accessUnit = aggregateBuffer(accessUnit);
+    } while (accessUnit == NULL);
+
+#if 0
+    int64_t mediaTimeUs;
+    CHECK(accessUnit->meta()->findInt64("timeUs", &mediaTimeUs));
+    ALOGV("feeding audio input buffer at media time %.2f secs",
+         mediaTimeUs / 1E6);
+#endif
+
+    reply->setBuffer("buffer", accessUnit);
+
+    return OK;
+}
+
+void NuPlayer2::DecoderPassThrough::onInputBufferFetched(
+        const sp<AMessage> &msg) {
+    if (mReachedEOS) {
+        return;
+    }
+
+    sp<ABuffer> buffer;
+    bool hasBuffer = msg->findBuffer("buffer", &buffer);
+    if (buffer == NULL) {
+        int32_t streamErr = ERROR_END_OF_STREAM;
+        CHECK(msg->findInt32("err", &streamErr) || !hasBuffer);
+        if (streamErr == OK) {
+            return;
+        }
+
+        if (streamErr != ERROR_END_OF_STREAM) {
+            handleError(streamErr);
+        }
+        mReachedEOS = true;
+        if (mRenderer != NULL) {
+            mRenderer->queueEOS(true /* audio */, ERROR_END_OF_STREAM);
+        }
+        return;
+    }
+
+    sp<AMessage> extra;
+    if (buffer->meta()->findMessage("extra", &extra) && extra != NULL) {
+        int64_t resumeAtMediaTimeUs;
+        if (extra->findInt64(
+                    "resume-at-mediatimeUs", &resumeAtMediaTimeUs)) {
+            ALOGI("[%s] suppressing rendering until %lld us",
+                    mComponentName.c_str(), (long long)resumeAtMediaTimeUs);
+            mSkipRenderingUntilMediaTimeUs = resumeAtMediaTimeUs;
+        }
+    }
+
+    int32_t bufferSize = buffer->size();
+    mCachedBytes += bufferSize;
+
+    int64_t timeUs = 0;
+    CHECK(buffer->meta()->findInt64("timeUs", &timeUs));
+    if (mSkipRenderingUntilMediaTimeUs >= 0) {
+        if (timeUs < mSkipRenderingUntilMediaTimeUs) {
+            ALOGV("[%s] dropping buffer at time %lld as requested.",
+                     mComponentName.c_str(), (long long)timeUs);
+
+            onBufferConsumed(bufferSize);
+            return;
+        }
+
+        mSkipRenderingUntilMediaTimeUs = -1;
+    }
+
+    if (mRenderer == NULL) {
+        onBufferConsumed(bufferSize);
+        return;
+    }
+
+    sp<AMessage> reply = new AMessage(kWhatBufferConsumed, this);
+    reply->setInt32("generation", mBufferGeneration);
+    reply->setInt32("size", bufferSize);
+
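+    // Hand the access unit to the renderer wrapped as a MediaCodecBuffer; the reply
+    // message triggers onBufferConsumed() once the renderer has drained it.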
+    sp<MediaCodecBuffer> mcBuffer = new MediaCodecBuffer(nullptr, buffer);
+    mcBuffer->meta()->setInt64("timeUs", timeUs);
+
+    mRenderer->queueBuffer(true /* audio */, mcBuffer, reply);
+
+    ++mPendingBuffersToDrain;
+    ALOGV("onInputBufferFilled: #ToDrain = %zu, cachedBytes = %zu",
+            mPendingBuffersToDrain, mCachedBytes);
+}
+
+void NuPlayer2::DecoderPassThrough::onBufferConsumed(int32_t size) {
+    --mPendingBuffersToDrain;
+    mCachedBytes -= size;
+    ALOGV("onBufferConsumed: #ToDrain = %zu, cachedBytes = %zu",
+            mPendingBuffersToDrain, mCachedBytes);
+    onRequestInputBuffers();
+}
+
+void NuPlayer2::DecoderPassThrough::onResume(bool notifyComplete) {
+    mPaused = false;
+
+    onRequestInputBuffers();
+
+    if (notifyComplete) {
+        sp<AMessage> notify = mNotify->dup();
+        notify->setInt32("what", kWhatResumeCompleted);
+        notify->post();
+    }
+}
+
+void NuPlayer2::DecoderPassThrough::doFlush(bool notifyComplete) {
+    ++mBufferGeneration;
+    mSkipRenderingUntilMediaTimeUs = -1;
+    mPendingAudioAccessUnit.clear();
+    mPendingAudioErr = OK;
+    mAggregateBuffer.clear();
+
+    if (mRenderer != NULL) {
+        mRenderer->flush(true /* audio */, notifyComplete);
+        mRenderer->signalTimeDiscontinuity();
+    }
+
+    mPendingBuffersToDrain = 0;
+    mCachedBytes = 0;
+    mReachedEOS = false;
+}
+
+void NuPlayer2::DecoderPassThrough::onFlush() {
+    doFlush(true /* notifyComplete */);
+
+    mPaused = true;
+    sp<AMessage> notify = mNotify->dup();
+    notify->setInt32("what", kWhatFlushCompleted);
+    notify->post();
+}
+
+void NuPlayer2::DecoderPassThrough::onShutdown(bool notifyComplete) {
+    ++mBufferGeneration;
+    mSkipRenderingUntilMediaTimeUs = -1;
+
+    if (notifyComplete) {
+        sp<AMessage> notify = mNotify->dup();
+        notify->setInt32("what", kWhatShutdownCompleted);
+        notify->post();
+    }
+
+    mReachedEOS = true;
+}
+
+void NuPlayer2::DecoderPassThrough::onMessageReceived(const sp<AMessage> &msg) {
+    ALOGV("[%s] onMessage: %s", mComponentName.c_str(),
+            msg->debugString().c_str());
+
+    switch (msg->what()) {
+        case kWhatBufferConsumed:
+        {
+            if (!isStaleReply(msg)) {
+                int32_t size;
+                CHECK(msg->findInt32("size", &size));
+                onBufferConsumed(size);
+            }
+            break;
+        }
+
+        default:
+            DecoderBase::onMessageReceived(msg);
+            break;
+    }
+}
+
+}  // namespace android
diff --git a/media/libmedia/nuplayer2/NuPlayer2DecoderPassThrough.h b/media/libmedia/nuplayer2/NuPlayer2DecoderPassThrough.h
new file mode 100644
index 0000000..838c60a
--- /dev/null
+++ b/media/libmedia/nuplayer2/NuPlayer2DecoderPassThrough.h
@@ -0,0 +1,85 @@
+/*
+ * Copyright 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef NUPLAYER2_DECODER_PASS_THROUGH_H_
+
+#define NUPLAYER2_DECODER_PASS_THROUGH_H_
+
+#include "NuPlayer2.h"
+
+#include "NuPlayer2DecoderBase.h"
+
+namespace android {
+
+struct NuPlayer2::DecoderPassThrough : public DecoderBase {
+    DecoderPassThrough(const sp<AMessage> &notify,
+                       const sp<Source> &source,
+                       const sp<Renderer> &renderer);
+
+protected:
+
+    virtual ~DecoderPassThrough();
+
+    virtual void onMessageReceived(const sp<AMessage> &msg);
+
+    virtual void onConfigure(const sp<AMessage> &format);
+    virtual void onSetParameters(const sp<AMessage> &params);
+    virtual void onSetRenderer(const sp<Renderer> &renderer);
+    virtual void onResume(bool notifyComplete);
+    virtual void onFlush();
+    virtual void onShutdown(bool notifyComplete);
+    virtual bool doRequestBuffers();
+
+private:
+    enum {
+        kWhatBufferConsumed     = 'bufC',
+    };
+
+    sp<Source> mSource;
+    sp<Renderer> mRenderer;
+    int64_t mSkipRenderingUntilMediaTimeUs;
+
+    bool    mReachedEOS;
+
+    // Used by fetchInputData()/aggregateBuffer() to aggregate small buffers into
+    // one large buffer.
+    sp<ABuffer> mPendingAudioAccessUnit;
+    status_t    mPendingAudioErr;
+    sp<ABuffer> mAggregateBuffer;
+
+    // mPendingBuffersToDrain is only for debugging. It can be removed
+    // when the power investigation is done.
+    size_t  mPendingBuffersToDrain;
+    size_t  mCachedBytes;
+    AString mComponentName;
+
+    bool isStaleReply(const sp<AMessage> &msg);
+    bool isDoneFetching() const;
+
+    status_t dequeueAccessUnit(sp<ABuffer> *accessUnit);
+    sp<ABuffer> aggregateBuffer(const sp<ABuffer> &accessUnit);
+    status_t fetchInputData(sp<AMessage> &reply);
+    void doFlush(bool notifyComplete);
+
+    void onInputBufferFetched(const sp<AMessage> &msg);
+    void onBufferConsumed(int32_t size);
+
+    DISALLOW_EVIL_CONSTRUCTORS(DecoderPassThrough);
+};
+
+}  // namespace android
+
+#endif  // NUPLAYER2_DECODER_PASS_THROUGH_H_
diff --git a/media/libmedia/nuplayer2/NuPlayer2Driver.cpp b/media/libmedia/nuplayer2/NuPlayer2Driver.cpp
new file mode 100644
index 0000000..c0f0bad
--- /dev/null
+++ b/media/libmedia/nuplayer2/NuPlayer2Driver.cpp
@@ -0,0 +1,1092 @@
+/*
+ * Copyright 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "NuPlayer2Driver"
+#include <inttypes.h>
+#include <utils/Log.h>
+#include <cutils/properties.h>
+
+#include "NuPlayer2Driver.h"
+
+#include "NuPlayer2.h"
+#include "NuPlayer2Source.h"
+
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/ALooper.h>
+#include <media/stagefright/foundation/AUtils.h>
+#include <media/stagefright/foundation/ByteUtils.h>
+#include <media/stagefright/MediaClock.h>
+#include <media/stagefright/MetaData.h>
+#include <media/stagefright/Utils.h>
+
+#include <media/IMediaAnalyticsService.h>
+
+static const int kDumpLockRetries = 50;
+static const int kDumpLockSleepUs = 20000;
+
+namespace android {
+
+// key for media statistics
+static const char *kKeyPlayer = "nuplayer";
+// attrs for media statistics
+static const char *kPlayerVMime = "android.media.mediaplayer.video.mime";
+static const char *kPlayerVCodec = "android.media.mediaplayer.video.codec";
+static const char *kPlayerWidth = "android.media.mediaplayer.width";
+static const char *kPlayerHeight = "android.media.mediaplayer.height";
+static const char *kPlayerFrames = "android.media.mediaplayer.frames";
+static const char *kPlayerFramesDropped = "android.media.mediaplayer.dropped";
+static const char *kPlayerAMime = "android.media.mediaplayer.audio.mime";
+static const char *kPlayerACodec = "android.media.mediaplayer.audio.codec";
+static const char *kPlayerDuration = "android.media.mediaplayer.durationMs";
+static const char *kPlayerPlaying = "android.media.mediaplayer.playingMs";
+static const char *kPlayerError = "android.media.mediaplayer.err";
+static const char *kPlayerErrorCode = "android.media.mediaplayer.errcode";
+static const char *kPlayerDataSourceType = "android.media.mediaplayer.dataSource";
+// rebuffering statistics
+static const char *kPlayerRebuffering = "android.media.mediaplayer.rebufferingMs";
+static const char *kPlayerRebufferingCount = "android.media.mediaplayer.rebuffers";
+static const char *kPlayerRebufferingAtExit = "android.media.mediaplayer.rebufferExit";
+
+
+NuPlayer2Driver::NuPlayer2Driver(pid_t pid)
+    : mState(STATE_IDLE),
+      mIsAsyncPrepare(false),
+      mAsyncResult(UNKNOWN_ERROR),
+      mSetSurfaceInProgress(false),
+      mDurationUs(-1),
+      mPositionUs(-1),
+      mSeekInProgress(false),
+      mPlayingTimeUs(0),
+      mRebufferingTimeUs(0),
+      mRebufferingEvents(0),
+      mRebufferingAtExit(false),
+      mLooper(new ALooper),
+      mMediaClock(new MediaClock),
+      mPlayer(new NuPlayer2(pid, mMediaClock)),
+      mPlayerFlags(0),
+      mAnalyticsItem(NULL),
+      mClientUid(-1),
+      mAtEOS(false),
+      mLooping(false),
+      mAutoLoop(false) {
+    ALOGD("NuPlayer2Driver(%p) created, clientPid(%d)", this, pid);
+    mLooper->setName("NuPlayer2Driver Looper");
+
+    mMediaClock->init();
+
+    // set up an analytics record
+    mAnalyticsItem = new MediaAnalyticsItem(kKeyPlayer);
+    mAnalyticsItem->generateSessionID();
+
+    mLooper->start(
+            false, /* runOnCallingThread */
+            true,  /* canCallJava */
+            PRIORITY_AUDIO);
+
+    mLooper->registerHandler(mPlayer);
+
+    mPlayer->setDriver(this);
+}
+
+NuPlayer2Driver::~NuPlayer2Driver() {
+    ALOGV("~NuPlayer2Driver(%p)", this);
+    mLooper->stop();
+
+    // finalize any pending metrics, usually a no-op.
+    updateMetrics("destructor");
+    logMetrics("destructor");
+
+    if (mAnalyticsItem != NULL) {
+        delete mAnalyticsItem;
+        mAnalyticsItem = NULL;
+    }
+}
+
+status_t NuPlayer2Driver::initCheck() {
+    return OK;
+}
+
+status_t NuPlayer2Driver::setUID(uid_t uid) {
+    mPlayer->setUID(uid);
+    mClientUid = uid;
+    if (mAnalyticsItem) {
+        mAnalyticsItem->setUid(mClientUid);
+    }
+
+    return OK;
+}
+
+status_t NuPlayer2Driver::setDataSource(
+        const sp<MediaHTTPService> &httpService,
+        const char *url,
+        const KeyedVector<String8, String8> *headers) {
+    ALOGV("setDataSource(%p) url(%s)", this, uriDebugString(url, false).c_str());
+    Mutex::Autolock autoLock(mLock);
+
+    if (mState != STATE_IDLE) {
+        return INVALID_OPERATION;
+    }
+
+    mState = STATE_SET_DATASOURCE_PENDING;
+
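+    // The call completes asynchronously on the player's looper; wait on mCondition
+    // until the driver is notified and mAsyncResult has been set.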
+    mPlayer->setDataSourceAsync(httpService, url, headers);
+
+    while (mState == STATE_SET_DATASOURCE_PENDING) {
+        mCondition.wait(mLock);
+    }
+
+    return mAsyncResult;
+}
+
+status_t NuPlayer2Driver::setDataSource(int fd, int64_t offset, int64_t length) {
+    ALOGV("setDataSource(%p) file(%d)", this, fd);
+    Mutex::Autolock autoLock(mLock);
+
+    if (mState != STATE_IDLE) {
+        return INVALID_OPERATION;
+    }
+
+    mState = STATE_SET_DATASOURCE_PENDING;
+
+    mPlayer->setDataSourceAsync(fd, offset, length);
+
+    while (mState == STATE_SET_DATASOURCE_PENDING) {
+        mCondition.wait(mLock);
+    }
+
+    return mAsyncResult;
+}
+
+status_t NuPlayer2Driver::setDataSource(const sp<IStreamSource> &source) {
+    ALOGV("setDataSource(%p) stream source", this);
+    Mutex::Autolock autoLock(mLock);
+
+    if (mState != STATE_IDLE) {
+        return INVALID_OPERATION;
+    }
+
+    mState = STATE_SET_DATASOURCE_PENDING;
+
+    mPlayer->setDataSourceAsync(source);
+
+    while (mState == STATE_SET_DATASOURCE_PENDING) {
+        mCondition.wait(mLock);
+    }
+
+    return mAsyncResult;
+}
+
+status_t NuPlayer2Driver::setDataSource(const sp<DataSource> &source) {
+    ALOGV("setDataSource(%p) callback source", this);
+    Mutex::Autolock autoLock(mLock);
+
+    if (mState != STATE_IDLE) {
+        return INVALID_OPERATION;
+    }
+
+    mState = STATE_SET_DATASOURCE_PENDING;
+
+    mPlayer->setDataSourceAsync(source);
+
+    while (mState == STATE_SET_DATASOURCE_PENDING) {
+        mCondition.wait(mLock);
+    }
+
+    return mAsyncResult;
+}
+
+status_t NuPlayer2Driver::setVideoSurfaceTexture(
+        const sp<IGraphicBufferProducer> &bufferProducer) {
+    ALOGV("setVideoSurfaceTexture(%p)", this);
+    Mutex::Autolock autoLock(mLock);
+
+    if (mSetSurfaceInProgress) {
+        return INVALID_OPERATION;
+    }
+
+    switch (mState) {
+        case STATE_SET_DATASOURCE_PENDING:
+        case STATE_RESET_IN_PROGRESS:
+            return INVALID_OPERATION;
+
+        default:
+            break;
+    }
+
+    mSetSurfaceInProgress = true;
+
+    mPlayer->setVideoSurfaceTextureAsync(bufferProducer);
+
+    while (mSetSurfaceInProgress) {
+        mCondition.wait(mLock);
+    }
+
+    return OK;
+}
+
+status_t NuPlayer2Driver::getBufferingSettings(BufferingSettings* buffering) {
+    ALOGV("getBufferingSettings(%p)", this);
+    {
+        Mutex::Autolock autoLock(mLock);
+        if (mState == STATE_IDLE) {
+            return INVALID_OPERATION;
+        }
+    }
+
+    return mPlayer->getBufferingSettings(buffering);
+}
+
+status_t NuPlayer2Driver::setBufferingSettings(const BufferingSettings& buffering) {
+    ALOGV("setBufferingSettings(%p)", this);
+    {
+        Mutex::Autolock autoLock(mLock);
+        if (mState == STATE_IDLE) {
+            return INVALID_OPERATION;
+        }
+    }
+
+    return mPlayer->setBufferingSettings(buffering);
+}
+
+status_t NuPlayer2Driver::prepare() {
+    ALOGV("prepare(%p)", this);
+    Mutex::Autolock autoLock(mLock);
+    return prepare_l();
+}
+
+status_t NuPlayer2Driver::prepare_l() {
+    switch (mState) {
+        case STATE_UNPREPARED:
+            mState = STATE_PREPARING;
+
+            // Make sure we're not posting any notifications; success or
+            // failure is communicated only through our result code.
+            mIsAsyncPrepare = false;
+            mPlayer->prepareAsync();
+            while (mState == STATE_PREPARING) {
+                mCondition.wait(mLock);
+            }
+            return (mState == STATE_PREPARED) ? OK : UNKNOWN_ERROR;
+        case STATE_STOPPED:
+            // this is really just paused. handle as seek to start
+            mAtEOS = false;
+            mState = STATE_STOPPED_AND_PREPARING;
+            mIsAsyncPrepare = false;
+            mPlayer->seekToAsync(0, MediaPlayer2SeekMode::SEEK_PREVIOUS_SYNC /* mode */,
+                    true /* needNotify */);
+            while (mState == STATE_STOPPED_AND_PREPARING) {
+                mCondition.wait(mLock);
+            }
+            return (mState == STATE_STOPPED_AND_PREPARED) ? OK : UNKNOWN_ERROR;
+        default:
+            return INVALID_OPERATION;
+    };
+}
+
+status_t NuPlayer2Driver::prepareAsync() {
+    ALOGV("prepareAsync(%p)", this);
+    Mutex::Autolock autoLock(mLock);
+
+    switch (mState) {
+        case STATE_UNPREPARED:
+            mState = STATE_PREPARING;
+            mIsAsyncPrepare = true;
+            mPlayer->prepareAsync();
+            return OK;
+        case STATE_STOPPED:
+            // this is really just paused. handle as seek to start
+            mAtEOS = false;
+            mState = STATE_STOPPED_AND_PREPARING;
+            mIsAsyncPrepare = true;
+            mPlayer->seekToAsync(0, MediaPlayer2SeekMode::SEEK_PREVIOUS_SYNC /* mode */,
+                    true /* needNotify */);
+            return OK;
+        default:
+            return INVALID_OPERATION;
+    };
+}
+
+status_t NuPlayer2Driver::start() {
+    ALOGD("start(%p), state is %d, eos is %d", this, mState, mAtEOS);
+    Mutex::Autolock autoLock(mLock);
+    return start_l();
+}
+
+status_t NuPlayer2Driver::start_l() {
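+    // The cases below intentionally fall through: an unprepared player is prepared,
+    // then started, and a restart from EOS rewinds to the beginning.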
+    switch (mState) {
+        case STATE_UNPREPARED:
+        {
+            status_t err = prepare_l();
+
+            if (err != OK) {
+                return err;
+            }
+
+            CHECK_EQ(mState, STATE_PREPARED);
+
+            // fall through
+        }
+
+        case STATE_PAUSED:
+        case STATE_STOPPED_AND_PREPARED:
+        case STATE_PREPARED:
+        {
+            mPlayer->start();
+
+            // fall through
+        }
+
+        case STATE_RUNNING:
+        {
+            if (mAtEOS) {
+                mPlayer->seekToAsync(0);
+                mAtEOS = false;
+                mPositionUs = -1;
+            }
+            break;
+        }
+
+        default:
+            return INVALID_OPERATION;
+    }
+
+    mState = STATE_RUNNING;
+
+    return OK;
+}
+
+status_t NuPlayer2Driver::stop() {
+    ALOGD("stop(%p)", this);
+    Mutex::Autolock autoLock(mLock);
+
+    switch (mState) {
+        case STATE_RUNNING:
+            mPlayer->pause();
+            // fall through
+
+        case STATE_PAUSED:
+            mState = STATE_STOPPED;
+            notifyListener_l(MEDIA2_STOPPED);
+            break;
+
+        case STATE_PREPARED:
+        case STATE_STOPPED:
+        case STATE_STOPPED_AND_PREPARING:
+        case STATE_STOPPED_AND_PREPARED:
+            mState = STATE_STOPPED;
+            break;
+
+        default:
+            return INVALID_OPERATION;
+    }
+
+    return OK;
+}
+
+status_t NuPlayer2Driver::pause() {
+    ALOGD("pause(%p)", this);
+    // The NuPlayer2Renderer may get flushed if paused long enough, e.g. the pause timeout tear
+    // down for audio offload mode. If that happens, the NuPlayer2Renderer will no longer know the
+    // current position. So, similarly to seekTo, update |mPositionUs| to the pause position by
+    // calling getCurrentPosition here.
+    int unused;
+    getCurrentPosition(&unused);
+
+    Mutex::Autolock autoLock(mLock);
+
+    switch (mState) {
+        case STATE_PAUSED:
+        case STATE_PREPARED:
+            return OK;
+
+        case STATE_RUNNING:
+            mState = STATE_PAUSED;
+            notifyListener_l(MEDIA2_PAUSED);
+            mPlayer->pause();
+            break;
+
+        default:
+            return INVALID_OPERATION;
+    }
+
+    return OK;
+}
+
+bool NuPlayer2Driver::isPlaying() {
+    return mState == STATE_RUNNING && !mAtEOS;
+}
+
+status_t NuPlayer2Driver::setPlaybackSettings(const AudioPlaybackRate &rate) {
+    status_t err = mPlayer->setPlaybackSettings(rate);
+    if (err == OK) {
+        // try to update position
+        int unused;
+        getCurrentPosition(&unused);
+        Mutex::Autolock autoLock(mLock);
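+        // A rate of 0 is treated as pause; switching back to a non-zero rate restarts
+        // a paused or prepared player.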
+        if (rate.mSpeed == 0.f && mState == STATE_RUNNING) {
+            mState = STATE_PAUSED;
+            notifyListener_l(MEDIA2_PAUSED);
+        } else if (rate.mSpeed != 0.f
+                && (mState == STATE_PAUSED
+                    || mState == STATE_STOPPED_AND_PREPARED
+                    || mState == STATE_PREPARED)) {
+            err = start_l();
+        }
+    }
+    return err;
+}
+
+status_t NuPlayer2Driver::getPlaybackSettings(AudioPlaybackRate *rate) {
+    return mPlayer->getPlaybackSettings(rate);
+}
+
+status_t NuPlayer2Driver::setSyncSettings(const AVSyncSettings &sync, float videoFpsHint) {
+    return mPlayer->setSyncSettings(sync, videoFpsHint);
+}
+
+status_t NuPlayer2Driver::getSyncSettings(AVSyncSettings *sync, float *videoFps) {
+    return mPlayer->getSyncSettings(sync, videoFps);
+}
+
+status_t NuPlayer2Driver::seekTo(int msec, MediaPlayer2SeekMode mode) {
+    ALOGD("seekTo(%p) (%d ms, %d) at state %d", this, msec, mode, mState);
+    Mutex::Autolock autoLock(mLock);
+
+    int64_t seekTimeUs = msec * 1000ll;
+
+    switch (mState) {
+        case STATE_PREPARED:
+        case STATE_STOPPED_AND_PREPARED:
+        case STATE_PAUSED:
+        case STATE_RUNNING:
+        {
+            mAtEOS = false;
+            mSeekInProgress = true;
+            // seeks can take a while, so we are essentially paused
+            notifyListener_l(MEDIA2_PAUSED);
+            mPlayer->seekToAsync(seekTimeUs, mode, true /* needNotify */);
+            break;
+        }
+
+        default:
+            return INVALID_OPERATION;
+    }
+
+    mPositionUs = seekTimeUs;
+    return OK;
+}
+
+status_t NuPlayer2Driver::getCurrentPosition(int *msec) {
+    int64_t tempUs = 0;
+    {
+        Mutex::Autolock autoLock(mLock);
+        if (mSeekInProgress || (mState == STATE_PAUSED && !mAtEOS)) {
+            tempUs = (mPositionUs <= 0) ? 0 : mPositionUs;
+            *msec = (int)divRound(tempUs, (int64_t)(1000));
+            return OK;
+        }
+    }
+
+    status_t ret = mPlayer->getCurrentPosition(&tempUs);
+
+    Mutex::Autolock autoLock(mLock);
+    // We need to check mSeekInProgress here because mPlayer->seekToAsync is an async call, which
+    // means getCurrentPosition can be called before the seek is completed. In other words, the
+    // renderer may return a position value that differs from the seek-to position.
+    if (ret != OK) {
+        tempUs = (mPositionUs <= 0) ? 0 : mPositionUs;
+    } else {
+        mPositionUs = tempUs;
+    }
+    *msec = (int)divRound(tempUs, (int64_t)(1000));
+    return OK;
+}
+
+status_t NuPlayer2Driver::getDuration(int *msec) {
+    Mutex::Autolock autoLock(mLock);
+
+    if (mDurationUs < 0) {
+        return UNKNOWN_ERROR;
+    }
+
+    *msec = (mDurationUs + 500ll) / 1000;
+
+    return OK;
+}
+
+void NuPlayer2Driver::updateMetrics(const char *where) {
+    if (where == NULL) {
+        where = "unknown";
+    }
+    ALOGV("updateMetrics(%p) from %s at state %d", this, where, mState);
+
+    // gather the final stats for this record
+    Vector<sp<AMessage>> trackStats;
+    mPlayer->getStats(&trackStats);
+
+    if (trackStats.size() > 0) {
+        for (size_t i = 0; i < trackStats.size(); ++i) {
+            const sp<AMessage> &stats = trackStats.itemAt(i);
+
+            AString mime;
+            stats->findString("mime", &mime);
+
+            AString name;
+            stats->findString("component-name", &name);
+
+            if (mime.startsWith("video/")) {
+                int32_t width, height;
+                mAnalyticsItem->setCString(kPlayerVMime, mime.c_str());
+                if (!name.empty()) {
+                    mAnalyticsItem->setCString(kPlayerVCodec, name.c_str());
+                }
+
+                if (stats->findInt32("width", &width)
+                        && stats->findInt32("height", &height)) {
+                    mAnalyticsItem->setInt32(kPlayerWidth, width);
+                    mAnalyticsItem->setInt32(kPlayerHeight, height);
+                }
+
+                int64_t numFramesTotal = 0;
+                int64_t numFramesDropped = 0;
+                stats->findInt64("frames-total", &numFramesTotal);
+                stats->findInt64("frames-dropped-output", &numFramesDropped);
+
+                mAnalyticsItem->setInt64(kPlayerFrames, numFramesTotal);
+                mAnalyticsItem->setInt64(kPlayerFramesDropped, numFramesDropped);
+
+
+            } else if (mime.startsWith("audio/")) {
+                mAnalyticsItem->setCString(kPlayerAMime, mime.c_str());
+                if (!name.empty()) {
+                    mAnalyticsItem->setCString(kPlayerACodec, name.c_str());
+                }
+            }
+        }
+    }
+
+    // always provide duration and playing time, even if they have 0/unknown values.
+
+    // getDuration() uses mLock for mutex -- careful where we use it.
+    int duration_ms = -1;
+    getDuration(&duration_ms);
+    mAnalyticsItem->setInt64(kPlayerDuration, duration_ms);
+
+    mAnalyticsItem->setInt64(kPlayerPlaying, (mPlayingTimeUs+500)/1000 );
+
+    if (mRebufferingEvents != 0) {
+        mAnalyticsItem->setInt64(kPlayerRebuffering, (mRebufferingTimeUs+500)/1000 );
+        mAnalyticsItem->setInt32(kPlayerRebufferingCount, mRebufferingEvents);
+        mAnalyticsItem->setInt32(kPlayerRebufferingAtExit, mRebufferingAtExit);
+    }
+
+    mAnalyticsItem->setCString(kPlayerDataSourceType, mPlayer->getDataSourceType());
+}
+
+
+void NuPlayer2Driver::logMetrics(const char *where) {
+    if (where == NULL) {
+        where = "unknown";
+    }
+    ALOGV("logMetrics(%p) from %s at state %d", this, where, mState);
+
+    if (mAnalyticsItem == NULL || mAnalyticsItem->isEnabled() == false) {
+        return;
+    }
+
+    // log only non-empty records
+    // we always updateMetrics() before we get here
+    // and that always injects 3 fields (duration, playing time, and
+    // datasource) into the record.
+    // So the canonical "empty" record has 3 elements in it.
+    if (mAnalyticsItem->count() > 3) {
+
+        mAnalyticsItem->setFinalized(true);
+        mAnalyticsItem->selfrecord();
+
+        // re-init in case we prepare() and start() again.
+        delete mAnalyticsItem;
+        mAnalyticsItem = new MediaAnalyticsItem("nuplayer");
+        if (mAnalyticsItem) {
+            mAnalyticsItem->generateSessionID();
+            mAnalyticsItem->setUid(mClientUid);
+        }
+    } else {
+        ALOGV("did not have anything to record");
+    }
+}
+
+status_t NuPlayer2Driver::reset() {
+    ALOGD("reset(%p) at state %d", this, mState);
+
+    updateMetrics("reset");
+    logMetrics("reset");
+
+    Mutex::Autolock autoLock(mLock);
+
+    switch (mState) {
+        case STATE_IDLE:
+            return OK;
+
+        case STATE_SET_DATASOURCE_PENDING:
+        case STATE_RESET_IN_PROGRESS:
+            return INVALID_OPERATION;
+
+        case STATE_PREPARING:
+        {
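+            // An async prepare is still in flight; send the pending "prepared"
+            // notification before resetting (see notifyPrepareCompleted()).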
+            CHECK(mIsAsyncPrepare);
+
+            notifyListener_l(MEDIA2_PREPARED);
+            break;
+        }
+
+        default:
+            break;
+    }
+
+    if (mState != STATE_STOPPED) {
+        notifyListener_l(MEDIA2_STOPPED);
+    }
+
+    mState = STATE_RESET_IN_PROGRESS;
+    mPlayer->resetAsync();
+
+    while (mState == STATE_RESET_IN_PROGRESS) {
+        mCondition.wait(mLock);
+    }
+
+    mDurationUs = -1;
+    mPositionUs = -1;
+    mLooping = false;
+    mPlayingTimeUs = 0;
+    mRebufferingTimeUs = 0;
+    mRebufferingEvents = 0;
+    mRebufferingAtExit = false;
+
+    return OK;
+}
+
+status_t NuPlayer2Driver::notifyAt(int64_t mediaTimeUs) {
+    ALOGV("notifyAt(%p), time:%lld", this, (long long)mediaTimeUs);
+    return mPlayer->notifyAt(mediaTimeUs);
+}
+
+status_t NuPlayer2Driver::setLooping(int loop) {
+    mLooping = loop != 0;
+    return OK;
+}
+
+player2_type NuPlayer2Driver::playerType() {
+    return PLAYER2_NU_PLAYER2;
+}
+
+status_t NuPlayer2Driver::invoke(const Parcel &request, Parcel *reply) {
+    if (reply == NULL) {
+        ALOGE("reply is a NULL pointer");
+        return BAD_VALUE;
+    }
+
+    int32_t methodId;
+    status_t ret = request.readInt32(&methodId);
+    if (ret != OK) {
+        ALOGE("Failed to retrieve the requested method to invoke");
+        return ret;
+    }
+
+    switch (methodId) {
+        case MEDIA_PLAYER2_INVOKE_ID_SET_VIDEO_SCALING_MODE:
+        {
+            int mode = request.readInt32();
+            return mPlayer->setVideoScalingMode(mode);
+        }
+
+        case MEDIA_PLAYER2_INVOKE_ID_GET_TRACK_INFO:
+        {
+            return mPlayer->getTrackInfo(reply);
+        }
+
+        case MEDIA_PLAYER2_INVOKE_ID_SELECT_TRACK:
+        {
+            int trackIndex = request.readInt32();
+            int msec = 0;
+            // getCurrentPosition should always return OK
+            getCurrentPosition(&msec);
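+            // selectTrack() expects the current position in microseconds.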
+            return mPlayer->selectTrack(trackIndex, true /* select */, msec * 1000ll);
+        }
+
+        case MEDIA_PLAYER2_INVOKE_ID_UNSELECT_TRACK:
+        {
+            int trackIndex = request.readInt32();
+            return mPlayer->selectTrack(trackIndex, false /* select */, 0xdeadbeef /* not used */);
+        }
+
+        case MEDIA_PLAYER2_INVOKE_ID_GET_SELECTED_TRACK:
+        {
+            int32_t type = request.readInt32();
+            return mPlayer->getSelectedTrack(type, reply);
+        }
+
+        default:
+        {
+            return INVALID_OPERATION;
+        }
+    }
+}
+
+void NuPlayer2Driver::setAudioSink(const sp<AudioSink> &audioSink) {
+    mPlayer->setAudioSink(audioSink);
+    mAudioSink = audioSink;
+}
+
+status_t NuPlayer2Driver::setParameter(
+        int /* key */, const Parcel & /* request */) {
+    return INVALID_OPERATION;
+}
+
+status_t NuPlayer2Driver::getParameter(int key, Parcel *reply) {
+
+    if (key == FOURCC('m','t','r','X')) {
+        // mtrX -- a play on 'metrics' (not matrix)
+        // gather current info all together, parcel it, and send it back
+        updateMetrics("api");
+        mAnalyticsItem->writeToParcel(reply);
+        return OK;
+    }
+
+    return INVALID_OPERATION;
+}
+
+status_t NuPlayer2Driver::getMetadata(
+        const media::Metadata::Filter& /* ids */, Parcel *records) {
+    Mutex::Autolock autoLock(mLock);
+
+    using media::Metadata;
+
+    Metadata meta(records);
+
+    meta.appendBool(
+            Metadata::kPauseAvailable,
+            mPlayerFlags & NuPlayer2::Source::FLAG_CAN_PAUSE);
+
+    meta.appendBool(
+            Metadata::kSeekBackwardAvailable,
+            mPlayerFlags & NuPlayer2::Source::FLAG_CAN_SEEK_BACKWARD);
+
+    meta.appendBool(
+            Metadata::kSeekForwardAvailable,
+            mPlayerFlags & NuPlayer2::Source::FLAG_CAN_SEEK_FORWARD);
+
+    meta.appendBool(
+            Metadata::kSeekAvailable,
+            mPlayerFlags & NuPlayer2::Source::FLAG_CAN_SEEK);
+
+    return OK;
+}
+
+void NuPlayer2Driver::notifyResetComplete() {
+    ALOGD("notifyResetComplete(%p)", this);
+    Mutex::Autolock autoLock(mLock);
+
+    CHECK_EQ(mState, STATE_RESET_IN_PROGRESS);
+    mState = STATE_IDLE;
+    mCondition.broadcast();
+}
+
+void NuPlayer2Driver::notifySetSurfaceComplete() {
+    ALOGV("notifySetSurfaceComplete(%p)", this);
+    Mutex::Autolock autoLock(mLock);
+
+    CHECK(mSetSurfaceInProgress);
+    mSetSurfaceInProgress = false;
+
+    mCondition.broadcast();
+}
+
+void NuPlayer2Driver::notifyDuration(int64_t durationUs) {
+    Mutex::Autolock autoLock(mLock);
+    mDurationUs = durationUs;
+}
+
+void NuPlayer2Driver::notifyMorePlayingTimeUs(int64_t playingUs) {
+    Mutex::Autolock autoLock(mLock);
+    mPlayingTimeUs += playingUs;
+}
+
+void NuPlayer2Driver::notifyMoreRebufferingTimeUs(int64_t rebufferingUs) {
+    Mutex::Autolock autoLock(mLock);
+    mRebufferingTimeUs += rebufferingUs;
+    mRebufferingEvents++;
+}
+
+void NuPlayer2Driver::notifyRebufferingWhenExit(bool status) {
+    Mutex::Autolock autoLock(mLock);
+    mRebufferingAtExit = status;
+}
+
+void NuPlayer2Driver::notifySeekComplete() {
+    ALOGV("notifySeekComplete(%p)", this);
+    Mutex::Autolock autoLock(mLock);
+    mSeekInProgress = false;
+    notifySeekComplete_l();
+}
+
+void NuPlayer2Driver::notifySeekComplete_l() {
+    bool wasSeeking = true;
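+    // A "seek" issued while stopped completes the stop/prepare cycle, so it is
+    // reported as MEDIA2_PREPARED rather than MEDIA2_SEEK_COMPLETE.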
+    if (mState == STATE_STOPPED_AND_PREPARING) {
+        wasSeeking = false;
+        mState = STATE_STOPPED_AND_PREPARED;
+        mCondition.broadcast();
+        if (!mIsAsyncPrepare) {
+            // if we are preparing synchronously, no need to notify listener
+            return;
+        }
+    } else if (mState == STATE_STOPPED) {
+        // no need to notify listener
+        return;
+    }
+    notifyListener_l(wasSeeking ? MEDIA2_SEEK_COMPLETE : MEDIA2_PREPARED);
+}
+
+status_t NuPlayer2Driver::dump(
+        int fd, const Vector<String16> & /* args */) const {
+
+    Vector<sp<AMessage> > trackStats;
+    mPlayer->getStats(&trackStats);
+
+    AString logString(" NuPlayer2\n");
+    char buf[256] = {0};
+
+    bool locked = false;
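+    // Poll for mLock a bounded number of times so dump() never blocks indefinitely
+    // if another thread is holding the lock.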
+    for (int i = 0; i < kDumpLockRetries; ++i) {
+        if (mLock.tryLock() == NO_ERROR) {
+            locked = true;
+            break;
+        }
+        usleep(kDumpLockSleepUs);
+    }
+
+    if (locked) {
+        snprintf(buf, sizeof(buf), "  state(%d), atEOS(%d), looping(%d), autoLoop(%d)\n",
+                mState, mAtEOS, mLooping, mAutoLoop);
+        mLock.unlock();
+    } else {
+        snprintf(buf, sizeof(buf), "  NPD(%p) lock is taken\n", this);
+    }
+    logString.append(buf);
+
+    for (size_t i = 0; i < trackStats.size(); ++i) {
+        const sp<AMessage> &stats = trackStats.itemAt(i);
+
+        AString mime;
+        if (stats->findString("mime", &mime)) {
+            snprintf(buf, sizeof(buf), "  mime(%s)\n", mime.c_str());
+            logString.append(buf);
+        }
+
+        AString name;
+        if (stats->findString("component-name", &name)) {
+            snprintf(buf, sizeof(buf), "    decoder(%s)\n", name.c_str());
+            logString.append(buf);
+        }
+
+        if (mime.startsWith("video/")) {
+            int32_t width, height;
+            if (stats->findInt32("width", &width)
+                    && stats->findInt32("height", &height)) {
+                snprintf(buf, sizeof(buf), "    resolution(%d x %d)\n", width, height);
+                logString.append(buf);
+            }
+
+            int64_t numFramesTotal = 0;
+            int64_t numFramesDropped = 0;
+
+            stats->findInt64("frames-total", &numFramesTotal);
+            stats->findInt64("frames-dropped-output", &numFramesDropped);
+            snprintf(buf, sizeof(buf), "    numFramesTotal(%lld), numFramesDropped(%lld), "
+                     "percentageDropped(%.2f%%)\n",
+                     (long long)numFramesTotal,
+                     (long long)numFramesDropped,
+                     numFramesTotal == 0
+                            ? 0.0 : (double)(numFramesDropped * 100) / numFramesTotal);
+            logString.append(buf);
+        }
+    }
+
+    ALOGI("%s", logString.c_str());
+
+    if (fd >= 0) {
+        FILE *out = fdopen(dup(fd), "w");
+        if (out != NULL) {
+            fprintf(out, "%s", logString.c_str());
+            fclose(out);
+        }
+    }
+
+    return OK;
+}
+
+void NuPlayer2Driver::notifyListener(
+        int msg, int ext1, int ext2, const Parcel *in) {
+    Mutex::Autolock autoLock(mLock);
+    notifyListener_l(msg, ext1, ext2, in);
+}
+
+void NuPlayer2Driver::notifyListener_l(
+        int msg, int ext1, int ext2, const Parcel *in) {
+    ALOGD("notifyListener_l(%p), (%d, %d, %d, %d), loop setting(%d, %d)",
+            this, msg, ext1, ext2, (in == NULL ? -1 : (int)in->dataSize()), mAutoLoop, mLooping);
+    switch (msg) {
+        case MEDIA2_PLAYBACK_COMPLETE:
+        {
+            if (mState != STATE_RESET_IN_PROGRESS) {
+                if (mAutoLoop) {
+                    audio_stream_type_t streamType = AUDIO_STREAM_MUSIC;
+                    if (mAudioSink != NULL) {
+                        streamType = mAudioSink->getAudioStreamType();
+                    }
+                    if (streamType == AUDIO_STREAM_NOTIFICATION) {
+                        ALOGW("disabling auto-loop for notification");
+                        mAutoLoop = false;
+                    }
+                }
+                if (mLooping || mAutoLoop) {
+                    mPlayer->seekToAsync(0);
+                    if (mAudioSink != NULL) {
+                        // The renderer has stopped the sink at the end in order to play out
+                        // the last little bit of audio. If we're looping, we need to restart it.
+                        mAudioSink->start();
+                    }
+                    // don't send completion event when looping
+                    return;
+                }
+                if (property_get_bool("persist.debug.sf.stats", false)) {
+                    Vector<String16> args;
+                    dump(-1, args);
+                }
+                mPlayer->pause();
+                mState = STATE_PAUSED;
+            }
+            // fall through
+        }
+
+        case MEDIA2_ERROR:
+        {
+            // when we have an error, add it to the analytics for this playback.
+            // ext1 is our primary 'error type' value. Only add ext2 when non-zero.
+            // [the test against msg is needed because of the fall-through from the previous case]
+            if (msg == MEDIA2_ERROR) {
+                mAnalyticsItem->setInt32(kPlayerError, ext1);
+                if (ext2 != 0) {
+                    mAnalyticsItem->setInt32(kPlayerErrorCode, ext2);
+                }
+            }
+            mAtEOS = true;
+            break;
+        }
+
+        default:
+            break;
+    }
+
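+    // Drop mLock while delivering the event so that a listener calling back into
+    // the driver does not deadlock.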
+    mLock.unlock();
+    sendEvent(msg, ext1, ext2, in);
+    mLock.lock();
+}
+
+void NuPlayer2Driver::notifySetDataSourceCompleted(status_t err) {
+    Mutex::Autolock autoLock(mLock);
+
+    CHECK_EQ(mState, STATE_SET_DATASOURCE_PENDING);
+
+    mAsyncResult = err;
+    mState = (err == OK) ? STATE_UNPREPARED : STATE_IDLE;
+    mCondition.broadcast();
+}
+
+void NuPlayer2Driver::notifyPrepareCompleted(status_t err) {
+    ALOGV("notifyPrepareCompleted %d", err);
+
+    Mutex::Autolock autoLock(mLock);
+
+    if (mState != STATE_PREPARING) {
+        // We were preparing asynchronously when the client called
+        // reset(); we sent a premature "prepared" notification and
+        // then initiated the reset. This notification is stale.
+        CHECK(mState == STATE_RESET_IN_PROGRESS || mState == STATE_IDLE);
+        return;
+    }
+
+    CHECK_EQ(mState, STATE_PREPARING);
+
+    mAsyncResult = err;
+
+    if (err == OK) {
+        // update state before notifying client, so that if client calls back into NuPlayer2Driver
+        // in response, NuPlayer2Driver has the right state
+        mState = STATE_PREPARED;
+        if (mIsAsyncPrepare) {
+            notifyListener_l(MEDIA2_PREPARED);
+        }
+    } else {
+        mState = STATE_UNPREPARED;
+        if (mIsAsyncPrepare) {
+            notifyListener_l(MEDIA2_ERROR, MEDIA2_ERROR_UNKNOWN, err);
+        }
+    }
+
+    sp<MetaData> meta = mPlayer->getFileMeta();
+    int32_t loop;
+    if (meta != NULL
+            && meta->findInt32(kKeyAutoLoop, &loop) && loop != 0) {
+        mAutoLoop = true;
+    }
+
+    mCondition.broadcast();
+}
+
+void NuPlayer2Driver::notifyFlagsChanged(uint32_t flags) {
+    Mutex::Autolock autoLock(mLock);
+
+    mPlayerFlags = flags;
+}
+
+// Modular DRM
+status_t NuPlayer2Driver::prepareDrm(const uint8_t uuid[16], const Vector<uint8_t> &drmSessionId)
+{
+    ALOGV("prepareDrm(%p) state: %d", this, mState);
+
+    // leaving the state verification for mediaplayer.cpp
+    status_t ret = mPlayer->prepareDrm(uuid, drmSessionId);
+
+    ALOGV("prepareDrm ret: %d", ret);
+
+    return ret;
+}
+
+status_t NuPlayer2Driver::releaseDrm()
+{
+    ALOGV("releaseDrm(%p) state: %d", this, mState);
+
+    // leaving the state verification for mediaplayer.cpp
+    status_t ret = mPlayer->releaseDrm();
+
+    ALOGV("releaseDrm ret: %d", ret);
+
+    return ret;
+}
+
+}  // namespace android
diff --git a/media/libmedia/nuplayer2/NuPlayer2Driver.h b/media/libmedia/nuplayer2/NuPlayer2Driver.h
new file mode 100644
index 0000000..edd4556
--- /dev/null
+++ b/media/libmedia/nuplayer2/NuPlayer2Driver.h
@@ -0,0 +1,162 @@
+/*
+ * Copyright 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef NUPLAYER2_DRIVER_H_
+#define NUPLAYER2_DRIVER_H_
+
+#include <media/MediaPlayer2Interface.h>
+
+#include <media/MediaAnalyticsItem.h>
+#include <media/stagefright/foundation/ABase.h>
+
+namespace android {
+
+struct ALooper;
+struct MediaClock;
+struct NuPlayer2;
+
+struct NuPlayer2Driver : public MediaPlayer2Interface {
+    explicit NuPlayer2Driver(pid_t pid);
+
+    virtual status_t initCheck();
+
+    virtual status_t setUID(uid_t uid);
+
+    virtual status_t setDataSource(
+            const sp<MediaHTTPService> &httpService,
+            const char *url,
+            const KeyedVector<String8, String8> *headers);
+
+    virtual status_t setDataSource(int fd, int64_t offset, int64_t length);
+
+    virtual status_t setDataSource(const sp<IStreamSource> &source);
+
+    virtual status_t setDataSource(const sp<DataSource>& dataSource);
+
+    virtual status_t setVideoSurfaceTexture(
+            const sp<IGraphicBufferProducer> &bufferProducer);
+
+    virtual status_t getBufferingSettings(
+            BufferingSettings* buffering /* nonnull */) override;
+    virtual status_t setBufferingSettings(const BufferingSettings& buffering) override;
+
+    virtual status_t prepare();
+    virtual status_t prepareAsync();
+    virtual status_t start();
+    virtual status_t stop();
+    virtual status_t pause();
+    virtual bool isPlaying();
+    virtual status_t setPlaybackSettings(const AudioPlaybackRate &rate);
+    virtual status_t getPlaybackSettings(AudioPlaybackRate *rate);
+    virtual status_t setSyncSettings(const AVSyncSettings &sync, float videoFpsHint);
+    virtual status_t getSyncSettings(AVSyncSettings *sync, float *videoFps);
+    virtual status_t seekTo(
+            int msec, MediaPlayer2SeekMode mode = MediaPlayer2SeekMode::SEEK_PREVIOUS_SYNC);
+    virtual status_t getCurrentPosition(int *msec);
+    virtual status_t getDuration(int *msec);
+    virtual status_t reset();
+    virtual status_t notifyAt(int64_t mediaTimeUs) override;
+    virtual status_t setLooping(int loop);
+    virtual player2_type playerType();
+    virtual status_t invoke(const Parcel &request, Parcel *reply);
+    virtual void setAudioSink(const sp<AudioSink> &audioSink);
+    virtual status_t setParameter(int key, const Parcel &request);
+    virtual status_t getParameter(int key, Parcel *reply);
+
+    virtual status_t getMetadata(
+            const media::Metadata::Filter& ids, Parcel *records);
+
+    virtual status_t dump(int fd, const Vector<String16> &args) const;
+
+    void notifySetDataSourceCompleted(status_t err);
+    void notifyPrepareCompleted(status_t err);
+    void notifyResetComplete();
+    void notifySetSurfaceComplete();
+    void notifyDuration(int64_t durationUs);
+    void notifyMorePlayingTimeUs(int64_t timeUs);
+    void notifyMoreRebufferingTimeUs(int64_t timeUs);
+    void notifyRebufferingWhenExit(bool status);
+    void notifySeekComplete();
+    void notifySeekComplete_l();
+    void notifyListener(int msg, int ext1 = 0, int ext2 = 0, const Parcel *in = NULL);
+    void notifyFlagsChanged(uint32_t flags);
+
+    // Modular DRM
+    virtual status_t prepareDrm(const uint8_t uuid[16], const Vector<uint8_t> &drmSessionId);
+    virtual status_t releaseDrm();
+
+protected:
+    virtual ~NuPlayer2Driver();
+
+private:
+    enum State {
+        STATE_IDLE,
+        STATE_SET_DATASOURCE_PENDING,
+        STATE_UNPREPARED,
+        STATE_PREPARING,
+        STATE_PREPARED,
+        STATE_RUNNING,
+        STATE_PAUSED,
+        STATE_RESET_IN_PROGRESS,
+        STATE_STOPPED,                  // equivalent to PAUSED
+        STATE_STOPPED_AND_PREPARING,    // equivalent to PAUSED, but seeking
+        STATE_STOPPED_AND_PREPARED,     // equivalent to PAUSED, but seek complete
+    };
+
+    mutable Mutex mLock;
+    Condition mCondition;
+
+    State mState;
+
+    bool mIsAsyncPrepare;
+    status_t mAsyncResult;
+
+    // The following are protected through "mLock"
+    // >>>
+    bool mSetSurfaceInProgress;
+    int64_t mDurationUs;
+    int64_t mPositionUs;
+    bool mSeekInProgress;
+    int64_t mPlayingTimeUs;
+    int64_t mRebufferingTimeUs;
+    int32_t mRebufferingEvents;
+    bool mRebufferingAtExit;
+    // <<<
+
+    sp<ALooper> mLooper;
+    const sp<MediaClock> mMediaClock;
+    const sp<NuPlayer2> mPlayer;
+    sp<AudioSink> mAudioSink;
+    uint32_t mPlayerFlags;
+
+    MediaAnalyticsItem *mAnalyticsItem;
+    uid_t mClientUid;
+
+    bool mAtEOS;
+    bool mLooping;
+    bool mAutoLoop;
+
+    void updateMetrics(const char *where);
+    void logMetrics(const char *where);
+
+    status_t prepare_l();
+    status_t start_l();
+    void notifyListener_l(int msg, int ext1 = 0, int ext2 = 0, const Parcel *in = NULL);
+
+    DISALLOW_EVIL_CONSTRUCTORS(NuPlayer2Driver);
+};
+
+}  // namespace android
+
+#endif  // NUPLAYER2_DRIVER_H_
diff --git a/media/libmedia/nuplayer2/NuPlayer2Drm.cpp b/media/libmedia/nuplayer2/NuPlayer2Drm.cpp
new file mode 100644
index 0000000..5a4502a
--- /dev/null
+++ b/media/libmedia/nuplayer2/NuPlayer2Drm.cpp
@@ -0,0 +1,130 @@
+/*
+ * Copyright 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "NuPlayer2Drm"
+
+#include "NuPlayer2Drm.h"
+
+#include "NdkWrapper.h"
+#include <utils/Log.h>
+
+
+namespace android {
+
+Vector<DrmUUID> NuPlayer2Drm::parsePSSH(const void *pssh, size_t psshsize)
+{
+    Vector<DrmUUID> drmSchemes, empty;
+    const int DATALEN_SIZE = 4;
+
+    // the format of the buffer is 1 or more of:
+    //    {
+    //        16 byte uuid
+    //        4 byte data length N
+    //        N bytes of data
+    //    }
+    // Determine the number of entries in the source data.
+    // Since we got the data from stagefright, we trust it is valid and properly formatted.
+
+    const uint8_t *data = (const uint8_t*)pssh;
+    size_t len = psshsize;
+    size_t numentries = 0;
+    while (len > 0) {
+        if (len < DrmUUID::UUID_SIZE) {
+            ALOGE("ParsePSSH: invalid PSSH data");
+            return empty;
+        }
+
+        const uint8_t *uuidPtr = data;
+
+        // skip uuid
+        data += DrmUUID::UUID_SIZE;
+        len -= DrmUUID::UUID_SIZE;
+
+        // get data length
+        if (len < DATALEN_SIZE) {
+            ALOGE("ParsePSSH: invalid PSSH data");
+            return empty;
+        }
+
+        uint32_t datalen = *((uint32_t*)data);
+        data += DATALEN_SIZE;
+        len -= DATALEN_SIZE;
+
+        if (len < datalen) {
+            ALOGE("ParsePSSH: invalid PSSH data");
+            return empty;
+        }
+
+        // remember where this entry's data starts, then skip past it
+        const uint8_t *entryData = data;
+        data += datalen;
+        len -= datalen;
+
+        DrmUUID _uuid(uuidPtr);
+        drmSchemes.add(_uuid);
+
+        ALOGV("ParsePSSH[%zu]: %s: %s", numentries,
+                _uuid.toHexString().string(),
+                DrmUUID::arrayToHex(entryData, datalen).string()
+             );
+
+        numentries++;
+    }
+
+    return drmSchemes;
+}
+
+Vector<DrmUUID> NuPlayer2Drm::getSupportedDrmSchemes(const void *pssh, size_t psshsize)
+{
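+    // Keep only the schemes listed in the PSSH data that the platform DRM engine
+    // reports as supported.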
+    Vector<DrmUUID> psshDRMs = parsePSSH(pssh, psshsize);
+
+    Vector<DrmUUID> supportedDRMs;
+    for (size_t i = 0; i < psshDRMs.size(); i++) {
+        DrmUUID uuid = psshDRMs[i];
+        if (AMediaDrmWrapper::isCryptoSchemeSupported(uuid.ptr(), NULL)) {
+            supportedDRMs.add(uuid);
+        }
+    }
+
+    ALOGV("getSupportedDrmSchemes: psshDRMs: %zu supportedDRMs: %zu",
+            psshDRMs.size(), supportedDRMs.size());
+
+    return supportedDRMs;
+}
+
+// Parcel has only a private copy constructor, so the Parcel is passed in rather than returned.
+void NuPlayer2Drm::retrieveDrmInfo(const void *pssh, size_t psshsize, Parcel *parcel)
+{
+    // 1) PSSH bytes
+    parcel->writeUint32(psshsize);
+    parcel->writeByteArray(psshsize, (const uint8_t*)pssh);
+
+    ALOGV("retrieveDrmInfo: MEDIA2_DRM_INFO  PSSH: size: %zu %s", psshsize,
+            DrmUUID::arrayToHex((uint8_t*)pssh, psshsize).string());
+
+    // 2) supportedDRMs
+    Vector<DrmUUID> supportedDRMs = getSupportedDrmSchemes(pssh, psshsize);
+    parcel->writeUint32(supportedDRMs.size());
+    for (size_t i = 0; i < supportedDRMs.size(); i++) {
+        DrmUUID uuid = supportedDRMs[i];
+        parcel->writeByteArray(DrmUUID::UUID_SIZE, uuid.ptr());
+
+        ALOGV("retrieveDrmInfo: MEDIA2_DRM_INFO  supportedScheme[%zu] %s", i,
+                uuid.toHexString().string());
+    }
+}
+
+}   // namespace android
diff --git a/media/libmedia/nuplayer2/NuPlayer2Drm.h b/media/libmedia/nuplayer2/NuPlayer2Drm.h
new file mode 100644
index 0000000..e762ccc
--- /dev/null
+++ b/media/libmedia/nuplayer2/NuPlayer2Drm.h
@@ -0,0 +1,87 @@
+/*
+ * Copyright 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef NUPLAYER2_DRM_H_
+#define NUPLAYER2_DRM_H_
+
+#include <binder/Parcel.h>
+#include <media/stagefright/MetaData.h> // for CryptInfo
+
+
+namespace android {
+
+    struct DrmUUID {
+        static const int UUID_SIZE = 16;
+
+        DrmUUID() {
+            memset(this->uuid, 0, sizeof(uuid));
+        }
+
+        // to allow defining Vector/KeyedVector of UUID type
+        DrmUUID(const DrmUUID &a) {
+            memcpy(this->uuid, a.uuid, sizeof(uuid));
+        }
+
+        // to allow defining Vector/KeyedVector of UUID type
+        DrmUUID(const uint8_t uuid_in[UUID_SIZE]) {
+            memcpy(this->uuid, uuid_in, sizeof(uuid));
+        }
+
+        const uint8_t *ptr() const {
+            return uuid;
+        }
+
+        String8 toHexString() const {
+            return arrayToHex(uuid, UUID_SIZE);
+        }
+
+        static String8 toHexString(const uint8_t uuid_in[UUID_SIZE]) {
+            return arrayToHex(uuid_in, UUID_SIZE);
+        }
+
+        static String8 arrayToHex(const uint8_t *array, int bytes) {
+            String8 result;
+            for (int i = 0; i < bytes; i++) {
+                result.appendFormat("%02x", array[i]);
+            }
+
+            return result;
+        }
+
+    protected:
+        uint8_t uuid[UUID_SIZE];
+    };
+
+
+    struct NuPlayer2Drm {
+
+        // static helpers - internal
+
+    protected:
+        static Vector<DrmUUID> parsePSSH(const void *pssh, size_t psshsize);
+        static Vector<DrmUUID> getSupportedDrmSchemes(const void *pssh, size_t psshsize);
+
+        // static helpers - public
+
+    public:
+        // Parcel has only a private copy constructor, so the Parcel is passed in rather than returned.
+        static void retrieveDrmInfo(const void *pssh, size_t psshsize, Parcel *parcel);
+
+    };  // NuPlayer2Drm
+
+}   // android
+
+#endif     //NUPLAYER2_DRM_H_
diff --git a/media/libmedia/nuplayer2/NuPlayer2Renderer.cpp b/media/libmedia/nuplayer2/NuPlayer2Renderer.cpp
new file mode 100644
index 0000000..71f5dce
--- /dev/null
+++ b/media/libmedia/nuplayer2/NuPlayer2Renderer.cpp
@@ -0,0 +1,2075 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "NuPlayer2Renderer"
+#include <utils/Log.h>
+
+#include "JWakeLock.h"
+#include "NuPlayer2Renderer.h"
+#include <algorithm>
+#include <cutils/properties.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/foundation/AUtils.h>
+#include <media/stagefright/MediaClock.h>
+#include <media/stagefright/MediaErrors.h>
+#include <media/stagefright/MetaData.h>
+#include <media/stagefright/Utils.h>
+#include <media/stagefright/VideoFrameScheduler.h>
+#include <media/MediaCodecBuffer.h>
+
+#include <inttypes.h>
+
+namespace android {
+
+/*
+ * Example of common configuration settings in shell script form
+
+   #Turn offload audio off (use PCM for Play Music) -- AudioPolicyManager
+   adb shell setprop audio.offload.disable 1
+
+   #Allow offload audio with video (requires offloading to be enabled) -- AudioPolicyManager
+   adb shell setprop audio.offload.video 1
+
+   #Use audio callbacks for PCM data
+   adb shell setprop media.stagefright.audio.cbk 1
+
+   #Use deep buffer for PCM data with video (it is generally enabled for audio-only)
+   adb shell setprop media.stagefright.audio.deep 1
+
+   #Set size of buffers for pcm audio sink in msec (example: 1000 msec)
+   adb shell setprop media.stagefright.audio.sink 1000
+
+ * These configurations take effect for the next track played (not the current track).
+ */
+
+static inline bool getUseAudioCallbackSetting() {
+    return property_get_bool("media.stagefright.audio.cbk", false /* default_value */);
+}
+
+static inline int32_t getAudioSinkPcmMsSetting() {
+    return property_get_int32(
+            "media.stagefright.audio.sink", 500 /* default_value */);
+}
+
+// Maximum time in paused state when offloading audio decompression. When elapsed, the AudioSink
+// is closed to allow the audio DSP to power down.
+static const int64_t kOffloadPauseMaxUs = 10000000ll;
+
+// Maximum allowed delay from AudioSink, 1.5 seconds.
+static const int64_t kMaxAllowedAudioSinkDelayUs = 1500000ll;
+
+static const int64_t kMinimumAudioClockUpdatePeriodUs = 20 /* msec */ * 1000;
+
+// static
+const NuPlayer2::Renderer::PcmInfo NuPlayer2::Renderer::AUDIO_PCMINFO_INITIALIZER = {
+        AUDIO_CHANNEL_NONE,
+        AUDIO_OUTPUT_FLAG_NONE,
+        AUDIO_FORMAT_INVALID,
+        0, // mNumChannels
+        0 // mSampleRate
+};
+
+// static
+const int64_t NuPlayer2::Renderer::kMinPositionUpdateDelayUs = 100000ll;
+
+NuPlayer2::Renderer::Renderer(
+        const sp<MediaPlayer2Base::AudioSink> &sink,
+        const sp<MediaClock> &mediaClock,
+        const sp<AMessage> &notify,
+        uint32_t flags)
+    : mAudioSink(sink),
+      mUseVirtualAudioSink(false),
+      mNotify(notify),
+      mFlags(flags),
+      mNumFramesWritten(0),
+      mDrainAudioQueuePending(false),
+      mDrainVideoQueuePending(false),
+      mAudioQueueGeneration(0),
+      mVideoQueueGeneration(0),
+      mAudioDrainGeneration(0),
+      mVideoDrainGeneration(0),
+      mAudioEOSGeneration(0),
+      mMediaClock(mediaClock),
+      mPlaybackSettings(AUDIO_PLAYBACK_RATE_DEFAULT),
+      mAudioFirstAnchorTimeMediaUs(-1),
+      mAnchorTimeMediaUs(-1),
+      mAnchorNumFramesWritten(-1),
+      mVideoLateByUs(0ll),
+      mNextVideoTimeMediaUs(-1),
+      mHasAudio(false),
+      mHasVideo(false),
+      mNotifyCompleteAudio(false),
+      mNotifyCompleteVideo(false),
+      mSyncQueues(false),
+      mPaused(false),
+      mPauseDrainAudioAllowedUs(0),
+      mVideoSampleReceived(false),
+      mVideoRenderingStarted(false),
+      mVideoRenderingStartGeneration(0),
+      mAudioRenderingStartGeneration(0),
+      mRenderingDataDelivered(false),
+      mNextAudioClockUpdateTimeUs(-1),
+      mLastAudioMediaTimeUs(-1),
+      mAudioOffloadPauseTimeoutGeneration(0),
+      mAudioTornDown(false),
+      mCurrentOffloadInfo(AUDIO_INFO_INITIALIZER),
+      mCurrentPcmInfo(AUDIO_PCMINFO_INITIALIZER),
+      mTotalBuffersQueued(0),
+      mLastAudioBufferDrained(0),
+      mUseAudioCallback(false),
+      mWakeLock(new JWakeLock()) {
+    CHECK(mediaClock != NULL);
+    mPlaybackRate = mPlaybackSettings.mSpeed;
+    mMediaClock->setPlaybackRate(mPlaybackRate);
+}
+
+NuPlayer2::Renderer::~Renderer() {
+    if (offloadingAudio()) {
+        mAudioSink->stop();
+        mAudioSink->flush();
+        mAudioSink->close();
+    }
+
+    // Try to avoid a race condition in case the callback is still active.
+    Mutex::Autolock autoLock(mLock);
+    if (mUseAudioCallback) {
+        flushQueue(&mAudioQueue);
+        flushQueue(&mVideoQueue);
+    }
+    mWakeLock.clear();
+    mVideoScheduler.clear();
+    mNotify.clear();
+    mAudioSink.clear();
+}
+
+void NuPlayer2::Renderer::queueBuffer(
+        bool audio,
+        const sp<MediaCodecBuffer> &buffer,
+        const sp<AMessage> &notifyConsumed) {
+    sp<AMessage> msg = new AMessage(kWhatQueueBuffer, this);
+    msg->setInt32("queueGeneration", getQueueGeneration(audio));
+    msg->setInt32("audio", static_cast<int32_t>(audio));
+    msg->setObject("buffer", buffer);
+    msg->setMessage("notifyConsumed", notifyConsumed);
+    msg->post();
+}
+
+void NuPlayer2::Renderer::queueEOS(bool audio, status_t finalResult) {
+    CHECK_NE(finalResult, (status_t)OK);
+
+    sp<AMessage> msg = new AMessage(kWhatQueueEOS, this);
+    msg->setInt32("queueGeneration", getQueueGeneration(audio));
+    msg->setInt32("audio", static_cast<int32_t>(audio));
+    msg->setInt32("finalResult", finalResult);
+    msg->post();
+}
+
+status_t NuPlayer2::Renderer::setPlaybackSettings(const AudioPlaybackRate &rate) {
+    sp<AMessage> msg = new AMessage(kWhatConfigPlayback, this);
+    writeToAMessage(msg, rate);
+    sp<AMessage> response;
+    status_t err = msg->postAndAwaitResponse(&response);
+    if (err == OK && response != NULL) {
+        CHECK(response->findInt32("err", &err));
+    }
+    return err;
+}
+
+status_t NuPlayer2::Renderer::onConfigPlayback(const AudioPlaybackRate &rate /* sanitized */) {
+    if (rate.mSpeed == 0.f) {
+        onPause();
+        // Don't call the audio sink's setPlaybackRate() when pausing, as the pitch does
+        // not have to correspond to any non-zero speed (e.g. the old speed). Keep the
+        // settings nonetheless, using the old speed, in case the audio sink changes.
+        AudioPlaybackRate newRate = rate;
+        newRate.mSpeed = mPlaybackSettings.mSpeed;
+        mPlaybackSettings = newRate;
+        return OK;
+    }
+
+    if (mAudioSink != NULL && mAudioSink->ready()) {
+        status_t err = mAudioSink->setPlaybackRate(rate);
+        if (err != OK) {
+            return err;
+        }
+    }
+    mPlaybackSettings = rate;
+    mPlaybackRate = rate.mSpeed;
+    mMediaClock->setPlaybackRate(mPlaybackRate);
+    return OK;
+}
+
+status_t NuPlayer2::Renderer::getPlaybackSettings(AudioPlaybackRate *rate /* nonnull */) {
+    sp<AMessage> msg = new AMessage(kWhatGetPlaybackSettings, this);
+    sp<AMessage> response;
+    status_t err = msg->postAndAwaitResponse(&response);
+    if (err == OK && response != NULL) {
+        CHECK(response->findInt32("err", &err));
+        if (err == OK) {
+            readFromAMessage(response, rate);
+        }
+    }
+    return err;
+}
+
+status_t NuPlayer2::Renderer::onGetPlaybackSettings(AudioPlaybackRate *rate /* nonnull */) {
+    if (mAudioSink != NULL && mAudioSink->ready()) {
+        status_t err = mAudioSink->getPlaybackRate(rate);
+        if (err == OK) {
+            if (!isAudioPlaybackRateEqual(*rate, mPlaybackSettings)) {
+                ALOGW("correcting mismatch in internal/external playback rate");
+            }
+            // get playback settings used by audiosink, as it may be
+            // slightly off due to audiosink not taking small changes.
+            mPlaybackSettings = *rate;
+            if (mPaused) {
+                rate->mSpeed = 0.f;
+            }
+        }
+        return err;
+    }
+    *rate = mPlaybackSettings;
+    return OK;
+}
+
+status_t NuPlayer2::Renderer::setSyncSettings(const AVSyncSettings &sync, float videoFpsHint) {
+    sp<AMessage> msg = new AMessage(kWhatConfigSync, this);
+    writeToAMessage(msg, sync, videoFpsHint);
+    sp<AMessage> response;
+    status_t err = msg->postAndAwaitResponse(&response);
+    if (err == OK && response != NULL) {
+        CHECK(response->findInt32("err", &err));
+    }
+    return err;
+}
+
+status_t NuPlayer2::Renderer::onConfigSync(const AVSyncSettings &sync, float videoFpsHint __unused) {
+    if (sync.mSource != AVSYNC_SOURCE_DEFAULT) {
+        return BAD_VALUE;
+    }
+    // TODO: support sync sources
+    return INVALID_OPERATION;
+}
+
+status_t NuPlayer2::Renderer::getSyncSettings(AVSyncSettings *sync, float *videoFps) {
+    sp<AMessage> msg = new AMessage(kWhatGetSyncSettings, this);
+    sp<AMessage> response;
+    status_t err = msg->postAndAwaitResponse(&response);
+    if (err == OK && response != NULL) {
+        CHECK(response->findInt32("err", &err));
+        if (err == OK) {
+            readFromAMessage(response, sync, videoFps);
+        }
+    }
+    return err;
+}
+
+status_t NuPlayer2::Renderer::onGetSyncSettings(
+        AVSyncSettings *sync /* nonnull */, float *videoFps /* nonnull */) {
+    *sync = mSyncSettings;
+    *videoFps = -1.f;
+    return OK;
+}
+
+void NuPlayer2::Renderer::flush(bool audio, bool notifyComplete) {
+    {
+        Mutex::Autolock autoLock(mLock);
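+        // Bump the queue/drain generations so that messages already posted for the
+        // old data are ignored by their handlers.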
+        if (audio) {
+            mNotifyCompleteAudio |= notifyComplete;
+            clearAudioFirstAnchorTime_l();
+            ++mAudioQueueGeneration;
+            ++mAudioDrainGeneration;
+        } else {
+            mNotifyCompleteVideo |= notifyComplete;
+            ++mVideoQueueGeneration;
+            ++mVideoDrainGeneration;
+        }
+
+        mMediaClock->clearAnchor();
+        mVideoLateByUs = 0;
+        mNextVideoTimeMediaUs = -1;
+        mSyncQueues = false;
+    }
+
+    sp<AMessage> msg = new AMessage(kWhatFlush, this);
+    msg->setInt32("audio", static_cast<int32_t>(audio));
+    msg->post();
+}
+
+void NuPlayer2::Renderer::signalTimeDiscontinuity() {
+}
+
+void NuPlayer2::Renderer::signalDisableOffloadAudio() {
+    (new AMessage(kWhatDisableOffloadAudio, this))->post();
+}
+
+void NuPlayer2::Renderer::signalEnableOffloadAudio() {
+    (new AMessage(kWhatEnableOffloadAudio, this))->post();
+}
+
+void NuPlayer2::Renderer::pause() {
+    (new AMessage(kWhatPause, this))->post();
+}
+
+void NuPlayer2::Renderer::resume() {
+    (new AMessage(kWhatResume, this))->post();
+}
+
+void NuPlayer2::Renderer::setVideoFrameRate(float fps) {
+    sp<AMessage> msg = new AMessage(kWhatSetVideoFrameRate, this);
+    msg->setFloat("frame-rate", fps);
+    msg->post();
+}
+
+// Called on any thread, without mLock acquired.
+status_t NuPlayer2::Renderer::getCurrentPosition(int64_t *mediaUs) {
+    status_t result = mMediaClock->getMediaTime(ALooper::GetNowUs(), mediaUs);
+    if (result == OK) {
+        return result;
+    }
+
+    // MediaClock has not started yet. Try to start it if possible.
+    {
+        Mutex::Autolock autoLock(mLock);
+        if (mAudioFirstAnchorTimeMediaUs == -1) {
+            return result;
+        }
+
+        AudioTimestamp ts;
+        status_t res = mAudioSink->getTimestamp(ts);
+        if (res != OK) {
+            return result;
+        }
+
+        // AudioSink has rendered some frames.
+        int64_t nowUs = ALooper::GetNowUs();
+        int64_t nowMediaUs = mAudioSink->getPlayedOutDurationUs(nowUs)
+                + mAudioFirstAnchorTimeMediaUs;
+        mMediaClock->updateAnchor(nowMediaUs, nowUs, -1);
+    }
+
+    return mMediaClock->getMediaTime(ALooper::GetNowUs(), mediaUs);
+}
+
+void NuPlayer2::Renderer::clearAudioFirstAnchorTime_l() {
+    mAudioFirstAnchorTimeMediaUs = -1;
+    mMediaClock->setStartingTimeMedia(-1);
+}
+
+void NuPlayer2::Renderer::setAudioFirstAnchorTimeIfNeeded_l(int64_t mediaUs) {
+    if (mAudioFirstAnchorTimeMediaUs == -1) {
+        mAudioFirstAnchorTimeMediaUs = mediaUs;
+        mMediaClock->setStartingTimeMedia(mediaUs);
+    }
+}
+
+// Called on renderer looper.
+void NuPlayer2::Renderer::clearAnchorTime() {
+    mMediaClock->clearAnchor();
+    mAnchorTimeMediaUs = -1;
+    mAnchorNumFramesWritten = -1;
+}
+
+void NuPlayer2::Renderer::setVideoLateByUs(int64_t lateUs) {
+    Mutex::Autolock autoLock(mLock);
+    mVideoLateByUs = lateUs;
+}
+
+int64_t NuPlayer2::Renderer::getVideoLateByUs() {
+    Mutex::Autolock autoLock(mLock);
+    return mVideoLateByUs;
+}
+
+status_t NuPlayer2::Renderer::openAudioSink(
+        const sp<AMessage> &format,
+        bool offloadOnly,
+        bool hasVideo,
+        uint32_t flags,
+        bool *isOffloaded,
+        bool isStreaming) {
+    sp<AMessage> msg = new AMessage(kWhatOpenAudioSink, this);
+    msg->setMessage("format", format);
+    msg->setInt32("offload-only", offloadOnly);
+    msg->setInt32("has-video", hasVideo);
+    msg->setInt32("flags", flags);
+    msg->setInt32("isStreaming", isStreaming);
+
+    sp<AMessage> response;
+    status_t postStatus = msg->postAndAwaitResponse(&response);
+
+    int32_t err;
+    if (postStatus != OK || response.get() == nullptr || !response->findInt32("err", &err)) {
+        err = INVALID_OPERATION;
+    } else if (err == OK && isOffloaded != NULL) {
+        int32_t offload;
+        CHECK(response->findInt32("offload", &offload));
+        *isOffloaded = (offload != 0);
+    }
+    return err;
+}
+
+void NuPlayer2::Renderer::closeAudioSink() {
+    sp<AMessage> msg = new AMessage(kWhatCloseAudioSink, this);
+
+    sp<AMessage> response;
+    msg->postAndAwaitResponse(&response);
+}
+
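+// Posts an audio format change to the renderer; it is applied immediately if the
+// audio queue is empty (or stale), otherwise queued as a marker entry and handled
+// once the already-queued audio has drained.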
+void NuPlayer2::Renderer::changeAudioFormat(
+        const sp<AMessage> &format,
+        bool offloadOnly,
+        bool hasVideo,
+        uint32_t flags,
+        bool isStreaming,
+        const sp<AMessage> &notify) {
+    sp<AMessage> meta = new AMessage;
+    meta->setMessage("format", format);
+    meta->setInt32("offload-only", offloadOnly);
+    meta->setInt32("has-video", hasVideo);
+    meta->setInt32("flags", flags);
+    meta->setInt32("isStreaming", isStreaming);
+
+    sp<AMessage> msg = new AMessage(kWhatChangeAudioFormat, this);
+    msg->setInt32("queueGeneration", getQueueGeneration(true /* audio */));
+    msg->setMessage("notify", notify);
+    msg->setMessage("meta", meta);
+    msg->post();
+}
+
+void NuPlayer2::Renderer::onMessageReceived(const sp<AMessage> &msg) {
+    switch (msg->what()) {
+        case kWhatOpenAudioSink:
+        {
+            sp<AMessage> format;
+            CHECK(msg->findMessage("format", &format));
+
+            int32_t offloadOnly;
+            CHECK(msg->findInt32("offload-only", &offloadOnly));
+
+            int32_t hasVideo;
+            CHECK(msg->findInt32("has-video", &hasVideo));
+
+            uint32_t flags;
+            CHECK(msg->findInt32("flags", (int32_t *)&flags));
+
+            uint32_t isStreaming;
+            CHECK(msg->findInt32("isStreaming", (int32_t *)&isStreaming));
+
+            status_t err = onOpenAudioSink(format, offloadOnly, hasVideo, flags, isStreaming);
+
+            sp<AMessage> response = new AMessage;
+            response->setInt32("err", err);
+            response->setInt32("offload", offloadingAudio());
+
+            sp<AReplyToken> replyID;
+            CHECK(msg->senderAwaitsResponse(&replyID));
+            response->postReply(replyID);
+
+            break;
+        }
+
+        case kWhatCloseAudioSink:
+        {
+            sp<AReplyToken> replyID;
+            CHECK(msg->senderAwaitsResponse(&replyID));
+
+            onCloseAudioSink();
+
+            sp<AMessage> response = new AMessage;
+            response->postReply(replyID);
+            break;
+        }
+
+        case kWhatStopAudioSink:
+        {
+            mAudioSink->stop();
+            break;
+        }
+
+        case kWhatChangeAudioFormat:
+        {
+            int32_t queueGeneration;
+            CHECK(msg->findInt32("queueGeneration", &queueGeneration));
+
+            sp<AMessage> notify;
+            CHECK(msg->findMessage("notify", &notify));
+
+            if (offloadingAudio()) {
+                ALOGW("changeAudioFormat should NOT be called in offload mode");
+                notify->setInt32("err", INVALID_OPERATION);
+                notify->post();
+                break;
+            }
+
+            sp<AMessage> meta;
+            CHECK(msg->findMessage("meta", &meta));
+
+            if (queueGeneration != getQueueGeneration(true /* audio */)
+                    || mAudioQueue.empty()) {
+                onChangeAudioFormat(meta, notify);
+                break;
+            }
+
+            QueueEntry entry;
+            entry.mNotifyConsumed = notify;
+            entry.mMeta = meta;
+
+            Mutex::Autolock autoLock(mLock);
+            mAudioQueue.push_back(entry);
+            postDrainAudioQueue_l();
+
+            break;
+        }
+
+        case kWhatDrainAudioQueue:
+        {
+            mDrainAudioQueuePending = false;
+
+            int32_t generation;
+            CHECK(msg->findInt32("drainGeneration", &generation));
+            if (generation != getDrainGeneration(true /* audio */)) {
+                break;
+            }
+
+            if (onDrainAudioQueue()) {
+                uint32_t numFramesPlayed;
+                CHECK_EQ(mAudioSink->getPosition(&numFramesPlayed),
+                         (status_t)OK);
+
+                // Handle AudioTrack race when start is immediately called after flush.
+                uint32_t numFramesPendingPlayout =
+                    (mNumFramesWritten > numFramesPlayed ?
+                        mNumFramesWritten - numFramesPlayed : 0);
+
+                // This is how long the audio sink will have data to
+                // play back.
+                int64_t delayUs =
+                    mAudioSink->msecsPerFrame()
+                        * numFramesPendingPlayout * 1000ll;
+                if (mPlaybackRate > 1.0f) {
+                    delayUs /= mPlaybackRate;
+                }
+
+                // Let's give it more data after about half that time
+                // has elapsed.
+                delayUs /= 2;
+                // check the buffer size to estimate maximum delay permitted.
+                const int64_t maxDrainDelayUs = std::max(
+                        mAudioSink->getBufferDurationInUs(), (int64_t)500000 /* half second */);
+                ALOGD_IF(delayUs > maxDrainDelayUs, "postDrainAudioQueue long delay: %lld > %lld",
+                        (long long)delayUs, (long long)maxDrainDelayUs);
+                Mutex::Autolock autoLock(mLock);
+                postDrainAudioQueue_l(delayUs);
+            }
+            break;
+        }
+
+        case kWhatDrainVideoQueue:
+        {
+            int32_t generation;
+            CHECK(msg->findInt32("drainGeneration", &generation));
+            if (generation != getDrainGeneration(false /* audio */)) {
+                break;
+            }
+
+            mDrainVideoQueuePending = false;
+
+            onDrainVideoQueue();
+
+            postDrainVideoQueue();
+            break;
+        }
+
+        case kWhatPostDrainVideoQueue:
+        {
+            int32_t generation;
+            CHECK(msg->findInt32("drainGeneration", &generation));
+            if (generation != getDrainGeneration(false /* audio */)) {
+                break;
+            }
+
+            mDrainVideoQueuePending = false;
+            postDrainVideoQueue();
+            break;
+        }
+
+        case kWhatQueueBuffer:
+        {
+            onQueueBuffer(msg);
+            break;
+        }
+
+        case kWhatQueueEOS:
+        {
+            onQueueEOS(msg);
+            break;
+        }
+
+        case kWhatEOS:
+        {
+            int32_t generation;
+            CHECK(msg->findInt32("audioEOSGeneration", &generation));
+            if (generation != mAudioEOSGeneration) {
+                break;
+            }
+            status_t finalResult;
+            CHECK(msg->findInt32("finalResult", &finalResult));
+            notifyEOS(true /* audio */, finalResult);
+            break;
+        }
+
+        case kWhatConfigPlayback:
+        {
+            sp<AReplyToken> replyID;
+            CHECK(msg->senderAwaitsResponse(&replyID));
+            AudioPlaybackRate rate;
+            readFromAMessage(msg, &rate);
+            status_t err = onConfigPlayback(rate);
+            sp<AMessage> response = new AMessage;
+            response->setInt32("err", err);
+            response->postReply(replyID);
+            break;
+        }
+
+        case kWhatGetPlaybackSettings:
+        {
+            sp<AReplyToken> replyID;
+            CHECK(msg->senderAwaitsResponse(&replyID));
+            AudioPlaybackRate rate = AUDIO_PLAYBACK_RATE_DEFAULT;
+            status_t err = onGetPlaybackSettings(&rate);
+            sp<AMessage> response = new AMessage;
+            if (err == OK) {
+                writeToAMessage(response, rate);
+            }
+            response->setInt32("err", err);
+            response->postReply(replyID);
+            break;
+        }
+
+        case kWhatConfigSync:
+        {
+            sp<AReplyToken> replyID;
+            CHECK(msg->senderAwaitsResponse(&replyID));
+            AVSyncSettings sync;
+            float videoFpsHint;
+            readFromAMessage(msg, &sync, &videoFpsHint);
+            status_t err = onConfigSync(sync, videoFpsHint);
+            sp<AMessage> response = new AMessage;
+            response->setInt32("err", err);
+            response->postReply(replyID);
+            break;
+        }
+
+        case kWhatGetSyncSettings:
+        {
+            sp<AReplyToken> replyID;
+            CHECK(msg->senderAwaitsResponse(&replyID));
+
+            ALOGV("kWhatGetSyncSettings");
+            AVSyncSettings sync;
+            float videoFps = -1.f;
+            status_t err = onGetSyncSettings(&sync, &videoFps);
+            sp<AMessage> response = new AMessage;
+            if (err == OK) {
+                writeToAMessage(response, sync, videoFps);
+            }
+            response->setInt32("err", err);
+            response->postReply(replyID);
+            break;
+        }
+
+        case kWhatFlush:
+        {
+            onFlush(msg);
+            break;
+        }
+
+        case kWhatDisableOffloadAudio:
+        {
+            onDisableOffloadAudio();
+            break;
+        }
+
+        case kWhatEnableOffloadAudio:
+        {
+            onEnableOffloadAudio();
+            break;
+        }
+
+        case kWhatPause:
+        {
+            onPause();
+            break;
+        }
+
+        case kWhatResume:
+        {
+            onResume();
+            break;
+        }
+
+        case kWhatSetVideoFrameRate:
+        {
+            float fps;
+            CHECK(msg->findFloat("frame-rate", &fps));
+            onSetVideoFrameRate(fps);
+            break;
+        }
+
+        case kWhatAudioTearDown:
+        {
+            int32_t reason;
+            CHECK(msg->findInt32("reason", &reason));
+
+            onAudioTearDown((AudioTearDownReason)reason);
+            break;
+        }
+
+        case kWhatAudioOffloadPauseTimeout:
+        {
+            int32_t generation;
+            CHECK(msg->findInt32("drainGeneration", &generation));
+            if (generation != mAudioOffloadPauseTimeoutGeneration) {
+                break;
+            }
+            ALOGV("Audio Offload tear down due to pause timeout.");
+            onAudioTearDown(kDueToTimeout);
+            mWakeLock->release();
+            break;
+        }
+
+        default:
+            TRESPASS();
+            break;
+    }
+}
+
+void NuPlayer2::Renderer::postDrainAudioQueue_l(int64_t delayUs) {
+    if (mDrainAudioQueuePending || mSyncQueues || mUseAudioCallback) {
+        return;
+    }
+
+    if (mAudioQueue.empty()) {
+        return;
+    }
+
+    // FIXME: if paused, wait until AudioTrack stop() is complete before delivering data.
+    if (mPaused) {
+        const int64_t diffUs = mPauseDrainAudioAllowedUs - ALooper::GetNowUs();
+        if (diffUs > delayUs) {
+            delayUs = diffUs;
+        }
+    }
+
+    mDrainAudioQueuePending = true;
+    sp<AMessage> msg = new AMessage(kWhatDrainAudioQueue, this);
+    msg->setInt32("drainGeneration", mAudioDrainGeneration);
+    msg->post(delayUs);
+}
+
+void NuPlayer2::Renderer::prepareForMediaRenderingStart_l() {
+    mAudioRenderingStartGeneration = mAudioDrainGeneration;
+    mVideoRenderingStartGeneration = mVideoDrainGeneration;
+    mRenderingDataDelivered = false;
+}
+
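+// Reports kWhatMediaRenderingStart the first time data is delivered after
+// prepareForMediaRenderingStart_l(), provided no flush has bumped either drain
+// generation in between.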
+void NuPlayer2::Renderer::notifyIfMediaRenderingStarted_l() {
+    if (mVideoRenderingStartGeneration == mVideoDrainGeneration &&
+        mAudioRenderingStartGeneration == mAudioDrainGeneration) {
+        mRenderingDataDelivered = true;
+        if (mPaused) {
+            return;
+        }
+        mVideoRenderingStartGeneration = -1;
+        mAudioRenderingStartGeneration = -1;
+
+        sp<AMessage> notify = mNotify->dup();
+        notify->setInt32("what", kWhatMediaRenderingStart);
+        notify->post();
+    }
+}
+
+// static
+size_t NuPlayer2::Renderer::AudioSinkCallback(
+        MediaPlayer2Base::AudioSink * /* audioSink */,
+        void *buffer,
+        size_t size,
+        void *cookie,
+        MediaPlayer2Base::AudioSink::cb_event_t event) {
+    NuPlayer2::Renderer *me = (NuPlayer2::Renderer *)cookie;
+
+    switch (event) {
+        case MediaPlayer2Base::AudioSink::CB_EVENT_FILL_BUFFER:
+        {
+            return me->fillAudioBuffer(buffer, size);
+        }
+
+        case MediaPlayer2Base::AudioSink::CB_EVENT_STREAM_END:
+        {
+            ALOGV("AudioSink::CB_EVENT_STREAM_END");
+            me->notifyEOSCallback();
+            break;
+        }
+
+        case MediaPlayer2Base::AudioSink::CB_EVENT_TEAR_DOWN:
+        {
+            ALOGV("AudioSink::CB_EVENT_TEAR_DOWN");
+            me->notifyAudioTearDown(kDueToError);
+            break;
+        }
+    }
+
+    return 0;
+}
+
+void NuPlayer2::Renderer::notifyEOSCallback() {
+    Mutex::Autolock autoLock(mLock);
+
+    if (!mUseAudioCallback) {
+        return;
+    }
+
+    notifyEOS_l(true /* audio */, ERROR_END_OF_STREAM);
+}
+
+size_t NuPlayer2::Renderer::fillAudioBuffer(void *buffer, size_t size) {
+    Mutex::Autolock autoLock(mLock);
+
+    if (!mUseAudioCallback) {
+        return 0;
+    }
+
+    bool hasEOS = false;
+    status_t eosResult = OK;
+
+    size_t sizeCopied = 0;
+    bool firstEntry = true;
+    QueueEntry *entry;
+    while (sizeCopied < size && !mAudioQueue.empty()) {
+        entry = &*mAudioQueue.begin();
+
+        if (entry->mBuffer == NULL) { // EOS
+            hasEOS = true;
+            // Save the final result now; erase() destroys the entry.
+            eosResult = entry->mFinalResult;
+            mAudioQueue.erase(mAudioQueue.begin());
+            break;
+        }
+
+        if (firstEntry && entry->mOffset == 0) {
+            firstEntry = false;
+            int64_t mediaTimeUs;
+            CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
+            ALOGV("fillAudioBuffer: rendering audio at media time %.2f secs", mediaTimeUs / 1E6);
+            setAudioFirstAnchorTimeIfNeeded_l(mediaTimeUs);
+        }
+
+        size_t copy = entry->mBuffer->size() - entry->mOffset;
+        size_t sizeRemaining = size - sizeCopied;
+        if (copy > sizeRemaining) {
+            copy = sizeRemaining;
+        }
+
+        memcpy((char *)buffer + sizeCopied,
+               entry->mBuffer->data() + entry->mOffset,
+               copy);
+
+        entry->mOffset += copy;
+        if (entry->mOffset == entry->mBuffer->size()) {
+            entry->mNotifyConsumed->post();
+            mAudioQueue.erase(mAudioQueue.begin());
+            entry = NULL;
+        }
+        sizeCopied += copy;
+
+        notifyIfMediaRenderingStarted_l();
+    }
+
+    if (mAudioFirstAnchorTimeMediaUs >= 0) {
+        int64_t nowUs = ALooper::GetNowUs();
+        int64_t nowMediaUs =
+            mAudioFirstAnchorTimeMediaUs + mAudioSink->getPlayedOutDurationUs(nowUs);
+        // we don't know how much data we are queueing for offloaded tracks.
+        mMediaClock->updateAnchor(nowMediaUs, nowUs, INT64_MAX);
+    }
+
+    // for non-offloaded audio, we need to compute the frames written because
+    // there is no EVENT_STREAM_END notification. The frames written gives
+    // an estimate on the pending played out duration.
+    if (!offloadingAudio()) {
+        mNumFramesWritten += sizeCopied / mAudioSink->frameSize();
+    }
+
+    if (hasEOS) {
+        (new AMessage(kWhatStopAudioSink, this))->post();
+        // As there is currently no EVENT_STREAM_END callback notification for
+        // non-offloaded audio tracks, we need to post the EOS ourselves.
+        if (!offloadingAudio()) {
+            int64_t postEOSDelayUs = 0;
+            if (mAudioSink->needsTrailingPadding()) {
+                postEOSDelayUs = getPendingAudioPlayoutDurationUs(ALooper::GetNowUs());
+            }
+            ALOGV("fillAudioBuffer: notifyEOS_l "
+                    "mNumFramesWritten:%u  finalResult:%d  postEOSDelay:%lld",
+                    mNumFramesWritten, entry->mFinalResult, (long long)postEOSDelayUs);
+            notifyEOS_l(true /* audio */, entry->mFinalResult, postEOSDelayUs);
+        }
+    }
+    return sizeCopied;
+}
+
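+// Find the last EOS entry (or format-change entry whose "eos" flag is set) in the
+// audio queue, post the pending notifications for everything before it, and drop
+// those samples.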
+void NuPlayer2::Renderer::drainAudioQueueUntilLastEOS() {
+    List<QueueEntry>::iterator it = mAudioQueue.begin(), itEOS = it;
+    bool foundEOS = false;
+    while (it != mAudioQueue.end()) {
+        int32_t eos;
+        QueueEntry *entry = &*it++;
+        if ((entry->mBuffer == nullptr && entry->mNotifyConsumed == nullptr)
+                || (entry->mNotifyConsumed->findInt32("eos", &eos) && eos != 0)) {
+            itEOS = it;
+            foundEOS = true;
+        }
+    }
+
+    if (foundEOS) {
+        // post all replies before EOS and drop the samples
+        for (it = mAudioQueue.begin(); it != itEOS; it++) {
+            if (it->mBuffer == nullptr) {
+                if (it->mNotifyConsumed == nullptr) {
+                    // delay doesn't matter as we don't even have an AudioTrack
+                    notifyEOS(true /* audio */, it->mFinalResult);
+                } else {
+                    // TAG for re-opening audio sink.
+                    onChangeAudioFormat(it->mMeta, it->mNotifyConsumed);
+                }
+            } else {
+                it->mNotifyConsumed->post();
+            }
+        }
+        mAudioQueue.erase(mAudioQueue.begin(), itEOS);
+    }
+}
+
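+// Write as much queued audio to the AudioSink as it will accept without blocking.
+// Returns true if another drain should be scheduled.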
+bool NuPlayer2::Renderer::onDrainAudioQueue() {
+    // do not drain audio during teardown as queued buffers may be invalid.
+    if (mAudioTornDown) {
+        return false;
+    }
+    // TODO: This call to getPosition checks if AudioTrack has been created
+    // in AudioSink before draining audio. If AudioTrack doesn't exist, then
+    // CHECKs on getPosition will fail.
+    // We still need to figure out why AudioTrack is not created when
+    // this function is called. One possible reason could be leftover
+    // audio. Another place to check is whether the decoder has received
+    // INFO_FORMAT_CHANGED as its first buffer, since AudioSink is opened
+    // there; also look at possible interactions with a flush issued
+    // immediately after start. Investigate the error message
+    // "vorbis_dsp_synthesis returned -135", along with RTSP.
+    uint32_t numFramesPlayed;
+    if (mAudioSink->getPosition(&numFramesPlayed) != OK) {
+        // When getPosition fails, renderer will not reschedule the draining
+        // unless new samples are queued.
+        // If we have pending EOS (or "eos" marker for discontinuities), we need
+        // to post these now as NuPlayer2Decoder might be waiting for it.
+        drainAudioQueueUntilLastEOS();
+
+        ALOGW("onDrainAudioQueue(): audio sink is not ready");
+        return false;
+    }
+
+#if 0
+    ssize_t numFramesAvailableToWrite =
+        mAudioSink->frameCount() - (mNumFramesWritten - numFramesPlayed);
+
+    if (numFramesAvailableToWrite == mAudioSink->frameCount()) {
+        ALOGI("audio sink underrun");
+    } else {
+        ALOGV("audio queue has %d frames left to play",
+             mAudioSink->frameCount() - numFramesAvailableToWrite);
+    }
+#endif
+
+    uint32_t prevFramesWritten = mNumFramesWritten;
+    while (!mAudioQueue.empty()) {
+        QueueEntry *entry = &*mAudioQueue.begin();
+
+        if (entry->mBuffer == NULL) {
+            if (entry->mNotifyConsumed != nullptr) {
+                // TAG for re-opening the audio sink.
+                onChangeAudioFormat(entry->mMeta, entry->mNotifyConsumed);
+                mAudioQueue.erase(mAudioQueue.begin());
+                continue;
+            }
+
+            // EOS
+            if (mPaused) {
+                // Do not notify EOS when paused.
+                // This is needed to avoid switching to the next clip while paused.
+                ALOGV("onDrainAudioQueue(): Do not notify EOS when paused");
+                return false;
+            }
+
+            int64_t postEOSDelayUs = 0;
+            if (mAudioSink->needsTrailingPadding()) {
+                postEOSDelayUs = getPendingAudioPlayoutDurationUs(ALooper::GetNowUs());
+            }
+            notifyEOS(true /* audio */, entry->mFinalResult, postEOSDelayUs);
+            mLastAudioMediaTimeUs = getDurationUsIfPlayedAtSampleRate(mNumFramesWritten);
+
+            mAudioQueue.erase(mAudioQueue.begin());
+            entry = NULL;
+            if (mAudioSink->needsTrailingPadding()) {
+                // If we're not in gapless playback (i.e. through setNextPlayer), we
+                // need to stop the track here, because that will play out the last
+                // little bit at the end of the file. Otherwise short files won't play.
+                mAudioSink->stop();
+                mNumFramesWritten = 0;
+            }
+            return false;
+        }
+
+        mLastAudioBufferDrained = entry->mBufferOrdinal;
+
+        // ignore a 0-sized buffer, which could be an EOS marker with no data
+        if (entry->mOffset == 0 && entry->mBuffer->size() > 0) {
+            int64_t mediaTimeUs;
+            CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
+            ALOGV("onDrainAudioQueue: rendering audio at media time %.2f secs",
+                    mediaTimeUs / 1E6);
+            onNewAudioMediaTime(mediaTimeUs);
+        }
+
+        size_t copy = entry->mBuffer->size() - entry->mOffset;
+
+        ssize_t written = mAudioSink->write(entry->mBuffer->data() + entry->mOffset,
+                                            copy, false /* blocking */);
+        if (written < 0) {
+            // An error in AudioSink write. Perhaps the AudioSink was not properly opened.
+            if (written == WOULD_BLOCK) {
+                ALOGV("AudioSink write would block when writing %zu bytes", copy);
+            } else {
+                ALOGE("AudioSink write error(%zd) when writing %zu bytes", written, copy);
+                // This can only happen when the AudioSink was opened with the doNotReconnect flag
+                // set to true, in which case NuPlayer2 will handle the reconnect.
+                notifyAudioTearDown(kDueToError);
+            }
+            break;
+        }
+
+        entry->mOffset += written;
+        size_t remainder = entry->mBuffer->size() - entry->mOffset;
+        if ((ssize_t)remainder < mAudioSink->frameSize()) {
+            if (remainder > 0) {
+                ALOGW("Corrupted audio buffer has fractional frames, discarding %zu bytes.",
+                        remainder);
+                entry->mOffset += remainder;
+                copy -= remainder;
+            }
+
+            entry->mNotifyConsumed->post();
+            mAudioQueue.erase(mAudioQueue.begin());
+
+            entry = NULL;
+        }
+
+        size_t copiedFrames = written / mAudioSink->frameSize();
+        mNumFramesWritten += copiedFrames;
+
+        {
+            Mutex::Autolock autoLock(mLock);
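+            // Advance the max media time to the anchor time plus the duration of the
+            // frames written since the anchor, at the sink's per-frame duration.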
+            int64_t maxTimeMedia;
+            maxTimeMedia =
+                mAnchorTimeMediaUs +
+                        (int64_t)(max((long long)mNumFramesWritten - mAnchorNumFramesWritten, 0LL)
+                                * 1000LL * mAudioSink->msecsPerFrame());
+            mMediaClock->updateMaxTimeMedia(maxTimeMedia);
+
+            notifyIfMediaRenderingStarted_l();
+        }
+
+        if (written != (ssize_t)copy) {
+            // A short count was received from AudioSink::write()
+            //
+            // AudioSink write is called in non-blocking mode.
+            // It may return with a short count when:
+            //
+            // 1) Size to be copied is not a multiple of the frame size. Fractional frames are
+            //    discarded.
+            // 2) The data to be copied exceeds the available buffer in AudioSink.
+            // 3) An error occurs and data has been partially copied to the buffer in AudioSink.
+            // 4) AudioSink is an AudioCache for data retrieval, and the AudioCache is exceeded.
+
+            // (Case 1)
+            // Must be a multiple of the frame size. If it is not, it needs to fail,
+            // as we should not carry over fractional frames between calls.
+            CHECK_EQ(copy % mAudioSink->frameSize(), 0u);
+
+            // (Case 2, 3, 4)
+            // Return early to the caller.
+            // Beware of calling immediately again as this may busy-loop if you are not careful.
+            ALOGV("AudioSink write short frame count %zd < %zu", written, copy);
+            break;
+        }
+    }
+
+    // calculate whether we need to reschedule another write.
+    bool reschedule = !mAudioQueue.empty()
+            && (!mPaused
+                || prevFramesWritten != mNumFramesWritten); // permit pause to fill buffers
+    //ALOGD("reschedule:%d  empty:%d  mPaused:%d  prevFramesWritten:%u  mNumFramesWritten:%u",
+    //        reschedule, mAudioQueue.empty(), mPaused, prevFramesWritten, mNumFramesWritten);
+    return reschedule;
+}
+
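+// Convert a written-frame count into a playout duration in microseconds, using the
+// sample rate of the current offload or PCM configuration.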
+int64_t NuPlayer2::Renderer::getDurationUsIfPlayedAtSampleRate(uint32_t numFrames) {
+    int32_t sampleRate = offloadingAudio() ?
+            mCurrentOffloadInfo.sample_rate : mCurrentPcmInfo.mSampleRate;
+    if (sampleRate == 0) {
+        ALOGE("sampleRate is 0 in %s mode", offloadingAudio() ? "offload" : "non-offload");
+        return 0;
+    }
+    // TODO: remove the (int32_t) casting below as it may overflow at 12.4 hours.
+    return (int64_t)((int32_t)numFrames * 1000000LL / sampleRate);
+}
+
+// Calculate duration of pending samples if played at normal rate (i.e., 1.0).
+int64_t NuPlayer2::Renderer::getPendingAudioPlayoutDurationUs(int64_t nowUs) {
+    int64_t writtenAudioDurationUs = getDurationUsIfPlayedAtSampleRate(mNumFramesWritten);
+    if (mUseVirtualAudioSink) {
+        int64_t nowUs = ALooper::GetNowUs();
+        int64_t mediaUs;
+        if (mMediaClock->getMediaTime(nowUs, &mediaUs) != OK) {
+            return 0ll;
+        } else {
+            return writtenAudioDurationUs - (mediaUs - mAudioFirstAnchorTimeMediaUs);
+        }
+    }
+
+    const int64_t audioSinkPlayedUs = mAudioSink->getPlayedOutDurationUs(nowUs);
+    int64_t pendingUs = writtenAudioDurationUs - audioSinkPlayedUs;
+    if (pendingUs < 0) {
+        // This shouldn't happen unless the timestamp is stale.
+        ALOGW("%s: pendingUs %lld < 0, clamping to zero, potential resume after pause "
+                "writtenAudioDurationUs: %lld, audioSinkPlayedUs: %lld",
+                __func__, (long long)pendingUs,
+                (long long)writtenAudioDurationUs, (long long)audioSinkPlayedUs);
+        pendingUs = 0;
+    }
+    return pendingUs;
+}
+
+int64_t NuPlayer2::Renderer::getRealTimeUs(int64_t mediaTimeUs, int64_t nowUs) {
+    int64_t realUs;
+    if (mMediaClock->getRealTimeFor(mediaTimeUs, &realUs) != OK) {
+        // If we failed to get the current position, e.g. because the audio clock is
+        // not ready, just play out the video immediately without delay.
+        return nowUs;
+    }
+    return realUs;
+}
+
+void NuPlayer2::Renderer::onNewAudioMediaTime(int64_t mediaTimeUs) {
+    Mutex::Autolock autoLock(mLock);
+    // TRICKY: vorbis decoder generates multiple frames with the same
+    // timestamp, so only update on the first frame with a given timestamp
+    if (mediaTimeUs == mAnchorTimeMediaUs) {
+        return;
+    }
+    setAudioFirstAnchorTimeIfNeeded_l(mediaTimeUs);
+
+    // mNextAudioClockUpdateTimeUs is -1 if we're waiting for audio sink to start
+    if (mNextAudioClockUpdateTimeUs == -1) {
+        AudioTimestamp ts;
+        if (mAudioSink->getTimestamp(ts) == OK && ts.mPosition > 0) {
+            mNextAudioClockUpdateTimeUs = 0; // start our clock updates
+        }
+    }
+    int64_t nowUs = ALooper::GetNowUs();
+    if (mNextAudioClockUpdateTimeUs >= 0) {
+        if (nowUs >= mNextAudioClockUpdateTimeUs) {
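+            // The media time being played out right now is the newest buffer's timestamp
+            // minus the duration of audio still pending in the sink.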
+            int64_t nowMediaUs = mediaTimeUs - getPendingAudioPlayoutDurationUs(nowUs);
+            mMediaClock->updateAnchor(nowMediaUs, nowUs, mediaTimeUs);
+            mUseVirtualAudioSink = false;
+            mNextAudioClockUpdateTimeUs = nowUs + kMinimumAudioClockUpdatePeriodUs;
+        }
+    } else {
+        int64_t unused;
+        if ((mMediaClock->getMediaTime(nowUs, &unused) != OK)
+                && (getDurationUsIfPlayedAtSampleRate(mNumFramesWritten)
+                        > kMaxAllowedAudioSinkDelayUs)) {
+            // Enough data has been sent to AudioSink, but AudioSink has not rendered
+            // any data yet. Something is wrong with AudioSink, e.g., the device is not
+            // connected to audio out.
+            // Switch to system clock. This essentially creates a virtual AudioSink with
+            // initial latency of getDurationUsIfPlayedAtSampleRate(mNumFramesWritten).
+            // This virtual AudioSink renders audio data starting from the very first sample
+            // and is paced by the system clock.
+            ALOGW("AudioSink stuck. ARE YOU CONNECTED TO AUDIO OUT? Switching to system clock.");
+            mMediaClock->updateAnchor(mAudioFirstAnchorTimeMediaUs, nowUs, mediaTimeUs);
+            mUseVirtualAudioSink = true;
+        }
+    }
+    mAnchorNumFramesWritten = mNumFramesWritten;
+    mAnchorTimeMediaUs = mediaTimeUs;
+}
+
+// Called without mLock acquired.
+void NuPlayer2::Renderer::postDrainVideoQueue() {
+    if (mDrainVideoQueuePending
+            || getSyncQueues()
+            || (mPaused && mVideoSampleReceived)) {
+        return;
+    }
+
+    if (mVideoQueue.empty()) {
+        return;
+    }
+
+    QueueEntry &entry = *mVideoQueue.begin();
+
+    sp<AMessage> msg = new AMessage(kWhatDrainVideoQueue, this);
+    msg->setInt32("drainGeneration", getDrainGeneration(false /* audio */));
+
+    if (entry.mBuffer == NULL) {
+        // EOS doesn't carry a timestamp.
+        msg->post();
+        mDrainVideoQueuePending = true;
+        return;
+    }
+
+    int64_t nowUs = ALooper::GetNowUs();
+    if (mFlags & FLAG_REAL_TIME) {
+        int64_t realTimeUs;
+        CHECK(entry.mBuffer->meta()->findInt64("timeUs", &realTimeUs));
+
+        realTimeUs = mVideoScheduler->schedule(realTimeUs * 1000) / 1000;
+
+        int64_t twoVsyncsUs = 2 * (mVideoScheduler->getVsyncPeriod() / 1000);
+
+        int64_t delayUs = realTimeUs - nowUs;
+
+        ALOGW_IF(delayUs > 500000, "unusually high delayUs: %lld", (long long)delayUs);
+        // post 2 display refreshes before rendering is due
+        msg->post(delayUs > twoVsyncsUs ? delayUs - twoVsyncsUs : 0);
+
+        mDrainVideoQueuePending = true;
+        return;
+    }
+
+    int64_t mediaTimeUs;
+    CHECK(entry.mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
+
+    {
+        Mutex::Autolock autoLock(mLock);
+        if (mAnchorTimeMediaUs < 0) {
+            mMediaClock->updateAnchor(mediaTimeUs, nowUs, mediaTimeUs);
+            mAnchorTimeMediaUs = mediaTimeUs;
+        }
+    }
+    mNextVideoTimeMediaUs = mediaTimeUs + 100000;
+    if (!mHasAudio) {
+        // smooth out videos >= 10fps
+        mMediaClock->updateMaxTimeMedia(mNextVideoTimeMediaUs);
+    }
+
+    if (!mVideoSampleReceived || mediaTimeUs < mAudioFirstAnchorTimeMediaUs) {
+        msg->post();
+    } else {
+        int64_t twoVsyncsUs = 2 * (mVideoScheduler->getVsyncPeriod() / 1000);
+
+        // post 2 display refreshes before rendering is due
+        mMediaClock->addTimer(msg, mediaTimeUs, -twoVsyncsUs);
+    }
+
+    mDrainVideoQueuePending = true;
+}
+
+void NuPlayer2::Renderer::onDrainVideoQueue() {
+    if (mVideoQueue.empty()) {
+        return;
+    }
+
+    QueueEntry *entry = &*mVideoQueue.begin();
+
+    if (entry->mBuffer == NULL) {
+        // EOS
+
+        notifyEOS(false /* audio */, entry->mFinalResult);
+
+        mVideoQueue.erase(mVideoQueue.begin());
+        entry = NULL;
+
+        setVideoLateByUs(0);
+        return;
+    }
+
+    int64_t nowUs = ALooper::GetNowUs();
+    int64_t realTimeUs;
+    int64_t mediaTimeUs = -1;
+    if (mFlags & FLAG_REAL_TIME) {
+        CHECK(entry->mBuffer->meta()->findInt64("timeUs", &realTimeUs));
+    } else {
+        CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
+
+        realTimeUs = getRealTimeUs(mediaTimeUs, nowUs);
+    }
+    realTimeUs = mVideoScheduler->schedule(realTimeUs * 1000) / 1000;
+
+    bool tooLate = false;
+
+    if (!mPaused) {
+        setVideoLateByUs(nowUs - realTimeUs);
+        tooLate = (mVideoLateByUs > 40000);
+
+        if (tooLate) {
+            ALOGV("video late by %lld us (%.2f secs)",
+                 (long long)mVideoLateByUs, mVideoLateByUs / 1E6);
+        } else {
+            int64_t mediaUs = 0;
+            mMediaClock->getMediaTime(realTimeUs, &mediaUs);
+            ALOGV("rendering video at media time %.2f secs",
+                    (mFlags & FLAG_REAL_TIME ? realTimeUs :
+                    mediaUs) / 1E6);
+
+            if (!(mFlags & FLAG_REAL_TIME)
+                    && mLastAudioMediaTimeUs != -1
+                    && mediaTimeUs > mLastAudioMediaTimeUs) {
+                // If audio ends before video, video continues to drive media clock.
+                // Also smooth out videos >= 10fps.
+                mMediaClock->updateMaxTimeMedia(mediaTimeUs + 100000);
+            }
+        }
+    } else {
+        setVideoLateByUs(0);
+        if (!mVideoSampleReceived && !mHasAudio) {
+            // This will ensure that the first frame after a flush won't be used as anchor
+            // when renderer is in paused state, because resume can happen any time after seek.
+            clearAnchorTime();
+        }
+    }
+
+    // Always render the first video frame while keeping stats on A/V sync.
+    if (!mVideoSampleReceived) {
+        realTimeUs = nowUs;
+        tooLate = false;
+    }
+
+    entry->mNotifyConsumed->setInt64("timestampNs", realTimeUs * 1000ll);
+    entry->mNotifyConsumed->setInt32("render", !tooLate);
+    entry->mNotifyConsumed->post();
+    mVideoQueue.erase(mVideoQueue.begin());
+    entry = NULL;
+
+    mVideoSampleReceived = true;
+
+    if (!mPaused) {
+        if (!mVideoRenderingStarted) {
+            mVideoRenderingStarted = true;
+            notifyVideoRenderingStart();
+        }
+        Mutex::Autolock autoLock(mLock);
+        notifyIfMediaRenderingStarted_l();
+    }
+}
+
+void NuPlayer2::Renderer::notifyVideoRenderingStart() {
+    sp<AMessage> notify = mNotify->dup();
+    notify->setInt32("what", kWhatVideoRenderingStart);
+    notify->post();
+}
+
+void NuPlayer2::Renderer::notifyEOS(bool audio, status_t finalResult, int64_t delayUs) {
+    Mutex::Autolock autoLock(mLock);
+    notifyEOS_l(audio, finalResult, delayUs);
+}
+
+void NuPlayer2::Renderer::notifyEOS_l(bool audio, status_t finalResult, int64_t delayUs) {
+    if (audio && delayUs > 0) {
+        sp<AMessage> msg = new AMessage(kWhatEOS, this);
+        msg->setInt32("audioEOSGeneration", mAudioEOSGeneration);
+        msg->setInt32("finalResult", finalResult);
+        msg->post(delayUs);
+        return;
+    }
+    sp<AMessage> notify = mNotify->dup();
+    notify->setInt32("what", kWhatEOS);
+    notify->setInt32("audio", static_cast<int32_t>(audio));
+    notify->setInt32("finalResult", finalResult);
+    notify->post(delayUs);
+
+    if (audio) {
+        // Video might outlive audio. Clear the anchor to enable the video-only case.
+        mAnchorTimeMediaUs = -1;
+        mHasAudio = false;
+        if (mNextVideoTimeMediaUs >= 0) {
+            int64_t mediaUs = 0;
+            mMediaClock->getMediaTime(ALooper::GetNowUs(), &mediaUs);
+            if (mNextVideoTimeMediaUs > mediaUs) {
+                mMediaClock->updateMaxTimeMedia(mNextVideoTimeMediaUs);
+            }
+        }
+    }
+}
+
+void NuPlayer2::Renderer::notifyAudioTearDown(AudioTearDownReason reason) {
+    sp<AMessage> msg = new AMessage(kWhatAudioTearDown, this);
+    msg->setInt32("reason", reason);
+    msg->post();
+}
+
+void NuPlayer2::Renderer::onQueueBuffer(const sp<AMessage> &msg) {
+    int32_t audio;
+    CHECK(msg->findInt32("audio", &audio));
+
+    if (dropBufferIfStale(audio, msg)) {
+        return;
+    }
+
+    if (audio) {
+        mHasAudio = true;
+    } else {
+        mHasVideo = true;
+    }
+
+    if (mHasVideo) {
+        if (mVideoScheduler == NULL) {
+            mVideoScheduler = new VideoFrameScheduler();
+            mVideoScheduler->init();
+        }
+    }
+
+    sp<RefBase> obj;
+    CHECK(msg->findObject("buffer", &obj));
+    sp<MediaCodecBuffer> buffer = static_cast<MediaCodecBuffer *>(obj.get());
+
+    sp<AMessage> notifyConsumed;
+    CHECK(msg->findMessage("notifyConsumed", &notifyConsumed));
+
+    QueueEntry entry;
+    entry.mBuffer = buffer;
+    entry.mNotifyConsumed = notifyConsumed;
+    entry.mOffset = 0;
+    entry.mFinalResult = OK;
+    entry.mBufferOrdinal = ++mTotalBuffersQueued;
+
+    if (audio) {
+        Mutex::Autolock autoLock(mLock);
+        mAudioQueue.push_back(entry);
+        postDrainAudioQueue_l();
+    } else {
+        mVideoQueue.push_back(entry);
+        postDrainVideoQueue();
+    }
+
+    Mutex::Autolock autoLock(mLock);
+    if (!mSyncQueues || mAudioQueue.empty() || mVideoQueue.empty()) {
+        return;
+    }
+
+    sp<MediaCodecBuffer> firstAudioBuffer = (*mAudioQueue.begin()).mBuffer;
+    sp<MediaCodecBuffer> firstVideoBuffer = (*mVideoQueue.begin()).mBuffer;
+
+    if (firstAudioBuffer == NULL || firstVideoBuffer == NULL) {
+        // EOS signalled on either queue.
+        syncQueuesDone_l();
+        return;
+    }
+
+    int64_t firstAudioTimeUs;
+    int64_t firstVideoTimeUs;
+    CHECK(firstAudioBuffer->meta()
+            ->findInt64("timeUs", &firstAudioTimeUs));
+    CHECK(firstVideoBuffer->meta()
+            ->findInt64("timeUs", &firstVideoTimeUs));
+
+    int64_t diff = firstVideoTimeUs - firstAudioTimeUs;
+
+    ALOGV("queueDiff = %.2f secs", diff / 1E6);
+
+    if (diff > 100000ll) {
+        // Audio data starts more than 0.1 secs before video.
+        // Drop some audio.
+
+        (*mAudioQueue.begin()).mNotifyConsumed->post();
+        mAudioQueue.erase(mAudioQueue.begin());
+        return;
+    }
+
+    syncQueuesDone_l();
+}
+
+void NuPlayer2::Renderer::syncQueuesDone_l() {
+    if (!mSyncQueues) {
+        return;
+    }
+
+    mSyncQueues = false;
+
+    if (!mAudioQueue.empty()) {
+        postDrainAudioQueue_l();
+    }
+
+    if (!mVideoQueue.empty()) {
+        mLock.unlock();
+        postDrainVideoQueue();
+        mLock.lock();
+    }
+}
+
+void NuPlayer2::Renderer::onQueueEOS(const sp<AMessage> &msg) {
+    int32_t audio;
+    CHECK(msg->findInt32("audio", &audio));
+
+    if (dropBufferIfStale(audio, msg)) {
+        return;
+    }
+
+    int32_t finalResult;
+    CHECK(msg->findInt32("finalResult", &finalResult));
+
+    QueueEntry entry;
+    entry.mOffset = 0;
+    entry.mFinalResult = finalResult;
+
+    if (audio) {
+        Mutex::Autolock autoLock(mLock);
+        if (mAudioQueue.empty() && mSyncQueues) {
+            syncQueuesDone_l();
+        }
+        mAudioQueue.push_back(entry);
+        postDrainAudioQueue_l();
+    } else {
+        if (mVideoQueue.empty() && getSyncQueues()) {
+            Mutex::Autolock autoLock(mLock);
+            syncQueuesDone_l();
+        }
+        mVideoQueue.push_back(entry);
+        postDrainVideoQueue();
+    }
+}
+
+void NuPlayer2::Renderer::onFlush(const sp<AMessage> &msg) {
+    int32_t audio, notifyComplete;
+    CHECK(msg->findInt32("audio", &audio));
+
+    {
+        Mutex::Autolock autoLock(mLock);
+        if (audio) {
+            notifyComplete = mNotifyCompleteAudio;
+            mNotifyCompleteAudio = false;
+            mLastAudioMediaTimeUs = -1;
+        } else {
+            notifyComplete = mNotifyCompleteVideo;
+            mNotifyCompleteVideo = false;
+        }
+
+        // If we're currently syncing the queues, i.e. dropping audio while
+        // aligning the first audio/video buffer times and only one of the
+        // two queues has data, we may starve that queue by not requesting
+        // more buffers from the decoder. If the other source then encounters
+        // a discontinuity that leads to flushing, we'll never find the
+        // corresponding discontinuity on the other queue.
+        // Therefore we'll stop syncing the queues if at least one of them
+        // is flushed.
+        syncQueuesDone_l();
+    }
+    clearAnchorTime();
+
+    ALOGV("flushing %s", audio ? "audio" : "video");
+    if (audio) {
+        {
+            Mutex::Autolock autoLock(mLock);
+            flushQueue(&mAudioQueue);
+
+            ++mAudioDrainGeneration;
+            ++mAudioEOSGeneration;
+            prepareForMediaRenderingStart_l();
+
+            // the frame count will be reset after flush.
+            clearAudioFirstAnchorTime_l();
+        }
+
+        mDrainAudioQueuePending = false;
+
+        if (offloadingAudio()) {
+            mAudioSink->pause();
+            mAudioSink->flush();
+            if (!mPaused) {
+                mAudioSink->start();
+            }
+        } else {
+            mAudioSink->pause();
+            mAudioSink->flush();
+            // Call stop() to signal to the AudioSink to completely fill the
+            // internal buffer before resuming playback.
+            // FIXME: this is ignored after flush().
+            mAudioSink->stop();
+            if (mPaused) {
+                // Race condition: if renderer is paused and audio sink is stopped,
+                // we need to make sure that the audio track buffer fully drains
+                // before delivering data.
+                // FIXME: remove this if we can detect if stop() is complete.
+                const int delayUs = 2 * 50 * 1000; // (2 full mixer thread cycles at 50ms)
+                mPauseDrainAudioAllowedUs = ALooper::GetNowUs() + delayUs;
+            } else {
+                mAudioSink->start();
+            }
+            mNumFramesWritten = 0;
+        }
+        mNextAudioClockUpdateTimeUs = -1;
+    } else {
+        flushQueue(&mVideoQueue);
+
+        mDrainVideoQueuePending = false;
+
+        if (mVideoScheduler != NULL) {
+            mVideoScheduler->restart();
+        }
+
+        Mutex::Autolock autoLock(mLock);
+        ++mVideoDrainGeneration;
+        prepareForMediaRenderingStart_l();
+    }
+
+    mVideoSampleReceived = false;
+
+    if (notifyComplete) {
+        notifyFlushComplete(audio);
+    }
+}
+
+void NuPlayer2::Renderer::flushQueue(List<QueueEntry> *queue) {
+    while (!queue->empty()) {
+        QueueEntry *entry = &*queue->begin();
+
+        if (entry->mBuffer != NULL) {
+            entry->mNotifyConsumed->post();
+        } else if (entry->mNotifyConsumed != nullptr) {
+            // Do we need to open the audio sink now?
+            onChangeAudioFormat(entry->mMeta, entry->mNotifyConsumed);
+        }
+
+        queue->erase(queue->begin());
+        entry = NULL;
+    }
+}
+
+void NuPlayer2::Renderer::notifyFlushComplete(bool audio) {
+    sp<AMessage> notify = mNotify->dup();
+    notify->setInt32("what", kWhatFlushComplete);
+    notify->setInt32("audio", static_cast<int32_t>(audio));
+    notify->post();
+}
+
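+// Returns true and posts the buffer's notifyConsumed if its queue generation no longer
+// matches the current one (e.g. the buffer was queued before a flush).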
+bool NuPlayer2::Renderer::dropBufferIfStale(
+        bool audio, const sp<AMessage> &msg) {
+    int32_t queueGeneration;
+    CHECK(msg->findInt32("queueGeneration", &queueGeneration));
+
+    if (queueGeneration == getQueueGeneration(audio)) {
+        return false;
+    }
+
+    sp<AMessage> notifyConsumed;
+    if (msg->findMessage("notifyConsumed", &notifyConsumed)) {
+        notifyConsumed->post();
+    }
+
+    return true;
+}
+
+void NuPlayer2::Renderer::onAudioSinkChanged() {
+    if (offloadingAudio()) {
+        return;
+    }
+    CHECK(!mDrainAudioQueuePending);
+    mNumFramesWritten = 0;
+    mAnchorNumFramesWritten = -1;
+    uint32_t written;
+    if (mAudioSink->getFramesWritten(&written) == OK) {
+        mNumFramesWritten = written;
+    }
+}
+
+void NuPlayer2::Renderer::onDisableOffloadAudio() {
+    Mutex::Autolock autoLock(mLock);
+    mFlags &= ~FLAG_OFFLOAD_AUDIO;
+    ++mAudioDrainGeneration;
+    if (mAudioRenderingStartGeneration != -1) {
+        prepareForMediaRenderingStart_l();
+    }
+}
+
+void NuPlayer2::Renderer::onEnableOffloadAudio() {
+    Mutex::Autolock autoLock(mLock);
+    mFlags |= FLAG_OFFLOAD_AUDIO;
+    ++mAudioDrainGeneration;
+    if (mAudioRenderingStartGeneration != -1) {
+        prepareForMediaRenderingStart_l();
+    }
+}
+
+void NuPlayer2::Renderer::onPause() {
+    if (mPaused) {
+        return;
+    }
+
+    {
+        Mutex::Autolock autoLock(mLock);
+        // We do not increment the audio drain generation so that we keep filling the
+        // audio buffer during pause.
+        ++mVideoDrainGeneration;
+        prepareForMediaRenderingStart_l();
+        mPaused = true;
+        mMediaClock->setPlaybackRate(0.0);
+    }
+
+    mDrainAudioQueuePending = false;
+    mDrainVideoQueuePending = false;
+
+    // Note: audio data may not have been decoded, and the AudioSink may not be opened.
+    mAudioSink->pause();
+    startAudioOffloadPauseTimeout();
+
+    ALOGV("now paused audio queue has %zu entries, video has %zu entries",
+          mAudioQueue.size(), mVideoQueue.size());
+}
+
+void NuPlayer2::Renderer::onResume() {
+    if (!mPaused) {
+        return;
+    }
+
+    // Note: audio data may not have been decoded, and the AudioSink may not be opened.
+    cancelAudioOffloadPauseTimeout();
+    if (mAudioSink->ready()) {
+        status_t err = mAudioSink->start();
+        if (err != OK) {
+            ALOGE("cannot start AudioSink err %d", err);
+            notifyAudioTearDown(kDueToError);
+        }
+    }
+
+    {
+        Mutex::Autolock autoLock(mLock);
+        mPaused = false;
+        // rendering started message may have been delayed if we were paused.
+        if (mRenderingDataDelivered) {
+            notifyIfMediaRenderingStarted_l();
+        }
+        // configure the audio sink, as we did not do it when pausing
+        if (mAudioSink != NULL && mAudioSink->ready()) {
+            mAudioSink->setPlaybackRate(mPlaybackSettings);
+        }
+
+        mMediaClock->setPlaybackRate(mPlaybackRate);
+
+        if (!mAudioQueue.empty()) {
+            postDrainAudioQueue_l();
+        }
+    }
+
+    if (!mVideoQueue.empty()) {
+        postDrainVideoQueue();
+    }
+}
+
+void NuPlayer2::Renderer::onSetVideoFrameRate(float fps) {
+    if (mVideoScheduler == NULL) {
+        mVideoScheduler = new VideoFrameScheduler();
+    }
+    mVideoScheduler->init(fps);
+}
+
+int32_t NuPlayer2::Renderer::getQueueGeneration(bool audio) {
+    Mutex::Autolock autoLock(mLock);
+    return (audio ? mAudioQueueGeneration : mVideoQueueGeneration);
+}
+
+int32_t NuPlayer2::Renderer::getDrainGeneration(bool audio) {
+    Mutex::Autolock autoLock(mLock);
+    return (audio ? mAudioDrainGeneration : mVideoDrainGeneration);
+}
+
+bool NuPlayer2::Renderer::getSyncQueues() {
+    Mutex::Autolock autoLock(mLock);
+    return mSyncQueues;
+}
+
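+// Stop and flush the audio sink, then notify NuPlayer2 so it can rebuild the audio path,
+// passing along the current position when it is available.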
+void NuPlayer2::Renderer::onAudioTearDown(AudioTearDownReason reason) {
+    if (mAudioTornDown) {
+        return;
+    }
+    mAudioTornDown = true;
+
+    int64_t currentPositionUs;
+    sp<AMessage> notify = mNotify->dup();
+    if (getCurrentPosition(&currentPositionUs) == OK) {
+        notify->setInt64("positionUs", currentPositionUs);
+    }
+
+    mAudioSink->stop();
+    mAudioSink->flush();
+
+    notify->setInt32("what", kWhatAudioTearDown);
+    notify->setInt32("reason", reason);
+    notify->post();
+}
+
+void NuPlayer2::Renderer::startAudioOffloadPauseTimeout() {
+    if (offloadingAudio()) {
+        mWakeLock->acquire();
+        sp<AMessage> msg = new AMessage(kWhatAudioOffloadPauseTimeout, this);
+        msg->setInt32("drainGeneration", mAudioOffloadPauseTimeoutGeneration);
+        msg->post(kOffloadPauseMaxUs);
+    }
+}
+
+void NuPlayer2::Renderer::cancelAudioOffloadPauseTimeout() {
+    // We may have called startAudioOffloadPauseTimeout() without
+    // the AudioSink open and with offloadingAudio enabled.
+    //
+    // When we cancel, it may be that offloadingAudio is subsequently disabled, so regardless
+    // we always release the wakelock and increment the pause timeout generation.
+    //
+    // Note: The acquired wakelock prevents the device from suspending
+    // immediately after offload pause (in case a resume happens shortly thereafter).
+    mWakeLock->release(true);
+    ++mAudioOffloadPauseTimeoutGeneration;
+}
+
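+// Open (or re-open) the audio sink for the given format. When offloading is enabled,
+// try offload mode first and fall back to non-offload PCM on failure (unless
+// offloadOnly is set).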
+status_t NuPlayer2::Renderer::onOpenAudioSink(
+        const sp<AMessage> &format,
+        bool offloadOnly,
+        bool hasVideo,
+        uint32_t flags,
+        bool isStreaming) {
+    ALOGV("openAudioSink: offloadOnly(%d) offloadingAudio(%d)",
+            offloadOnly, offloadingAudio());
+    bool audioSinkChanged = false;
+
+    int32_t numChannels;
+    CHECK(format->findInt32("channel-count", &numChannels));
+
+    int32_t channelMask;
+    if (!format->findInt32("channel-mask", &channelMask)) {
+        // signal to the AudioSink to derive the mask from count.
+        channelMask = CHANNEL_MASK_USE_CHANNEL_ORDER;
+    }
+
+    int32_t sampleRate;
+    CHECK(format->findInt32("sample-rate", &sampleRate));
+
+    if (offloadingAudio()) {
+        audio_format_t audioFormat = AUDIO_FORMAT_PCM_16_BIT;
+        AString mime;
+        CHECK(format->findString("mime", &mime));
+        status_t err = mapMimeToAudioFormat(audioFormat, mime.c_str());
+
+        if (err != OK) {
+            ALOGE("Couldn't map mime \"%s\" to a valid "
+                    "audio_format", mime.c_str());
+            onDisableOffloadAudio();
+        } else {
+            ALOGV("Mime \"%s\" mapped to audio_format 0x%x",
+                    mime.c_str(), audioFormat);
+
+            int avgBitRate = -1;
+            format->findInt32("bitrate", &avgBitRate);
+
+            int32_t aacProfile = -1;
+            if (audioFormat == AUDIO_FORMAT_AAC
+                    && format->findInt32("aac-profile", &aacProfile)) {
+                // Redefine the AAC format according to the AAC profile
+                mapAACProfileToAudioFormat(
+                        audioFormat,
+                        aacProfile);
+            }
+
+            audio_offload_info_t offloadInfo = AUDIO_INFO_INITIALIZER;
+            offloadInfo.duration_us = -1;
+            format->findInt64(
+                    "durationUs", &offloadInfo.duration_us);
+            offloadInfo.sample_rate = sampleRate;
+            offloadInfo.channel_mask = channelMask;
+            offloadInfo.format = audioFormat;
+            offloadInfo.stream_type = AUDIO_STREAM_MUSIC;
+            offloadInfo.bit_rate = avgBitRate;
+            offloadInfo.has_video = hasVideo;
+            offloadInfo.is_streaming = isStreaming;
+
+            if (memcmp(&mCurrentOffloadInfo, &offloadInfo, sizeof(offloadInfo)) == 0) {
+                ALOGV("openAudioSink: no change in offload mode");
+                // no change from previous configuration, everything ok.
+                return OK;
+            }
+            mCurrentPcmInfo = AUDIO_PCMINFO_INITIALIZER;
+
+            ALOGV("openAudioSink: try to open AudioSink in offload mode");
+            uint32_t offloadFlags = flags;
+            offloadFlags |= AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD;
+            offloadFlags &= ~AUDIO_OUTPUT_FLAG_DEEP_BUFFER;
+            audioSinkChanged = true;
+            mAudioSink->close();
+
+            err = mAudioSink->open(
+                    sampleRate,
+                    numChannels,
+                    (audio_channel_mask_t)channelMask,
+                    audioFormat,
+                    0 /* bufferCount - unused */,
+                    &NuPlayer2::Renderer::AudioSinkCallback,
+                    this,
+                    (audio_output_flags_t)offloadFlags,
+                    &offloadInfo);
+
+            if (err == OK) {
+                err = mAudioSink->setPlaybackRate(mPlaybackSettings);
+            }
+
+            if (err == OK) {
+                // If the playback is offloaded to h/w, we pass
+                // the HAL some metadata information.
+                // We don't want to do this for PCM because it
+                // will be going through the AudioFlinger mixer
+                // before reaching the hardware.
+                // TODO
+                mCurrentOffloadInfo = offloadInfo;
+                if (!mPaused) { // for preview mode, don't start if paused
+                    err = mAudioSink->start();
+                }
+                ALOGV_IF(err == OK, "openAudioSink: offload succeeded");
+            }
+            if (err != OK) {
+                // Clean up, fall back to non offload mode.
+                mAudioSink->close();
+                onDisableOffloadAudio();
+                mCurrentOffloadInfo = AUDIO_INFO_INITIALIZER;
+                ALOGV("openAudioSink: offload failed");
+                if (offloadOnly) {
+                    notifyAudioTearDown(kForceNonOffload);
+                }
+            } else {
+                mUseAudioCallback = true;  // offload mode transfers data through callback
+                ++mAudioDrainGeneration;  // discard pending kWhatDrainAudioQueue message.
+            }
+        }
+    }
+    if (!offloadOnly && !offloadingAudio()) {
+        ALOGV("openAudioSink: open AudioSink in NON-offload mode");
+        uint32_t pcmFlags = flags;
+        pcmFlags &= ~AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD;
+
+        const PcmInfo info = {
+                (audio_channel_mask_t)channelMask,
+                (audio_output_flags_t)pcmFlags,
+                AUDIO_FORMAT_PCM_16_BIT, // TODO: change to audioFormat
+                numChannels,
+                sampleRate
+        };
+        if (memcmp(&mCurrentPcmInfo, &info, sizeof(info)) == 0) {
+            ALOGV("openAudioSink: no change in pcm mode");
+            // no change from previous configuration, everything ok.
+            return OK;
+        }
+
+        audioSinkChanged = true;
+        mAudioSink->close();
+        mCurrentOffloadInfo = AUDIO_INFO_INITIALIZER;
+        // Note: It is possible to set up the callback, but not use it to send audio data.
+        // This requires a fix in AudioSink to explicitly specify the transfer mode.
+        mUseAudioCallback = getUseAudioCallbackSetting();
+        if (mUseAudioCallback) {
+            ++mAudioDrainGeneration;  // discard pending kWhatDrainAudioQueue message.
+        }
+
+        // Compute the desired buffer size.
+        // For callback mode, the amount of time before wakeup is about half the buffer size.
+        const uint32_t frameCount =
+                (unsigned long long)sampleRate * getAudioSinkPcmMsSetting() / 1000;
+
+        // The doNotReconnect flag means the AudioSink will signal back and let NuPlayer2
+        // re-construct the AudioSink. We don't want this when there's video because it will
+        // cause a video seek to the previous I frame. But we do want this when there's only
+        // audio because it gives NuPlayer2 a chance to switch from non-offload mode to
+        // offload mode. So we only set doNotReconnect when there's no video.
+        const bool doNotReconnect = !hasVideo;
+
+        // We should always be able to set our playback settings if the sink is closed.
+        LOG_ALWAYS_FATAL_IF(mAudioSink->setPlaybackRate(mPlaybackSettings) != OK,
+                "onOpenAudioSink: can't set playback rate on closed sink");
+        status_t err = mAudioSink->open(
+                    sampleRate,
+                    numChannels,
+                    (audio_channel_mask_t)channelMask,
+                    AUDIO_FORMAT_PCM_16_BIT,
+                    0 /* bufferCount - unused */,
+                    mUseAudioCallback ? &NuPlayer2::Renderer::AudioSinkCallback : NULL,
+                    mUseAudioCallback ? this : NULL,
+                    (audio_output_flags_t)pcmFlags,
+                    NULL,
+                    doNotReconnect,
+                    frameCount);
+        if (err != OK) {
+            ALOGW("openAudioSink: non offloaded open failed status: %d", err);
+            mAudioSink->close();
+            mCurrentPcmInfo = AUDIO_PCMINFO_INITIALIZER;
+            return err;
+        }
+        mCurrentPcmInfo = info;
+        if (!mPaused) { // for preview mode, don't start if paused
+            mAudioSink->start();
+        }
+    }
+    if (audioSinkChanged) {
+        onAudioSinkChanged();
+    }
+    mAudioTornDown = false;
+    return OK;
+}
+
+void NuPlayer2::Renderer::onCloseAudioSink() {
+    mAudioSink->close();
+    mCurrentOffloadInfo = AUDIO_INFO_INITIALIZER;
+    mCurrentPcmInfo = AUDIO_PCMINFO_INITIALIZER;
+}
+
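+// Re-open the audio sink with the format carried in |meta|, then post |notify|
+// (with "err" set on failure).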
+void NuPlayer2::Renderer::onChangeAudioFormat(
+        const sp<AMessage> &meta, const sp<AMessage> &notify) {
+    sp<AMessage> format;
+    CHECK(meta->findMessage("format", &format));
+
+    int32_t offloadOnly;
+    CHECK(meta->findInt32("offload-only", &offloadOnly));
+
+    int32_t hasVideo;
+    CHECK(meta->findInt32("has-video", &hasVideo));
+
+    uint32_t flags;
+    CHECK(meta->findInt32("flags", (int32_t *)&flags));
+
+    uint32_t isStreaming;
+    CHECK(meta->findInt32("isStreaming", (int32_t *)&isStreaming));
+
+    status_t err = onOpenAudioSink(format, offloadOnly, hasVideo, flags, isStreaming);
+
+    if (err != OK) {
+        notify->setInt32("err", err);
+    }
+    notify->post();
+}
+
+}  // namespace android
+
diff --git a/media/libmedia/nuplayer2/NuPlayer2Renderer.h b/media/libmedia/nuplayer2/NuPlayer2Renderer.h
new file mode 100644
index 0000000..3007654
--- /dev/null
+++ b/media/libmedia/nuplayer2/NuPlayer2Renderer.h
@@ -0,0 +1,303 @@
+/*
+ * Copyright 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef NUPLAYER2_RENDERER_H_
+
+#define NUPLAYER2_RENDERER_H_
+
+#include <media/AudioResamplerPublic.h>
+#include <media/AVSyncSettings.h>
+
+#include "NuPlayer2.h"
+
+namespace android {
+
+class  JWakeLock;
+struct MediaClock;
+class MediaCodecBuffer;
+struct VideoFrameScheduler;
+
+struct NuPlayer2::Renderer : public AHandler {
+    enum Flags {
+        FLAG_REAL_TIME = 1,
+        FLAG_OFFLOAD_AUDIO = 2,
+    };
+    Renderer(const sp<MediaPlayer2Base::AudioSink> &sink,
+             const sp<MediaClock> &mediaClock,
+             const sp<AMessage> &notify,
+             uint32_t flags = 0);
+
+    static size_t AudioSinkCallback(
+            MediaPlayer2Base::AudioSink *audioSink,
+            void *data, size_t size, void *me,
+            MediaPlayer2Base::AudioSink::cb_event_t event);
+
+    void queueBuffer(
+            bool audio,
+            const sp<MediaCodecBuffer> &buffer,
+            const sp<AMessage> &notifyConsumed);
+
+    void queueEOS(bool audio, status_t finalResult);
+
+    status_t setPlaybackSettings(const AudioPlaybackRate &rate /* sanitized */);
+    status_t getPlaybackSettings(AudioPlaybackRate *rate /* nonnull */);
+    status_t setSyncSettings(const AVSyncSettings &sync, float videoFpsHint);
+    status_t getSyncSettings(AVSyncSettings *sync /* nonnull */, float *videoFps /* nonnull */);
+
+    void flush(bool audio, bool notifyComplete);
+
+    void signalTimeDiscontinuity();
+
+    void signalDisableOffloadAudio();
+    void signalEnableOffloadAudio();
+
+    void pause();
+    void resume();
+
+    void setVideoFrameRate(float fps);
+
+    status_t getCurrentPosition(int64_t *mediaUs);
+    int64_t getVideoLateByUs();
+
+    status_t openAudioSink(
+            const sp<AMessage> &format,
+            bool offloadOnly,
+            bool hasVideo,
+            uint32_t flags,
+            bool *isOffloaded,
+            bool isStreaming);
+    void closeAudioSink();
+
+    // re-open audio sink after all pending audio buffers played.
+    void changeAudioFormat(
+            const sp<AMessage> &format,
+            bool offloadOnly,
+            bool hasVideo,
+            uint32_t flags,
+            bool isStreaming,
+            const sp<AMessage> &notify);
+
+    enum {
+        kWhatEOS                      = 'eos ',
+        kWhatFlushComplete            = 'fluC',
+        kWhatPosition                 = 'posi',
+        kWhatVideoRenderingStart      = 'vdrd',
+        kWhatMediaRenderingStart      = 'mdrd',
+        kWhatAudioTearDown            = 'adTD',
+        kWhatAudioOffloadPauseTimeout = 'aOPT',
+    };
+
+    enum AudioTearDownReason {
+        kDueToError = 0,   // Could restart with either offload or non-offload.
+        kDueToTimeout,
+        kForceNonOffload,  // Restart only with non-offload.
+    };
+
+protected:
+    virtual ~Renderer();
+
+    virtual void onMessageReceived(const sp<AMessage> &msg);
+
+private:
+    enum {
+        kWhatDrainAudioQueue     = 'draA',
+        kWhatDrainVideoQueue     = 'draV',
+        kWhatPostDrainVideoQueue = 'pDVQ',
+        kWhatQueueBuffer         = 'queB',
+        kWhatQueueEOS            = 'qEOS',
+        kWhatConfigPlayback      = 'cfPB',
+        kWhatConfigSync          = 'cfSy',
+        kWhatGetPlaybackSettings = 'gPbS',
+        kWhatGetSyncSettings     = 'gSyS',
+        kWhatFlush               = 'flus',
+        kWhatPause               = 'paus',
+        kWhatResume              = 'resm',
+        kWhatOpenAudioSink       = 'opnA',
+        kWhatCloseAudioSink      = 'clsA',
+        kWhatChangeAudioFormat   = 'chgA',
+        kWhatStopAudioSink       = 'stpA',
+        kWhatDisableOffloadAudio = 'noOA',
+        kWhatEnableOffloadAudio  = 'enOA',
+        kWhatSetVideoFrameRate   = 'sVFR',
+    };
+
+    // if mBuffer != nullptr, it's a buffer containing real data.
+    // else if mNotifyConsumed == nullptr, it's EOS.
+    // else it's a tag for re-opening the audio sink in a different format.
+    struct QueueEntry {
+        sp<MediaCodecBuffer> mBuffer;
+        sp<AMessage> mMeta;
+        sp<AMessage> mNotifyConsumed;
+        size_t mOffset;
+        status_t mFinalResult;
+        int32_t mBufferOrdinal;
+    };
+
+    static const int64_t kMinPositionUpdateDelayUs;
+
+    sp<MediaPlayer2Base::AudioSink> mAudioSink;
+    bool mUseVirtualAudioSink;
+    sp<AMessage> mNotify;
+    Mutex mLock;
+    uint32_t mFlags;
+    List<QueueEntry> mAudioQueue;
+    List<QueueEntry> mVideoQueue;
+    uint32_t mNumFramesWritten;
+    sp<VideoFrameScheduler> mVideoScheduler;
+
+    bool mDrainAudioQueuePending;
+    bool mDrainVideoQueuePending;
+    int32_t mAudioQueueGeneration;
+    int32_t mVideoQueueGeneration;
+    int32_t mAudioDrainGeneration;
+    int32_t mVideoDrainGeneration;
+    int32_t mAudioEOSGeneration;
+
+    const sp<MediaClock> mMediaClock;
+    float mPlaybackRate; // audio track rate
+
+    AudioPlaybackRate mPlaybackSettings;
+    AVSyncSettings mSyncSettings;
+    float mVideoFpsHint;
+
+    int64_t mAudioFirstAnchorTimeMediaUs;
+    int64_t mAnchorTimeMediaUs;
+    int64_t mAnchorNumFramesWritten;
+    int64_t mVideoLateByUs;
+    int64_t mNextVideoTimeMediaUs;
+    bool mHasAudio;
+    bool mHasVideo;
+
+    bool mNotifyCompleteAudio;
+    bool mNotifyCompleteVideo;
+
+    bool mSyncQueues;
+
+    // modified on only renderer's thread.
+    bool mPaused;
+    int64_t mPauseDrainAudioAllowedUs; // time when we can drain/deliver audio in pause mode.
+
+    bool mVideoSampleReceived;
+    bool mVideoRenderingStarted;
+    int32_t mVideoRenderingStartGeneration;
+    int32_t mAudioRenderingStartGeneration;
+    bool mRenderingDataDelivered;
+
+    int64_t mNextAudioClockUpdateTimeUs;
+    // the media timestamp of last audio sample right before EOS.
+    int64_t mLastAudioMediaTimeUs;
+
+    int32_t mAudioOffloadPauseTimeoutGeneration;
+    bool mAudioTornDown;
+    audio_offload_info_t mCurrentOffloadInfo;
+
+    struct PcmInfo {
+        audio_channel_mask_t mChannelMask;
+        audio_output_flags_t mFlags;
+        audio_format_t mFormat;
+        int32_t mNumChannels;
+        int32_t mSampleRate;
+    };
+    PcmInfo mCurrentPcmInfo;
+    static const PcmInfo AUDIO_PCMINFO_INITIALIZER;
+
+    int32_t mTotalBuffersQueued;
+    int32_t mLastAudioBufferDrained;
+    bool mUseAudioCallback;
+
+    sp<JWakeLock> mWakeLock;
+
+    status_t getCurrentPositionOnLooper(int64_t *mediaUs);
+    status_t getCurrentPositionOnLooper(
+            int64_t *mediaUs, int64_t nowUs, bool allowPastQueuedVideo = false);
+    bool getCurrentPositionIfPaused_l(int64_t *mediaUs);
+    status_t getCurrentPositionFromAnchor(
+            int64_t *mediaUs, int64_t nowUs, bool allowPastQueuedVideo = false);
+
+    void notifyEOSCallback();
+    size_t fillAudioBuffer(void *buffer, size_t size);
+
+    bool onDrainAudioQueue();
+    void drainAudioQueueUntilLastEOS();
+    int64_t getPendingAudioPlayoutDurationUs(int64_t nowUs);
+    void postDrainAudioQueue_l(int64_t delayUs = 0);
+
+    void clearAnchorTime();
+    void clearAudioFirstAnchorTime_l();
+    void setAudioFirstAnchorTimeIfNeeded_l(int64_t mediaUs);
+    void setVideoLateByUs(int64_t lateUs);
+
+    void onNewAudioMediaTime(int64_t mediaTimeUs);
+    int64_t getRealTimeUs(int64_t mediaTimeUs, int64_t nowUs);
+
+    void onDrainVideoQueue();
+    void postDrainVideoQueue();
+
+    void prepareForMediaRenderingStart_l();
+    void notifyIfMediaRenderingStarted_l();
+
+    void onQueueBuffer(const sp<AMessage> &msg);
+    void onQueueEOS(const sp<AMessage> &msg);
+    void onFlush(const sp<AMessage> &msg);
+    void onAudioSinkChanged();
+    void onDisableOffloadAudio();
+    void onEnableOffloadAudio();
+    status_t onConfigPlayback(const AudioPlaybackRate &rate /* sanitized */);
+    status_t onGetPlaybackSettings(AudioPlaybackRate *rate /* nonnull */);
+    status_t onConfigSync(const AVSyncSettings &sync, float videoFpsHint);
+    status_t onGetSyncSettings(AVSyncSettings *sync /* nonnull */, float *videoFps /* nonnull */);
+
+    void onPause();
+    void onResume();
+    void onSetVideoFrameRate(float fps);
+    int32_t getQueueGeneration(bool audio);
+    int32_t getDrainGeneration(bool audio);
+    bool getSyncQueues();
+    void onAudioTearDown(AudioTearDownReason reason);
+    status_t onOpenAudioSink(
+            const sp<AMessage> &format,
+            bool offloadOnly,
+            bool hasVideo,
+            uint32_t flags,
+            bool isStreaming);
+    void onCloseAudioSink();
+    void onChangeAudioFormat(const sp<AMessage> &meta, const sp<AMessage> &notify);
+
+    void notifyEOS(bool audio, status_t finalResult, int64_t delayUs = 0);
+    void notifyEOS_l(bool audio, status_t finalResult, int64_t delayUs = 0);
+    void notifyFlushComplete(bool audio);
+    void notifyPosition();
+    void notifyVideoLateBy(int64_t lateByUs);
+    void notifyVideoRenderingStart();
+    void notifyAudioTearDown(AudioTearDownReason reason);
+
+    void flushQueue(List<QueueEntry> *queue);
+    bool dropBufferIfStale(bool audio, const sp<AMessage> &msg);
+    void syncQueuesDone_l();
+
+    bool offloadingAudio() const { return (mFlags & FLAG_OFFLOAD_AUDIO) != 0; }
+
+    void startAudioOffloadPauseTimeout();
+    void cancelAudioOffloadPauseTimeout();
+
+    int64_t getDurationUsIfPlayedAtSampleRate(uint32_t numFrames);
+
+    DISALLOW_EVIL_CONSTRUCTORS(Renderer);
+};
+
+} // namespace android
+
+#endif  // NUPLAYER2_RENDERER_H_
diff --git a/media/libmedia/nuplayer2/NuPlayer2Source.h b/media/libmedia/nuplayer2/NuPlayer2Source.h
new file mode 100644
index 0000000..2df6136
--- /dev/null
+++ b/media/libmedia/nuplayer2/NuPlayer2Source.h
@@ -0,0 +1,172 @@
+/*
+ * Copyright 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef NUPLAYER2_SOURCE_H_
+
+#define NUPLAYER2_SOURCE_H_
+
+#include "NuPlayer2.h"
+
+#include <media/mediaplayer2.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/MetaData.h>
+#include <utils/Vector.h>
+
+namespace android {
+
+struct ABuffer;
+struct AMediaCryptoWrapper;
+class MediaBuffer;
+
+struct NuPlayer2::Source : public AHandler {
+    enum Flags {
+        FLAG_CAN_PAUSE          = 1,
+        FLAG_CAN_SEEK_BACKWARD  = 2,  // the "10 sec back button"
+        FLAG_CAN_SEEK_FORWARD   = 4,  // the "10 sec forward button"
+        FLAG_CAN_SEEK           = 8,  // the "seek bar"
+        FLAG_DYNAMIC_DURATION   = 16,
+        FLAG_SECURE             = 32, // Secure codec is required.
+        FLAG_PROTECTED          = 64, // The screen needs to be protected (screenshot is disabled).
+    };
+
+    enum {
+        kWhatPrepared,
+        kWhatFlagsChanged,
+        kWhatVideoSizeChanged,
+        kWhatBufferingUpdate,
+        kWhatPauseOnBufferingStart,
+        kWhatResumeOnBufferingEnd,
+        kWhatCacheStats,
+        kWhatSubtitleData,
+        kWhatTimedTextData,
+        kWhatTimedMetaData,
+        kWhatQueueDecoderShutdown,
+        kWhatDrmNoLicense,
+        kWhatInstantiateSecureDecoders,
+        // Modular DRM
+        kWhatDrmInfo,
+    };
+
+    // The provided message is used to notify the player about various
+    // events.
+    explicit Source(const sp<AMessage> &notify)
+        : mNotify(notify) {
+    }
+
+    virtual status_t getBufferingSettings(
+            BufferingSettings* buffering /* nonnull */) = 0;
+    virtual status_t setBufferingSettings(const BufferingSettings& buffering) = 0;
+
+    virtual void prepareAsync() = 0;
+
+    virtual void start() = 0;
+    virtual void stop() {}
+    virtual void pause() {}
+    virtual void resume() {}
+
+    // Explicitly disconnect the underlying data source
+    virtual void disconnect() {}
+
+    // Returns OK iff more data was available,
+    // an error or ERROR_END_OF_STREAM if not.
+    virtual status_t feedMoreTSData() = 0;
+
+    // Returns a non-NULL format when the specified track exists.
+    // When the format has "err" set to -EWOULDBLOCK, the source needs more time to get valid
+    // metadata.
+    // Returns NULL if the specified track doesn't exist or is invalid.
+    virtual sp<AMessage> getFormat(bool audio);
+
+    virtual sp<MetaData> getFormatMeta(bool /* audio */) { return NULL; }
+    virtual sp<MetaData> getFileFormatMeta() const { return NULL; }
+
+    virtual status_t dequeueAccessUnit(
+            bool audio, sp<ABuffer> *accessUnit) = 0;
+
+    virtual status_t getDuration(int64_t * /* durationUs */) {
+        return INVALID_OPERATION;
+    }
+
+    virtual size_t getTrackCount() const {
+        return 0;
+    }
+
+    virtual sp<AMessage> getTrackInfo(size_t /* trackIndex */) const {
+        return NULL;
+    }
+
+    virtual ssize_t getSelectedTrack(media_track_type /* type */) const {
+        return INVALID_OPERATION;
+    }
+
+    virtual status_t selectTrack(size_t /* trackIndex */, bool /* select */, int64_t /* timeUs*/) {
+        return INVALID_OPERATION;
+    }
+
+    virtual status_t seekTo(
+            int64_t /* seekTimeUs */,
+            MediaPlayer2SeekMode /* mode */ = MediaPlayer2SeekMode::SEEK_PREVIOUS_SYNC) {
+        return INVALID_OPERATION;
+    }
+
+    virtual status_t setBuffers(bool /* audio */, Vector<MediaBuffer *> &/* buffers */) {
+        return INVALID_OPERATION;
+    }
+
+    virtual bool isRealTime() const {
+        return false;
+    }
+
+    virtual bool isStreaming() const {
+        return true;
+    }
+
+    virtual void setOffloadAudio(bool /* offload */) {}
+
+    // Modular DRM
+    virtual status_t prepareDrm(
+            const uint8_t /* uuid */[16], const Vector<uint8_t> & /* drmSessionId */,
+            sp<AMediaCryptoWrapper> * /* crypto */) {
+        return INVALID_OPERATION;
+    }
+
+    virtual status_t releaseDrm() {
+        return INVALID_OPERATION;
+    }
+
+protected:
+    virtual ~Source() {}
+
+    virtual void onMessageReceived(const sp<AMessage> &msg);
+
+    sp<AMessage> dupNotify() const { return mNotify->dup(); }
+
+    void notifyFlagsChanged(uint32_t flags);
+    void notifyVideoSizeChanged(const sp<AMessage> &format = NULL);
+    void notifyInstantiateSecureDecoders(const sp<AMessage> &reply);
+    void notifyPrepared(status_t err = OK);
+    // Modular DRM
+    void notifyDrmInfo(const sp<ABuffer> &buffer);
+
+private:
+    sp<AMessage> mNotify;
+
+    DISALLOW_EVIL_CONSTRUCTORS(Source);
+};
+
+}  // namespace android
+
+#endif  // NUPLAYER2_SOURCE_H_
+
diff --git a/media/libmedia/nuplayer2/NuPlayer2StreamListener.cpp b/media/libmedia/nuplayer2/NuPlayer2StreamListener.cpp
new file mode 100644
index 0000000..0769711
--- /dev/null
+++ b/media/libmedia/nuplayer2/NuPlayer2StreamListener.cpp
@@ -0,0 +1,173 @@
+/*
+ * Copyright 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "NuPlayer2StreamListener"
+#include <utils/Log.h>
+
+#include "NuPlayer2StreamListener.h"
+
+#include <binder/MemoryDealer.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/MediaErrors.h>
+
+namespace android {
+
+NuPlayer2::StreamListener::StreamListener(
+        const sp<IStreamSource> &source,
+        const sp<AHandler> &targetHandler)
+    : mSource(source),
+      mTargetHandler(targetHandler),
+      mEOS(false),
+      mSendDataNotification(true) {
+    mSource->setListener(this);
+
+    mMemoryDealer = new MemoryDealer(kNumBuffers * kBufferSize);
+    for (size_t i = 0; i < kNumBuffers; ++i) {
+        sp<IMemory> mem = mMemoryDealer->allocate(kBufferSize);
+        CHECK(mem != NULL);
+
+        mBuffers.push(mem);
+    }
+    mSource->setBuffers(mBuffers);
+}
+
+void NuPlayer2::StreamListener::start() {
+    for (size_t i = 0; i < kNumBuffers; ++i) {
+        mSource->onBufferAvailable(i);
+    }
+}
+
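+// Called by the stream source when a buffer has been filled. The entry is
+// queued and, if the reader is waiting for data, kWhatMoreDataQueued is
+// posted to the target handler.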
+void NuPlayer2::StreamListener::queueBuffer(size_t index, size_t size) {
+    QueueEntry entry;
+    entry.mIsCommand = false;
+    entry.mIndex = index;
+    entry.mSize = size;
+    entry.mOffset = 0;
+
+    Mutex::Autolock autoLock(mLock);
+    mQueue.push_back(entry);
+
+    if (mSendDataNotification) {
+        mSendDataNotification = false;
+
+        if (mTargetHandler != NULL) {
+            (new AMessage(kWhatMoreDataQueued, mTargetHandler))->post();
+        }
+    }
+}
+
+void NuPlayer2::StreamListener::issueCommand(
+        Command cmd, bool synchronous, const sp<AMessage> &extra) {
+    CHECK(!synchronous);
+
+    QueueEntry entry;
+    entry.mIsCommand = true;
+    entry.mCommand = cmd;
+    entry.mExtra = extra;
+
+    Mutex::Autolock autoLock(mLock);
+    mQueue.push_back(entry);
+
+    if (mSendDataNotification) {
+        mSendDataNotification = false;
+
+        if (mTargetHandler != NULL) {
+            (new AMessage(kWhatMoreDataQueued, mTargetHandler))->post();
+        }
+    }
+}
+
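+// Drains the entry at the front of the queue into |data|. Returns the number
+// of bytes copied, 0 on EOS, -EWOULDBLOCK when nothing is queued yet, or
+// INFO_DISCONTINUITY when a discontinuity command is at the head of the queue.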
+ssize_t NuPlayer2::StreamListener::read(
+        void *data, size_t size, sp<AMessage> *extra) {
+    CHECK_GT(size, 0u);
+
+    extra->clear();
+
+    Mutex::Autolock autoLock(mLock);
+
+    if (mEOS) {
+        return 0;
+    }
+
+    if (mQueue.empty()) {
+        mSendDataNotification = true;
+
+        return -EWOULDBLOCK;
+    }
+
+    QueueEntry *entry = &*mQueue.begin();
+
+    if (entry->mIsCommand) {
+        switch (entry->mCommand) {
+            case EOS:
+            {
+                mQueue.erase(mQueue.begin());
+                entry = NULL;
+
+                mEOS = true;
+                return 0;
+            }
+
+            case DISCONTINUITY:
+            {
+                *extra = entry->mExtra;
+
+                mQueue.erase(mQueue.begin());
+                entry = NULL;
+
+                return INFO_DISCONTINUITY;
+            }
+
+            default:
+                TRESPASS();
+                break;
+        }
+    }
+
+    size_t copy = entry->mSize;
+    if (copy > size) {
+        copy = size;
+    }
+
+    if (entry->mIndex >= mBuffers.size()) {
+        return ERROR_MALFORMED;
+    }
+
+    sp<IMemory> mem = mBuffers.editItemAt(entry->mIndex);
+    if (mem == NULL || mem->size() < copy || mem->size() - copy < entry->mOffset) {
+        return ERROR_MALFORMED;
+    }
+
+    memcpy(data,
+           (const uint8_t *)mem->pointer()
+            + entry->mOffset,
+           copy);
+
+    entry->mOffset += copy;
+    entry->mSize -= copy;
+
+    if (entry->mSize == 0) {
+        mSource->onBufferAvailable(entry->mIndex);
+        mQueue.erase(mQueue.begin());
+        entry = NULL;
+    }
+
+    return copy;
+}
+
+}  // namespace android
diff --git a/media/libmedia/nuplayer2/NuPlayer2StreamListener.h b/media/libmedia/nuplayer2/NuPlayer2StreamListener.h
new file mode 100644
index 0000000..4327b24
--- /dev/null
+++ b/media/libmedia/nuplayer2/NuPlayer2StreamListener.h
@@ -0,0 +1,74 @@
+/*
+ * Copyright 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef NUPLAYER2_STREAM_LISTENER_H_
+
+#define NUPLAYER2_STREAM_LISTENER_H_
+
+#include "NuPlayer2.h"
+
+#include <media/IStreamSource.h>
+
+namespace android {
+
+class MemoryDealer;
+
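+// Bridges an IStreamSource to NuPlayer2: buffers handed back by the stream
+// source are queued here and later drained by StreamingSource via read().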
+struct NuPlayer2::StreamListener : public BnStreamListener {
+    StreamListener(
+            const sp<IStreamSource> &source,
+            const sp<AHandler> &targetHandler);
+
+    virtual void queueBuffer(size_t index, size_t size);
+
+    virtual void issueCommand(
+            Command cmd, bool synchronous, const sp<AMessage> &extra);
+
+    void start();
+    ssize_t read(void *data, size_t size, sp<AMessage> *extra);
+
+private:
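+    // Each buffer holds ten 188-byte MPEG-TS packets.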
+    enum {
+        kNumBuffers = 8,
+        kBufferSize = 188 * 10
+    };
+
+    struct QueueEntry {
+        bool mIsCommand;
+
+        size_t mIndex;
+        size_t mSize;
+        size_t mOffset;
+
+        Command mCommand;
+        sp<AMessage> mExtra;
+    };
+
+    Mutex mLock;
+
+    sp<IStreamSource> mSource;
+    sp<AHandler> mTargetHandler;
+    sp<MemoryDealer> mMemoryDealer;
+    Vector<sp<IMemory> > mBuffers;
+    List<QueueEntry> mQueue;
+    bool mEOS;
+    bool mSendDataNotification;
+
+    DISALLOW_EVIL_CONSTRUCTORS(StreamListener);
+};
+
+}  // namespace android
+
+#endif // NUPLAYER2_STREAM_LISTENER_H_
diff --git a/media/libmedia/nuplayer2/RTSPSource.cpp b/media/libmedia/nuplayer2/RTSPSource.cpp
new file mode 100644
index 0000000..3f6966d
--- /dev/null
+++ b/media/libmedia/nuplayer2/RTSPSource.cpp
@@ -0,0 +1,904 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "RTSPSource"
+#include <utils/Log.h>
+
+#include "RTSPSource.h"
+
+#include "AnotherPacketSource.h"
+#include "MyHandler.h"
+#include "SDPLoader.h"
+
+#include <media/MediaHTTPService.h>
+#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/MetaData.h>
+
+namespace android {
+
+const int64_t kNearEOSTimeoutUs = 2000000ll; // 2 secs
+
+// Default Buffer Underflow/Prepare/StartServer/Overflow Marks
+static const int kUnderflowMarkMs   =  1000;  // 1 second
+static const int kPrepareMarkMs     =  3000;  // 3 seconds
+//static const int kStartServerMarkMs =  5000;
+static const int kOverflowMarkMs    = 10000;  // 10 seconds
+
+NuPlayer2::RTSPSource::RTSPSource(
+        const sp<AMessage> &notify,
+        const sp<MediaHTTPService> &httpService,
+        const char *url,
+        const KeyedVector<String8, String8> *headers,
+        bool uidValid,
+        uid_t uid,
+        bool isSDP)
+    : Source(notify),
+      mHTTPService(httpService),
+      mURL(url),
+      mUIDValid(uidValid),
+      mUID(uid),
+      mFlags(0),
+      mIsSDP(isSDP),
+      mState(DISCONNECTED),
+      mFinalResult(OK),
+      mDisconnectReplyID(0),
+      mBuffering(false),
+      mInPreparationPhase(true),
+      mEOSPending(false),
+      mSeekGeneration(0),
+      mEOSTimeoutAudio(0),
+      mEOSTimeoutVideo(0) {
+    mBufferingSettings.mInitialMarkMs = kPrepareMarkMs;
+    mBufferingSettings.mResumePlaybackMarkMs = kOverflowMarkMs;
+    if (headers) {
+        mExtraHeaders = *headers;
+
+        ssize_t index =
+            mExtraHeaders.indexOfKey(String8("x-hide-urls-from-log"));
+
+        if (index >= 0) {
+            mFlags |= kFlagIncognito;
+
+            mExtraHeaders.removeItemsAt(index);
+        }
+    }
+}
+
+NuPlayer2::RTSPSource::~RTSPSource() {
+    if (mLooper != NULL) {
+        mLooper->unregisterHandler(id());
+        mLooper->stop();
+    }
+}
+
+status_t NuPlayer2::RTSPSource::getBufferingSettings(
+            BufferingSettings* buffering /* nonnull */) {
+    Mutex::Autolock _l(mBufferingSettingsLock);
+    *buffering = mBufferingSettings;
+    return OK;
+}
+
+status_t NuPlayer2::RTSPSource::setBufferingSettings(const BufferingSettings& buffering) {
+    Mutex::Autolock _l(mBufferingSettingsLock);
+    mBufferingSettings = buffering;
+    return OK;
+}
+
+void NuPlayer2::RTSPSource::prepareAsync() {
+    if (mIsSDP && mHTTPService == NULL) {
+        notifyPrepared(BAD_VALUE);
+        return;
+    }
+
+    if (mLooper == NULL) {
+        mLooper = new ALooper;
+        mLooper->setName("rtsp");
+        mLooper->start();
+
+        mLooper->registerHandler(this);
+    }
+
+    CHECK(mHandler == NULL);
+    CHECK(mSDPLoader == NULL);
+
+    sp<AMessage> notify = new AMessage(kWhatNotify, this);
+
+    CHECK_EQ(mState, (int)DISCONNECTED);
+    mState = CONNECTING;
+
+    if (mIsSDP) {
+        mSDPLoader = new SDPLoader(notify,
+                (mFlags & kFlagIncognito) ? SDPLoader::kFlagIncognito : 0,
+                mHTTPService);
+
+        mSDPLoader->load(
+                mURL.c_str(), mExtraHeaders.isEmpty() ? NULL : &mExtraHeaders);
+    } else {
+        mHandler = new MyHandler(mURL.c_str(), notify, mUIDValid, mUID);
+        mLooper->registerHandler(mHandler);
+
+        mHandler->connect();
+    }
+
+    startBufferingIfNecessary();
+}
+
+void NuPlayer2::RTSPSource::start() {
+}
+
+void NuPlayer2::RTSPSource::stop() {
+    if (mLooper == NULL) {
+        return;
+    }
+    sp<AMessage> msg = new AMessage(kWhatDisconnect, this);
+
+    sp<AMessage> dummy;
+    msg->postAndAwaitResponse(&dummy);
+}
+
+status_t NuPlayer2::RTSPSource::feedMoreTSData() {
+    Mutex::Autolock _l(mBufferingLock);
+    return mFinalResult;
+}
+
+sp<MetaData> NuPlayer2::RTSPSource::getFormatMeta(bool audio) {
+    sp<AnotherPacketSource> source = getSource(audio);
+
+    if (source == NULL) {
+        return NULL;
+    }
+
+    return source->getFormat();
+}
+
+bool NuPlayer2::RTSPSource::haveSufficientDataOnAllTracks() {
+    // We're going to buffer at least 2 secs' worth of data on all tracks before
+    // starting playback (both at startup and after a seek).
+
+    static const int64_t kMinDurationUs = 2000000ll;
+
+    int64_t mediaDurationUs = 0;
+    getDuration(&mediaDurationUs);
+    if ((mAudioTrack != NULL && mAudioTrack->isFinished(mediaDurationUs))
+            || (mVideoTrack != NULL && mVideoTrack->isFinished(mediaDurationUs))) {
+        return true;
+    }
+
+    status_t err;
+    int64_t durationUs;
+    if (mAudioTrack != NULL
+            && (durationUs = mAudioTrack->getBufferedDurationUs(&err))
+                    < kMinDurationUs
+            && err == OK) {
+        ALOGV("audio track doesn't have enough data yet. (%.2f secs buffered)",
+              durationUs / 1E6);
+        return false;
+    }
+
+    if (mVideoTrack != NULL
+            && (durationUs = mVideoTrack->getBufferedDurationUs(&err))
+                    < kMinDurationUs
+            && err == OK) {
+        ALOGV("video track doesn't have enough data yet. (%.2f secs buffered)",
+              durationUs / 1E6);
+        return false;
+    }
+
+    return true;
+}
+
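+// Returns -EWOULDBLOCK while (re)buffering or while the requested track has no
+// data yet. Near the end of the stream the source gets up to kNearEOSTimeoutUs
+// to deliver more data before ERROR_END_OF_STREAM is returned.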
+status_t NuPlayer2::RTSPSource::dequeueAccessUnit(
+        bool audio, sp<ABuffer> *accessUnit) {
+    if (!stopBufferingIfNecessary()) {
+        return -EWOULDBLOCK;
+    }
+
+    sp<AnotherPacketSource> source = getSource(audio);
+
+    if (source == NULL) {
+        return -EWOULDBLOCK;
+    }
+
+    status_t finalResult;
+    if (!source->hasBufferAvailable(&finalResult)) {
+        if (finalResult == OK) {
+
+            // If the other source has already signaled EOS, this source should also return EOS.
+            if (sourceReachedEOS(!audio)) {
+                return ERROR_END_OF_STREAM;
+            }
+
+            // If this source has nearly reached the end, give it some time to
+            // retrieve more data before returning EOS.
+            int64_t mediaDurationUs = 0;
+            getDuration(&mediaDurationUs);
+            if (source->isFinished(mediaDurationUs)) {
+                int64_t eosTimeout = audio ? mEOSTimeoutAudio : mEOSTimeoutVideo;
+                if (eosTimeout == 0) {
+                    setEOSTimeout(audio, ALooper::GetNowUs());
+                } else if ((ALooper::GetNowUs() - eosTimeout) > kNearEOSTimeoutUs) {
+                    setEOSTimeout(audio, 0);
+                    return ERROR_END_OF_STREAM;
+                }
+                return -EWOULDBLOCK;
+            }
+
+            if (!sourceNearEOS(!audio)) {
+                // We should not enter buffering mode
+                // if any of the sources already have detected EOS.
+                startBufferingIfNecessary();
+            }
+
+            return -EWOULDBLOCK;
+        }
+        return finalResult;
+    }
+
+    setEOSTimeout(audio, 0);
+
+    return source->dequeueAccessUnit(accessUnit);
+}
+
+sp<AnotherPacketSource> NuPlayer2::RTSPSource::getSource(bool audio) {
+    if (mTSParser != NULL) {
+        sp<MediaSource> source = mTSParser->getSource(
+                audio ? ATSParser::AUDIO : ATSParser::VIDEO);
+
+        return static_cast<AnotherPacketSource *>(source.get());
+    }
+
+    return audio ? mAudioTrack : mVideoTrack;
+}
+
+void NuPlayer2::RTSPSource::setEOSTimeout(bool audio, int64_t timeout) {
+    if (audio) {
+        mEOSTimeoutAudio = timeout;
+    } else {
+        mEOSTimeoutVideo = timeout;
+    }
+}
+
+status_t NuPlayer2::RTSPSource::getDuration(int64_t *durationUs) {
+    *durationUs = -1ll;
+
+    int64_t audioDurationUs;
+    if (mAudioTrack != NULL
+            && mAudioTrack->getFormat()->findInt64(
+                kKeyDuration, &audioDurationUs)
+            && audioDurationUs > *durationUs) {
+        *durationUs = audioDurationUs;
+    }
+
+    int64_t videoDurationUs;
+    if (mVideoTrack != NULL
+            && mVideoTrack->getFormat()->findInt64(
+                kKeyDuration, &videoDurationUs)
+            && videoDurationUs > *durationUs) {
+        *durationUs = videoDurationUs;
+    }
+
+    return OK;
+}
+
+status_t NuPlayer2::RTSPSource::seekTo(int64_t seekTimeUs, MediaPlayer2SeekMode mode) {
+    sp<AMessage> msg = new AMessage(kWhatPerformSeek, this);
+    msg->setInt32("generation", ++mSeekGeneration);
+    msg->setInt64("timeUs", seekTimeUs);
+    msg->setInt32("mode", mode);
+
+    sp<AMessage> response;
+    status_t err = msg->postAndAwaitResponse(&response);
+    if (err == OK && response != NULL) {
+        CHECK(response->findInt32("err", &err));
+    }
+
+    return err;
+}
+
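+// Runs on the source's looper. Seeking is only valid in the CONNECTED state;
+// otherwise the pending seek request is finished with INVALID_OPERATION.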
+void NuPlayer2::RTSPSource::performSeek(int64_t seekTimeUs) {
+    if (mState != CONNECTED) {
+        finishSeek(INVALID_OPERATION);
+        return;
+    }
+
+    mState = SEEKING;
+    mHandler->seek(seekTimeUs);
+    mEOSPending = false;
+}
+
+void NuPlayer2::RTSPSource::schedulePollBuffering() {
+    sp<AMessage> msg = new AMessage(kWhatPollBuffering, this);
+    msg->post(1000000ll); // 1 second intervals
+}
+
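+// Tallies buffering state across all tracks. A track counts as prepared once
+// it has buffered past the initial mark (or has finished), as underflowing
+// below kUnderflowMarkMs, and as overflowing above the resume-playback mark.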
+void NuPlayer2::RTSPSource::checkBuffering(
+        bool *prepared, bool *underflow, bool *overflow, bool *startServer, bool *finished) {
+    size_t numTracks = mTracks.size();
+    size_t preparedCount, underflowCount, overflowCount, startCount, finishedCount;
+    preparedCount = underflowCount = overflowCount = startCount = finishedCount = 0;
+
+    size_t count = numTracks;
+    for (size_t i = 0; i < count; ++i) {
+        status_t finalResult;
+        TrackInfo *info = &mTracks.editItemAt(i);
+        sp<AnotherPacketSource> src = info->mSource;
+        if (src == NULL) {
+            --numTracks;
+            continue;
+        }
+        int64_t bufferedDurationUs = src->getBufferedDurationUs(&finalResult);
+
+        int64_t initialMarkUs;
+        int64_t maxRebufferingMarkUs;
+        {
+            Mutex::Autolock _l(mBufferingSettingsLock);
+            initialMarkUs = mBufferingSettings.mInitialMarkMs * 1000ll;
+            // TODO: maxRebufferingMarkUs could be larger than
+            // mBufferingSettings.mResumePlaybackMarkMs * 1000ll.
+            maxRebufferingMarkUs = mBufferingSettings.mResumePlaybackMarkMs * 1000ll;
+        }
+        // isFinished() called with duration 0 checks only for an EOS result.
+        if (bufferedDurationUs > initialMarkUs
+                || src->isFinished(/* duration */ 0)) {
+            ++preparedCount;
+        }
+
+        if (src->isFinished(/* duration */ 0)) {
+            ++overflowCount;
+            ++finishedCount;
+        } else {
+            // TODO: redefine kUnderflowMarkMs to a fair value.
+            if (bufferedDurationUs < kUnderflowMarkMs * 1000) {
+                ++underflowCount;
+            }
+            if (bufferedDurationUs > maxRebufferingMarkUs) {
+                ++overflowCount;
+            }
+            int64_t startServerMarkUs =
+                    (kUnderflowMarkMs * 1000ll + maxRebufferingMarkUs) / 2;
+            if (bufferedDurationUs < startServerMarkUs) {
+                ++startCount;
+            }
+        }
+    }
+
+    *prepared    = (preparedCount == numTracks);
+    *underflow   = (underflowCount > 0);
+    *overflow    = (overflowCount == numTracks);
+    *startServer = (startCount > 0);
+    *finished    = (finishedCount > 0);
+}
+
+void NuPlayer2::RTSPSource::onPollBuffering() {
+    bool prepared, underflow, overflow, startServer, finished;
+    checkBuffering(&prepared, &underflow, &overflow, &startServer, &finished);
+
+    if (prepared && mInPreparationPhase) {
+        mInPreparationPhase = false;
+        notifyPrepared();
+    }
+
+    if (!mInPreparationPhase && underflow) {
+        startBufferingIfNecessary();
+    }
+
+    if (haveSufficientDataOnAllTracks()) {
+        stopBufferingIfNecessary();
+    }
+
+    if (overflow && mHandler != NULL) {
+        mHandler->pause();
+    }
+
+    if (startServer && mHandler != NULL) {
+        mHandler->resume();
+    }
+
+    if (finished && mHandler != NULL) {
+        mHandler->cancelAccessUnitTimeoutCheck();
+    }
+
+    schedulePollBuffering();
+}
+
+void NuPlayer2::RTSPSource::signalSourceEOS(status_t result) {
+    const bool audio = true;
+    const bool video = false;
+
+    sp<AnotherPacketSource> source = getSource(audio);
+    if (source != NULL) {
+        source->signalEOS(result);
+    }
+
+    source = getSource(video);
+    if (source != NULL) {
+        source->signalEOS(result);
+    }
+}
+
+bool NuPlayer2::RTSPSource::sourceReachedEOS(bool audio) {
+    sp<AnotherPacketSource> source = getSource(audio);
+    status_t finalResult;
+    return (source != NULL &&
+            !source->hasBufferAvailable(&finalResult) &&
+            finalResult == ERROR_END_OF_STREAM);
+}
+
+bool NuPlayer2::RTSPSource::sourceNearEOS(bool audio) {
+    sp<AnotherPacketSource> source = getSource(audio);
+    int64_t mediaDurationUs = 0;
+    getDuration(&mediaDurationUs);
+    return (source != NULL && source->isFinished(mediaDurationUs));
+}
+
+void NuPlayer2::RTSPSource::onSignalEOS(const sp<AMessage> &msg) {
+    int32_t generation;
+    CHECK(msg->findInt32("generation", &generation));
+
+    if (generation != mSeekGeneration) {
+        return;
+    }
+
+    if (mEOSPending) {
+        signalSourceEOS(ERROR_END_OF_STREAM);
+        mEOSPending = false;
+    }
+}
+
+void NuPlayer2::RTSPSource::postSourceEOSIfNecessary() {
+    const bool audio = true;
+    const bool video = false;
+    // If a source has nearly reached the end, give it some time to retrieve
+    // more data before signaling EOS.
+    if (sourceNearEOS(audio) || sourceNearEOS(video)) {
+        if (!mEOSPending) {
+            sp<AMessage> msg = new AMessage(kWhatSignalEOS, this);
+            msg->setInt32("generation", mSeekGeneration);
+            msg->post(kNearEOSTimeoutUs);
+            mEOSPending = true;
+        }
+    }
+}
+
+void NuPlayer2::RTSPSource::onMessageReceived(const sp<AMessage> &msg) {
+    if (msg->what() == kWhatDisconnect) {
+        sp<AReplyToken> replyID;
+        CHECK(msg->senderAwaitsResponse(&replyID));
+
+        mDisconnectReplyID = replyID;
+        finishDisconnectIfPossible();
+        return;
+    } else if (msg->what() == kWhatPerformSeek) {
+        int32_t generation;
+        CHECK(msg->findInt32("generation", &generation));
+        CHECK(msg->senderAwaitsResponse(&mSeekReplyID));
+
+        if (generation != mSeekGeneration) {
+            // obsolete.
+            finishSeek(OK);
+            return;
+        }
+
+        int64_t seekTimeUs;
+        int32_t mode;
+        CHECK(msg->findInt64("timeUs", &seekTimeUs));
+        CHECK(msg->findInt32("mode", &mode));
+
+        // TODO: add "mode" to performSeek.
+        performSeek(seekTimeUs/*, (MediaPlayer2SeekMode)mode */);
+        return;
+    } else if (msg->what() == kWhatPollBuffering) {
+        onPollBuffering();
+        return;
+    } else if (msg->what() == kWhatSignalEOS) {
+        onSignalEOS(msg);
+        return;
+    }
+
+    CHECK_EQ(msg->what(), kWhatNotify);
+
+    int32_t what;
+    CHECK(msg->findInt32("what", &what));
+
+    switch (what) {
+        case MyHandler::kWhatConnected:
+        {
+            onConnected();
+
+            notifyVideoSizeChanged();
+
+            uint32_t flags = 0;
+
+            if (mHandler->isSeekable()) {
+                flags = FLAG_CAN_PAUSE
+                        | FLAG_CAN_SEEK
+                        | FLAG_CAN_SEEK_BACKWARD
+                        | FLAG_CAN_SEEK_FORWARD;
+            }
+
+            notifyFlagsChanged(flags);
+            schedulePollBuffering();
+            break;
+        }
+
+        case MyHandler::kWhatDisconnected:
+        {
+            onDisconnected(msg);
+            break;
+        }
+
+        case MyHandler::kWhatSeekDone:
+        {
+            mState = CONNECTED;
+            // Unblock seekTo here in case we attempted to seek in a live stream
+            finishSeek(OK);
+            break;
+        }
+
+        case MyHandler::kWhatSeekPaused:
+        {
+            sp<AnotherPacketSource> source = getSource(true /* audio */);
+            if (source != NULL) {
+                source->queueDiscontinuity(ATSParser::DISCONTINUITY_NONE,
+                        /* extra */ NULL,
+                        /* discard */ true);
+            }
+            source = getSource(false /* video */);
+            if (source != NULL) {
+                source->queueDiscontinuity(ATSParser::DISCONTINUITY_NONE,
+                        /* extra */ NULL,
+                        /* discard */ true);
+            }
+
+            status_t err = OK;
+            msg->findInt32("err", &err);
+
+            if (err == OK) {
+                int64_t timeUs;
+                CHECK(msg->findInt64("time", &timeUs));
+                mHandler->continueSeekAfterPause(timeUs);
+            } else {
+                finishSeek(err);
+            }
+            break;
+        }
+
+        case MyHandler::kWhatAccessUnit:
+        {
+            size_t trackIndex;
+            CHECK(msg->findSize("trackIndex", &trackIndex));
+
+            if (mTSParser == NULL) {
+                CHECK_LT(trackIndex, mTracks.size());
+            } else {
+                CHECK_EQ(trackIndex, 0u);
+            }
+
+            sp<ABuffer> accessUnit;
+            CHECK(msg->findBuffer("accessUnit", &accessUnit));
+
+            int32_t damaged;
+            if (accessUnit->meta()->findInt32("damaged", &damaged)
+                    && damaged) {
+                ALOGI("dropping damaged access unit.");
+                break;
+            }
+
+            if (mTSParser != NULL) {
+                size_t offset = 0;
+                status_t err = OK;
+                while (offset + 188 <= accessUnit->size()) {
+                    err = mTSParser->feedTSPacket(
+                            accessUnit->data() + offset, 188);
+                    if (err != OK) {
+                        break;
+                    }
+
+                    offset += 188;
+                }
+
+                if (offset < accessUnit->size()) {
+                    err = ERROR_MALFORMED;
+                }
+
+                if (err != OK) {
+                    signalSourceEOS(err);
+                }
+
+                postSourceEOSIfNecessary();
+                break;
+            }
+
+            TrackInfo *info = &mTracks.editItemAt(trackIndex);
+
+            sp<AnotherPacketSource> source = info->mSource;
+            if (source != NULL) {
+                uint32_t rtpTime;
+                CHECK(accessUnit->meta()->findInt32("rtp-time", (int32_t *)&rtpTime));
+
+                if (!info->mNPTMappingValid) {
+                    // This is a live stream; we didn't receive any normal
+                    // playtime mapping, so we won't map to NPT time.
+                    source->queueAccessUnit(accessUnit);
+                    break;
+                }
+
+                int64_t nptUs =
+                    ((double)rtpTime - (double)info->mRTPTime)
+                        / info->mTimeScale
+                        * 1000000ll
+                        + info->mNormalPlaytimeUs;
+
+                accessUnit->meta()->setInt64("timeUs", nptUs);
+
+                source->queueAccessUnit(accessUnit);
+            }
+            postSourceEOSIfNecessary();
+            break;
+        }
+
+        case MyHandler::kWhatEOS:
+        {
+            int32_t finalResult;
+            CHECK(msg->findInt32("finalResult", &finalResult));
+            CHECK_NE(finalResult, (status_t)OK);
+
+            if (mTSParser != NULL) {
+                signalSourceEOS(finalResult);
+            }
+
+            size_t trackIndex;
+            CHECK(msg->findSize("trackIndex", &trackIndex));
+            CHECK_LT(trackIndex, mTracks.size());
+
+            TrackInfo *info = &mTracks.editItemAt(trackIndex);
+            sp<AnotherPacketSource> source = info->mSource;
+            if (source != NULL) {
+                source->signalEOS(finalResult);
+            }
+
+            break;
+        }
+
+        case MyHandler::kWhatSeekDiscontinuity:
+        {
+            size_t trackIndex;
+            CHECK(msg->findSize("trackIndex", &trackIndex));
+            CHECK_LT(trackIndex, mTracks.size());
+
+            TrackInfo *info = &mTracks.editItemAt(trackIndex);
+            sp<AnotherPacketSource> source = info->mSource;
+            if (source != NULL) {
+                source->queueDiscontinuity(
+                        ATSParser::DISCONTINUITY_TIME,
+                        NULL,
+                        true /* discard */);
+            }
+
+            break;
+        }
+
+        case MyHandler::kWhatNormalPlayTimeMapping:
+        {
+            size_t trackIndex;
+            CHECK(msg->findSize("trackIndex", &trackIndex));
+            CHECK_LT(trackIndex, mTracks.size());
+
+            uint32_t rtpTime;
+            CHECK(msg->findInt32("rtpTime", (int32_t *)&rtpTime));
+
+            int64_t nptUs;
+            CHECK(msg->findInt64("nptUs", &nptUs));
+
+            TrackInfo *info = &mTracks.editItemAt(trackIndex);
+            info->mRTPTime = rtpTime;
+            info->mNormalPlaytimeUs = nptUs;
+            info->mNPTMappingValid = true;
+            break;
+        }
+
+        case SDPLoader::kWhatSDPLoaded:
+        {
+            onSDPLoaded(msg);
+            break;
+        }
+
+        default:
+            TRESPASS();
+    }
+}
+
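+// Builds the track list from the RTSP handler. A single MPEG-2 transport
+// stream track is handled through ATSParser rather than per-track packet
+// sources.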
+void NuPlayer2::RTSPSource::onConnected() {
+    CHECK(mAudioTrack == NULL);
+    CHECK(mVideoTrack == NULL);
+
+    size_t numTracks = mHandler->countTracks();
+    for (size_t i = 0; i < numTracks; ++i) {
+        int32_t timeScale;
+        sp<MetaData> format = mHandler->getTrackFormat(i, &timeScale);
+
+        const char *mime;
+        CHECK(format->findCString(kKeyMIMEType, &mime));
+
+        if (!strcasecmp(mime, MEDIA_MIMETYPE_CONTAINER_MPEG2TS)) {
+            // Very special case for MPEG2 Transport Streams.
+            CHECK_EQ(numTracks, 1u);
+
+            mTSParser = new ATSParser;
+            return;
+        }
+
+        bool isAudio = !strncasecmp(mime, "audio/", 6);
+        bool isVideo = !strncasecmp(mime, "video/", 6);
+
+        TrackInfo info;
+        info.mTimeScale = timeScale;
+        info.mRTPTime = 0;
+        info.mNormalPlaytimeUs = 0ll;
+        info.mNPTMappingValid = false;
+
+        if ((isAudio && mAudioTrack == NULL)
+                || (isVideo && mVideoTrack == NULL)) {
+            sp<AnotherPacketSource> source = new AnotherPacketSource(format);
+
+            if (isAudio) {
+                mAudioTrack = source;
+            } else {
+                mVideoTrack = source;
+            }
+
+            info.mSource = source;
+        }
+
+        mTracks.push(info);
+    }
+
+    mState = CONNECTED;
+}
+
+void NuPlayer2::RTSPSource::onSDPLoaded(const sp<AMessage> &msg) {
+    status_t err;
+    CHECK(msg->findInt32("result", &err));
+
+    mSDPLoader.clear();
+
+    if (mDisconnectReplyID != 0) {
+        err = UNKNOWN_ERROR;
+    }
+
+    if (err == OK) {
+        sp<ASessionDescription> desc;
+        sp<RefBase> obj;
+        CHECK(msg->findObject("description", &obj));
+        desc = static_cast<ASessionDescription *>(obj.get());
+
+        AString rtspUri;
+        if (!desc->findAttribute(0, "a=control", &rtspUri)) {
+            ALOGE("Unable to find url in SDP");
+            err = UNKNOWN_ERROR;
+        } else {
+            sp<AMessage> notify = new AMessage(kWhatNotify, this);
+
+            mHandler = new MyHandler(rtspUri.c_str(), notify, mUIDValid, mUID);
+            mLooper->registerHandler(mHandler);
+
+            mHandler->loadSDP(desc);
+        }
+    }
+
+    if (err != OK) {
+        if (mState == CONNECTING) {
+            // We're still in the preparation phase; signal that it failed.
+            notifyPrepared(err);
+        }
+
+        mState = DISCONNECTED;
+        setError(err);
+
+        if (mDisconnectReplyID != 0) {
+            finishDisconnectIfPossible();
+        }
+    }
+}
+
+void NuPlayer2::RTSPSource::onDisconnected(const sp<AMessage> &msg) {
+    if (mState == DISCONNECTED) {
+        return;
+    }
+
+    status_t err;
+    CHECK(msg->findInt32("result", &err));
+    CHECK_NE(err, (status_t)OK);
+
+    mLooper->unregisterHandler(mHandler->id());
+    mHandler.clear();
+
+    if (mState == CONNECTING) {
+        // We're still in the preparation phase; signal that it failed.
+        notifyPrepared(err);
+    }
+
+    mState = DISCONNECTED;
+    setError(err);
+
+    if (mDisconnectReplyID != 0) {
+        finishDisconnectIfPossible();
+    }
+}
+
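+// Disconnect happens in two steps: first the handler (or SDP loader) is asked
+// to tear down; once the state reaches DISCONNECTED the pending disconnect
+// request is replied to.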
+void NuPlayer2::RTSPSource::finishDisconnectIfPossible() {
+    if (mState != DISCONNECTED) {
+        if (mHandler != NULL) {
+            mHandler->disconnect();
+        } else if (mSDPLoader != NULL) {
+            mSDPLoader->cancel();
+        }
+        return;
+    }
+
+    (new AMessage)->postReply(mDisconnectReplyID);
+    mDisconnectReplyID = 0;
+}
+
+void NuPlayer2::RTSPSource::setError(status_t err) {
+    Mutex::Autolock _l(mBufferingLock);
+    mFinalResult = err;
+}
+
+void NuPlayer2::RTSPSource::startBufferingIfNecessary() {
+    Mutex::Autolock _l(mBufferingLock);
+
+    if (!mBuffering) {
+        mBuffering = true;
+
+        sp<AMessage> notify = dupNotify();
+        notify->setInt32("what", kWhatPauseOnBufferingStart);
+        notify->post();
+    }
+}
+
+bool NuPlayer2::RTSPSource::stopBufferingIfNecessary() {
+    Mutex::Autolock _l(mBufferingLock);
+
+    if (mBuffering) {
+        if (!haveSufficientDataOnAllTracks()) {
+            return false;
+        }
+
+        mBuffering = false;
+
+        sp<AMessage> notify = dupNotify();
+        notify->setInt32("what", kWhatResumeOnBufferingEnd);
+        notify->post();
+    }
+
+    return true;
+}
+
+void NuPlayer2::RTSPSource::finishSeek(status_t err) {
+    if (mSeekReplyID == NULL) {
+        return;
+    }
+    sp<AMessage> seekReply = new AMessage;
+    seekReply->setInt32("err", err);
+    seekReply->postReply(mSeekReplyID);
+    mSeekReplyID = NULL;
+}
+
+}  // namespace android
diff --git a/media/libmedia/nuplayer2/RTSPSource.h b/media/libmedia/nuplayer2/RTSPSource.h
new file mode 100644
index 0000000..9bce473
--- /dev/null
+++ b/media/libmedia/nuplayer2/RTSPSource.h
@@ -0,0 +1,169 @@
+/*
+ * Copyright 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef RTSP_SOURCE_H_
+
+#define RTSP_SOURCE_H_
+
+#include "NuPlayer2Source.h"
+
+#include "ATSParser.h"
+
+namespace android {
+
+struct ALooper;
+struct AReplyToken;
+struct AnotherPacketSource;
+struct MyHandler;
+struct SDPLoader;
+
+struct NuPlayer2::RTSPSource : public NuPlayer2::Source {
+    RTSPSource(
+            const sp<AMessage> &notify,
+            const sp<MediaHTTPService> &httpService,
+            const char *url,
+            const KeyedVector<String8, String8> *headers,
+            bool uidValid = false,
+            uid_t uid = 0,
+            bool isSDP = false);
+
+    virtual status_t getBufferingSettings(
+            BufferingSettings* buffering /* nonnull */) override;
+    virtual status_t setBufferingSettings(const BufferingSettings& buffering) override;
+
+    virtual void prepareAsync();
+    virtual void start();
+    virtual void stop();
+
+    virtual status_t feedMoreTSData();
+
+    virtual status_t dequeueAccessUnit(bool audio, sp<ABuffer> *accessUnit);
+
+    virtual status_t getDuration(int64_t *durationUs);
+    virtual status_t seekTo(
+            int64_t seekTimeUs,
+            MediaPlayer2SeekMode mode = MediaPlayer2SeekMode::SEEK_PREVIOUS_SYNC) override;
+
+    void onMessageReceived(const sp<AMessage> &msg);
+
+protected:
+    virtual ~RTSPSource();
+
+    virtual sp<MetaData> getFormatMeta(bool audio);
+
+private:
+    enum {
+        kWhatNotify          = 'noti',
+        kWhatDisconnect      = 'disc',
+        kWhatPerformSeek     = 'seek',
+        kWhatPollBuffering   = 'poll',
+        kWhatSignalEOS       = 'eos ',
+    };
+
+    enum State {
+        DISCONNECTED,
+        CONNECTING,
+        CONNECTED,
+        SEEKING,
+    };
+
+    enum Flags {
+        // Don't log any URLs.
+        kFlagIncognito = 1,
+    };
+
+    struct TrackInfo {
+        sp<AnotherPacketSource> mSource;
+
+        int32_t mTimeScale;
+        uint32_t mRTPTime;
+        int64_t mNormalPlaytimeUs;
+        bool mNPTMappingValid;
+    };
+
+    sp<MediaHTTPService> mHTTPService;
+    AString mURL;
+    KeyedVector<String8, String8> mExtraHeaders;
+    bool mUIDValid;
+    uid_t mUID;
+    uint32_t mFlags;
+    bool mIsSDP;
+    State mState;
+    status_t mFinalResult;
+    sp<AReplyToken> mDisconnectReplyID;
+    Mutex mBufferingLock;
+    bool mBuffering;
+    bool mInPreparationPhase;
+    bool mEOSPending;
+
+    Mutex mBufferingSettingsLock;
+    BufferingSettings mBufferingSettings;
+
+    sp<ALooper> mLooper;
+    sp<MyHandler> mHandler;
+    sp<SDPLoader> mSDPLoader;
+
+    Vector<TrackInfo> mTracks;
+    sp<AnotherPacketSource> mAudioTrack;
+    sp<AnotherPacketSource> mVideoTrack;
+
+    sp<ATSParser> mTSParser;
+
+    int32_t mSeekGeneration;
+
+    int64_t mEOSTimeoutAudio;
+    int64_t mEOSTimeoutVideo;
+
+    sp<AReplyToken> mSeekReplyID;
+
+    sp<AnotherPacketSource> getSource(bool audio);
+
+    void onConnected();
+    void onSDPLoaded(const sp<AMessage> &msg);
+    void onDisconnected(const sp<AMessage> &msg);
+    void finishDisconnectIfPossible();
+
+    void performSeek(int64_t seekTimeUs);
+    void schedulePollBuffering();
+    void checkBuffering(
+            bool *prepared,
+            bool *underflow,
+            bool *overflow,
+            bool *startServer,
+            bool *finished);
+    void onPollBuffering();
+
+    bool haveSufficientDataOnAllTracks();
+
+    void setEOSTimeout(bool audio, int64_t timeout);
+    void setError(status_t err);
+    void startBufferingIfNecessary();
+    bool stopBufferingIfNecessary();
+    void finishSeek(status_t err);
+
+    void postSourceEOSIfNecessary();
+    void signalSourceEOS(status_t result);
+    void onSignalEOS(const sp<AMessage> &msg);
+
+    bool sourceNearEOS(bool audio);
+    bool sourceReachedEOS(bool audio);
+
+    DISALLOW_EVIL_CONSTRUCTORS(RTSPSource);
+};
+
+}  // namespace android
+
+#endif  // RTSP_SOURCE_H_
diff --git a/media/libmedia/nuplayer2/StreamingSource.cpp b/media/libmedia/nuplayer2/StreamingSource.cpp
new file mode 100644
index 0000000..ed990c4
--- /dev/null
+++ b/media/libmedia/nuplayer2/StreamingSource.cpp
@@ -0,0 +1,309 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "StreamingSource"
+#include <utils/Log.h>
+
+#include "StreamingSource.h"
+
+#include "ATSParser.h"
+#include "AnotherPacketSource.h"
+#include "NuPlayer2StreamListener.h"
+
+#include <media/MediaSource.h>
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/MetaData.h>
+#include <media/stagefright/Utils.h>
+
+namespace android {
+
+const int32_t kNumListenerQueuePackets = 80;
+
+NuPlayer2::StreamingSource::StreamingSource(
+        const sp<AMessage> &notify,
+        const sp<IStreamSource> &source)
+    : Source(notify),
+      mSource(source),
+      mFinalResult(OK),
+      mBuffering(false) {
+}
+
+NuPlayer2::StreamingSource::~StreamingSource() {
+    if (mLooper != NULL) {
+        mLooper->unregisterHandler(id());
+        mLooper->stop();
+    }
+}
+
+status_t NuPlayer2::StreamingSource::getBufferingSettings(
+        BufferingSettings *buffering /* nonnull */) {
+    *buffering = BufferingSettings();
+    return OK;
+}
+
+status_t NuPlayer2::StreamingSource::setBufferingSettings(
+        const BufferingSettings & /* buffering */) {
+    return OK;
+}
+
+void NuPlayer2::StreamingSource::prepareAsync() {
+    if (mLooper == NULL) {
+        mLooper = new ALooper;
+        mLooper->setName("streaming");
+        mLooper->start();
+
+        mLooper->registerHandler(this);
+    }
+
+    notifyVideoSizeChanged();
+    notifyFlagsChanged(0);
+    notifyPrepared();
+}
+
+void NuPlayer2::StreamingSource::start() {
+    mStreamListener = new StreamListener(mSource, NULL);
+
+    uint32_t sourceFlags = mSource->flags();
+
+    uint32_t parserFlags = ATSParser::TS_TIMESTAMPS_ARE_ABSOLUTE;
+    if (sourceFlags & IStreamSource::kFlagAlignedVideoData) {
+        parserFlags |= ATSParser::ALIGNED_VIDEO_DATA;
+    }
+
+    mTSParser = new ATSParser(parserFlags);
+
+    mStreamListener->start();
+
+    postReadBuffer();
+}
+
+status_t NuPlayer2::StreamingSource::feedMoreTSData() {
+    return postReadBuffer();
+}
+
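+// Pulls up to kNumListenerQueuePackets entries from the stream listener. Each
+// entry is either a 188-byte TS packet fed to the parser or a legacy command
+// buffer (first byte 0x00) that signals a discontinuity.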
+void NuPlayer2::StreamingSource::onReadBuffer() {
+    for (int32_t i = 0; i < kNumListenerQueuePackets; ++i) {
+        char buffer[188];
+        sp<AMessage> extra;
+        ssize_t n = mStreamListener->read(buffer, sizeof(buffer), &extra);
+
+        if (n == 0) {
+            ALOGI("input data EOS reached.");
+            mTSParser->signalEOS(ERROR_END_OF_STREAM);
+            setError(ERROR_END_OF_STREAM);
+            break;
+        } else if (n == INFO_DISCONTINUITY) {
+            int32_t type = ATSParser::DISCONTINUITY_TIME;
+
+            int32_t mask;
+            if (extra != NULL
+                    && extra->findInt32(
+                        IStreamListener::kKeyDiscontinuityMask, &mask)) {
+                if (mask == 0) {
+                    ALOGE("Client specified an illegal discontinuity type.");
+                    setError(ERROR_UNSUPPORTED);
+                    break;
+                }
+
+                type = mask;
+            }
+
+            mTSParser->signalDiscontinuity(
+                    (ATSParser::DiscontinuityType)type, extra);
+        } else if (n < 0) {
+            break;
+        } else {
+            if (buffer[0] == 0x00) {
+                // XXX legacy
+
+                if (extra == NULL) {
+                    extra = new AMessage;
+                }
+
+                uint8_t type = buffer[1];
+
+                if (type & 2) {
+                    int64_t mediaTimeUs;
+                    memcpy(&mediaTimeUs, &buffer[2], sizeof(mediaTimeUs));
+
+                    extra->setInt64(IStreamListener::kKeyMediaTimeUs, mediaTimeUs);
+                }
+
+                mTSParser->signalDiscontinuity(
+                        ((type & 1) == 0)
+                            ? ATSParser::DISCONTINUITY_TIME
+                            : ATSParser::DISCONTINUITY_FORMATCHANGE,
+                        extra);
+            } else {
+                status_t err = mTSParser->feedTSPacket(buffer, sizeof(buffer));
+
+                if (err != OK) {
+                    ALOGE("TS Parser returned error %d", err);
+
+                    mTSParser->signalEOS(err);
+                    setError(err);
+                    break;
+                }
+            }
+        }
+    }
+}
+
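+// Posts at most one kWhatReadBuffer message at a time; mBuffering is cleared
+// again in onMessageReceived() once onReadBuffer() has run.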
+status_t NuPlayer2::StreamingSource::postReadBuffer() {
+    {
+        Mutex::Autolock _l(mBufferingLock);
+        if (mFinalResult != OK) {
+            return mFinalResult;
+        }
+        if (mBuffering) {
+            return OK;
+        }
+        mBuffering = true;
+    }
+
+    (new AMessage(kWhatReadBuffer, this))->post();
+    return OK;
+}
+
+bool NuPlayer2::StreamingSource::haveSufficientDataOnAllTracks() {
+    // We're going to buffer at least 2 secs' worth of data on all tracks before
+    // starting playback (both at startup and after a seek).
+
+    static const int64_t kMinDurationUs = 2000000ll;
+
+    sp<AnotherPacketSource> audioTrack = getSource(true /*audio*/);
+    sp<AnotherPacketSource> videoTrack = getSource(false /*video*/);
+
+    status_t err;
+    int64_t durationUs;
+    if (audioTrack != NULL
+            && (durationUs = audioTrack->getBufferedDurationUs(&err))
+                    < kMinDurationUs
+            && err == OK) {
+        ALOGV("audio track doesn't have enough data yet. (%.2f secs buffered)",
+              durationUs / 1E6);
+        return false;
+    }
+
+    if (videoTrack != NULL
+            && (durationUs = videoTrack->getBufferedDurationUs(&err))
+                    < kMinDurationUs
+            && err == OK) {
+        ALOGV("video track doesn't have enough data yet. (%.2f secs buffered)",
+              durationUs / 1E6);
+        return false;
+    }
+
+    return true;
+}
+
+void NuPlayer2::StreamingSource::setError(status_t err) {
+    Mutex::Autolock _l(mBufferingLock);
+    mFinalResult = err;
+}
+
+sp<AnotherPacketSource> NuPlayer2::StreamingSource::getSource(bool audio) {
+    if (mTSParser == NULL) {
+        return NULL;
+    }
+
+    sp<MediaSource> source = mTSParser->getSource(
+            audio ? ATSParser::AUDIO : ATSParser::VIDEO);
+
+    return static_cast<AnotherPacketSource *>(source.get());
+}
+
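+// Per the Source contract, returning a format with "err" set to -EWOULDBLOCK
+// means the parser has not produced valid metadata for this track yet.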
+sp<AMessage> NuPlayer2::StreamingSource::getFormat(bool audio) {
+    sp<AnotherPacketSource> source = getSource(audio);
+
+    sp<AMessage> format = new AMessage;
+    if (source == NULL) {
+        format->setInt32("err", -EWOULDBLOCK);
+        return format;
+    }
+
+    sp<MetaData> meta = source->getFormat();
+    if (meta == NULL) {
+        format->setInt32("err", -EWOULDBLOCK);
+        return format;
+    }
+    status_t err = convertMetaDataToMessage(meta, &format);
+    if (err != OK) { // format may have been cleared on error
+        return NULL;
+    }
+    return format;
+}
+
+status_t NuPlayer2::StreamingSource::dequeueAccessUnit(
+        bool audio, sp<ABuffer> *accessUnit) {
+    sp<AnotherPacketSource> source = getSource(audio);
+
+    if (source == NULL) {
+        return -EWOULDBLOCK;
+    }
+
+    if (!haveSufficientDataOnAllTracks()) {
+        postReadBuffer();
+    }
+
+    status_t finalResult;
+    if (!source->hasBufferAvailable(&finalResult)) {
+        return finalResult == OK ? -EWOULDBLOCK : finalResult;
+    }
+
+    status_t err = source->dequeueAccessUnit(accessUnit);
+
+#if !defined(LOG_NDEBUG) || LOG_NDEBUG == 0
+    if (err == OK) {
+        int64_t timeUs;
+        CHECK((*accessUnit)->meta()->findInt64("timeUs", &timeUs));
+        ALOGV("dequeueAccessUnit timeUs=%lld us", (long long)timeUs);
+    }
+#endif
+
+    return err;
+}
+
+bool NuPlayer2::StreamingSource::isRealTime() const {
+    return mSource->flags() & IStreamSource::kFlagIsRealTimeData;
+}
+
+void NuPlayer2::StreamingSource::onMessageReceived(
+        const sp<AMessage> &msg) {
+    switch (msg->what()) {
+        case kWhatReadBuffer:
+        {
+            onReadBuffer();
+
+            {
+                Mutex::Autolock _l(mBufferingLock);
+                mBuffering = false;
+            }
+            break;
+        }
+        default:
+        {
+            TRESPASS();
+        }
+    }
+}
+
+
+}  // namespace android
+
diff --git a/media/libmedia/nuplayer2/StreamingSource.h b/media/libmedia/nuplayer2/StreamingSource.h
new file mode 100644
index 0000000..4b89c38
--- /dev/null
+++ b/media/libmedia/nuplayer2/StreamingSource.h
@@ -0,0 +1,79 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef STREAMING_SOURCE_H_
+
+#define STREAMING_SOURCE_H_
+
+#include "NuPlayer2.h"
+#include "NuPlayer2Source.h"
+
+namespace android {
+
+struct ABuffer;
+struct ATSParser;
+struct AnotherPacketSource;
+
+struct NuPlayer2::StreamingSource : public NuPlayer2::Source {
+    StreamingSource(
+            const sp<AMessage> &notify,
+            const sp<IStreamSource> &source);
+
+    virtual status_t getBufferingSettings(
+            BufferingSettings* buffering /* nonnull */) override;
+    virtual status_t setBufferingSettings(const BufferingSettings& buffering) override;
+
+    virtual void prepareAsync();
+    virtual void start();
+
+    virtual status_t feedMoreTSData();
+
+    virtual status_t dequeueAccessUnit(bool audio, sp<ABuffer> *accessUnit);
+
+    virtual bool isRealTime() const;
+
+protected:
+    virtual ~StreamingSource();
+
+    virtual void onMessageReceived(const sp<AMessage> &msg);
+
+    virtual sp<AMessage> getFormat(bool audio);
+
+private:
+    enum {
+        kWhatReadBuffer,
+    };
+    sp<IStreamSource> mSource;
+    status_t mFinalResult;
+    sp<StreamListener> mStreamListener;
+    sp<ATSParser> mTSParser;
+
+    bool mBuffering;
+    Mutex mBufferingLock;
+    sp<ALooper> mLooper;
+
+    void setError(status_t err);
+    sp<AnotherPacketSource> getSource(bool audio);
+    bool haveSufficientDataOnAllTracks();
+    status_t postReadBuffer();
+    void onReadBuffer();
+
+    DISALLOW_EVIL_CONSTRUCTORS(StreamingSource);
+};
+
+}  // namespace android
+
+#endif  // STREAMING_SOURCE_H_
diff --git a/media/libstagefright/Android.bp b/media/libstagefright/Android.bp
index a618676..26de4ca 100644
--- a/media/libstagefright/Android.bp
+++ b/media/libstagefright/Android.bp
@@ -174,6 +174,90 @@
     },
 }
 
+cc_library_shared {
+    name: "libstagefright_player2",
+
+    srcs: [
+        "CallbackDataSource.cpp",
+        "CallbackMediaSource.cpp",
+        "DataSourceFactory.cpp",
+        "DataURISource.cpp",
+        "FileSource.cpp",
+        "HTTPBase.cpp",
+        "HevcUtils.cpp",
+        "InterfaceUtils.cpp",
+        "MediaClock.cpp",
+        "MediaExtractorFactory.cpp",
+        "NuCachedSource2.cpp",
+        "RemoteMediaExtractor.cpp",
+        "RemoteMediaSource.cpp",
+        "SurfaceUtils.cpp",
+        "Utils.cpp",
+        "VideoFrameScheduler.cpp",
+        "http/MediaHTTP.cpp",
+    ],
+
+    shared_libs: [
+        "libbinder",
+        "libcutils",
+        "libdrmframework",
+        "libgui",
+        "liblog",
+        "libmedia_omx",
+        "libmedia_player2_util",
+        "libaudioclient",
+        "libmediaextractor",
+        "libmediametrics",
+        "libmediautils",
+        "libnetd_client",
+        "libui",
+        "libutils",
+        "libmedia_helper",
+        "libstagefright_foundation",
+        "libdl",
+    ],
+
+    static_libs: [
+        "libstagefright_esds",
+        "libstagefright_id3",
+        "libstagefright_mpeg2support",
+        "libstagefright_timedtext",
+    ],
+
+    export_shared_lib_headers: [
+        "libmedia_player2_util",
+    ],
+
+    export_include_dirs: [
+        "include",
+    ],
+
+    cflags: [
+        "-Wno-multichar",
+        "-Werror",
+        "-Wno-error=deprecated-declarations",
+        "-Wall",
+    ],
+
+    product_variables: {
+        debuggable: {
+            // enable experiments only in userdebug and eng builds
+            cflags: ["-DENABLE_STAGEFRIGHT_EXPERIMENTS"],
+        },
+    },
+
+    sanitize: {
+        cfi: true,
+        misc_undefined: [
+            "unsigned-integer-overflow",
+            "signed-integer-overflow",
+        ],
+        diag: {
+            cfi: true,
+        },
+    },
+}
+
 subdirs = [
     "codec2",
     "codecs/*",
diff --git a/media/libstagefright/foundation/include/media/stagefright/foundation/avc_utils.h b/media/libstagefright/foundation/include/media/stagefright/foundation/avc_utils.h
index f4eb692..a939f12 100644
--- a/media/libstagefright/foundation/include/media/stagefright/foundation/avc_utils.h
+++ b/media/libstagefright/foundation/include/media/stagefright/foundation/avc_utils.h
@@ -18,7 +18,6 @@
 
 #define AVC_UTILS_H_
 
-#include <media/MediaCodecBuffer.h>
 #include <media/stagefright/foundation/ABuffer.h>
 #include <utils/Errors.h>