Add support for audio offloading in NuPlayer.

When canOffloadStream() reports that the audio track can be offloaded,
NuPlayer creates a pass-through "decoder" that forwards compressed
access units unchanged, and the renderer feeds them to an offloaded
AudioSink through a callback. If opening the offloaded sink fails,
playback falls back to the regular PCM decode path.

Change-Id: Ic83973339fb46a83b48382e6097925f45d200867
diff --git a/media/libmediaplayerservice/nuplayer/Android.mk b/media/libmediaplayerservice/nuplayer/Android.mk
index f97ba57..25002e3 100644
--- a/media/libmediaplayerservice/nuplayer/Android.mk
+++ b/media/libmediaplayerservice/nuplayer/Android.mk
@@ -6,6 +6,7 @@
         HTTPLiveSource.cpp              \
         NuPlayer.cpp                    \
         NuPlayerDecoder.cpp             \
+        NuPlayerDecoderPassThrough.cpp  \
         NuPlayerDriver.cpp              \
         NuPlayerRenderer.cpp            \
         NuPlayerStreamListener.cpp      \
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
index b333043..88c59bf 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
@@ -22,6 +22,7 @@
 
 #include "HTTPLiveSource.h"
 #include "NuPlayerDecoder.h"
+#include "NuPlayerDecoderPassThrough.h"
 #include "NuPlayerDriver.h"
 #include "NuPlayerRenderer.h"
 #include "NuPlayerSource.h"
@@ -143,6 +144,7 @@
     : mUIDValid(false),
       mSourceFlags(0),
       mVideoIsAVC(false),
+      mOffloadAudio(false),
       mAudioEOS(false),
       mVideoEOS(false),
       mScanSourcesPending(false),
@@ -500,6 +502,7 @@
             ALOGV("kWhatStart");
 
             mVideoIsAVC = false;
+            mOffloadAudio = false;
             mAudioEOS = false;
             mVideoEOS = false;
             mSkipRenderingAudioUntilMediaTimeUs = -1;
@@ -517,6 +520,21 @@
                 flags |= Renderer::FLAG_REAL_TIME;
             }
 
+            sp<MetaData> audioMeta = mSource->getFormatMeta(true /* audio */);
+            audio_stream_type_t streamType = AUDIO_STREAM_MUSIC;
+            if (mAudioSink != NULL) {
+                streamType = mAudioSink->getAudioStreamType();
+            }
+
+            sp<AMessage> videoFormat = mSource->getFormat(false /* audio */);
+
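+            // The renderer has to be created with the offload flag already
+            // set, so decide here whether this stream can go to the HAL.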
+            mOffloadAudio =
+                canOffloadStream(audioMeta, (videoFormat != NULL),
+                                 true /* is_streaming */, streamType);
+            if (mOffloadAudio) {
+                flags |= Renderer::FLAG_OFFLOAD_AUDIO;
+            }
+
             mRenderer = new Renderer(
                     mAudioSink,
                     new AMessage(kWhatRendererNotify, id()),
@@ -661,7 +679,7 @@
 
                     mAudioSink->close();
 
-                    audio_output_flags_t flags;
+                    uint32_t flags;
                     int64_t durationUs;
                     // FIXME: we should handle the case where the video decoder
                     // is created after we receive the format change indication.
@@ -682,17 +700,92 @@
                         channelMask = CHANNEL_MASK_USE_CHANNEL_ORDER;
                     }
 
-                    CHECK_EQ(mAudioSink->open(
-                                sampleRate,
-                                numChannels,
-                                (audio_channel_mask_t)channelMask,
-                                AUDIO_FORMAT_PCM_16_BIT,
-                                8 /* bufferCount */,
-                                NULL,
-                                NULL,
-                                flags),
-                             (status_t)OK);
-                    mAudioSink->start();
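+                    // First try to open the sink in compress-offload mode;
+                    // on any failure, tear down and fall back to PCM below.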
+                    if (mOffloadAudio) {
+                        audio_format_t audioFormat = AUDIO_FORMAT_PCM_16_BIT;
+                        audio_offload_info_t offloadInfo =
+                                AUDIO_INFO_INITIALIZER;
+
+                        AString mime;
+                        CHECK(format->findString("mime", &mime));
+
+                        status_t err =
+                            mapMimeToAudioFormat(audioFormat, mime.c_str());
+                        if (err != OK) {
+                            ALOGE("Couldn't map mime \"%s\" to a valid "
+                                    "audio_format", mime.c_str());
+                            mOffloadAudio = false;
+                        } else {
+                            ALOGV("Mime \"%s\" mapped to audio_format 0x%x",
+                                    mime.c_str(), audioFormat);
+
+                            flags |= AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD;
+
+                            offloadInfo.duration_us = -1;
+                            format->findInt64(
+                                    "durationUs", &offloadInfo.duration_us);
+
+                            int avgBitRate = -1;
+                            format->findInt32("bit-rate", &avgBitRate);
+
+                            offloadInfo.sample_rate = sampleRate;
+                            offloadInfo.channel_mask = channelMask;
+                            offloadInfo.format = audioFormat;
+                            offloadInfo.stream_type = AUDIO_STREAM_MUSIC;
+                            offloadInfo.bit_rate = avgBitRate;
+                            offloadInfo.has_video = (mVideoDecoder != NULL);
+                            offloadInfo.is_streaming = true;
+
+                            err = mAudioSink->open(
+                                    sampleRate,
+                                    numChannels,
+                                    (audio_channel_mask_t)channelMask,
+                                    audioFormat,
+                                    8 /* bufferCount */,
+                                    &NuPlayer::Renderer::AudioSinkCallback,
+                                    mRenderer.get(),
+                                    (audio_output_flags_t)flags,
+                                    &offloadInfo);
+
+                            if (err == OK) {
+                                // When playback is offloaded to hardware, pass
+                                // the stream metadata down to the HAL. This is
+                                // skipped for PCM, which goes through the
+                                // AudioFlinger mixer before reaching the
+                                // hardware.
+                                sp<MetaData> audioMeta =
+                                    mSource->getFormatMeta(true /* audio */);
+                                sendMetaDataToHal(mAudioSink, audioMeta);
+
+                                err = mAudioSink->start();
+                            }
+                        }
+
+                        if (err != OK) {
+                            // Clean up and fall back to non-offload mode.
+                            mAudioSink->close();
+                            mAudioDecoder.clear();
+                            mRenderer->signalDisableOffloadAudio();
+                            mOffloadAudio = false;
+
+                            instantiateDecoder(
+                                    true /* audio */, &mAudioDecoder);
+                        }
+                    }
+
+                    if (!mOffloadAudio) {
+                        flags &= ~AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD;
+                        CHECK_EQ(mAudioSink->open(
+                                    sampleRate,
+                                    numChannels,
+                                    (audio_channel_mask_t)channelMask,
+                                    AUDIO_FORMAT_PCM_16_BIT,
+                                    8 /* bufferCount */,
+                                    NULL,
+                                    NULL,
+                                    (audio_output_flags_t)flags),
+                                 (status_t)OK);
+                        mAudioSink->start();
+                    }
 
                     mRenderer->signalAudioSinkChanged();
                 } else {
@@ -968,8 +1061,15 @@
         new AMessage(audio ? kWhatAudioNotify : kWhatVideoNotify,
                      id());
 
-    *decoder = audio ? new Decoder(notify) :
-                       new Decoder(notify, mNativeWindow);
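+    // Offloaded audio uses the pass-through decoder, which hands compressed
+    // access units to the renderer instead of decoding them.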
+    if (audio) {
+        if (mOffloadAudio) {
+            *decoder = new DecoderPassThrough(notify);
+        } else {
+            *decoder = new Decoder(notify);
+        }
+    } else {
+        *decoder = new Decoder(notify, mNativeWindow);
+    }
     (*decoder)->init();
     (*decoder)->configure(format);
 
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.h b/media/libmediaplayerservice/nuplayer/NuPlayer.h
index 5be71fb..d7c00aa 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayer.h
+++ b/media/libmediaplayerservice/nuplayer/NuPlayer.h
@@ -76,6 +76,7 @@
 
 private:
     struct Decoder;
+    struct DecoderPassThrough;
     struct CCDecoder;
     struct GenericSource;
     struct HTTPLiveSource;
@@ -120,6 +121,7 @@
     sp<MediaPlayerBase::AudioSink> mAudioSink;
     sp<Decoder> mVideoDecoder;
     bool mVideoIsAVC;
+    bool mOffloadAudio;
     sp<Decoder> mAudioDecoder;
     sp<CCDecoder> mCCDecoder;
     sp<Renderer> mRenderer;
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.h b/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.h
index 1a4f4ab..4fa0dbd 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.h
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.h
@@ -31,14 +31,14 @@
     Decoder(const sp<AMessage> &notify,
             const sp<NativeWindowWrapper> &nativeWindow = NULL);
 
-    void configure(const sp<AMessage> &format);
-    void init();
+    virtual void configure(const sp<AMessage> &format);
+    virtual void init();
 
-    void signalFlush();
-    void signalResume();
-    void initiateShutdown();
+    virtual void signalFlush();
+    virtual void signalResume();
+    virtual void initiateShutdown();
 
-    bool supportsSeamlessFormatChange(const sp<AMessage> &to) const;
+    virtual bool supportsSeamlessFormatChange(const sp<AMessage> &to) const;
 
     enum {
         kWhatFillThisBuffer      = 'flTB',
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDecoderPassThrough.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerDecoderPassThrough.cpp
new file mode 100644
index 0000000..eec6960
--- /dev/null
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDecoderPassThrough.cpp
@@ -0,0 +1,237 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "NuPlayerDecoderPassThrough"
+#include <utils/Log.h>
+#include <inttypes.h>
+
+#include "NuPlayerDecoderPassThrough.h"
+
+#include <media/ICrypto.h>
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/MediaErrors.h>
+
+namespace android {
+
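+// Cap on the number of access units requested from the source but not yet
+// consumed by the renderer.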
+static const int kMaxPendingBuffers = 10;
+
+NuPlayer::DecoderPassThrough::DecoderPassThrough(
+        const sp<AMessage> &notify)
+    : Decoder(notify),
+      mNotify(notify),
+      mBufferGeneration(0),
+      mReachedEOS(true),
+      mPendingBuffers(0),
+      mComponentName("pass through decoder") {
+    mDecoderLooper = new ALooper;
+    mDecoderLooper->setName("NuPlayerDecoderPassThrough");
+    mDecoderLooper->start(false, false, ANDROID_PRIORITY_AUDIO);
+}
+
+NuPlayer::DecoderPassThrough::~DecoderPassThrough() {
+}
+
+void NuPlayer::DecoderPassThrough::configure(const sp<AMessage> &format) {
+    sp<AMessage> msg = new AMessage(kWhatConfigure, id());
+    msg->setMessage("format", format);
+    msg->post();
+}
+
+void NuPlayer::DecoderPassThrough::init() {
+    mDecoderLooper->registerHandler(this);
+}
+
+void NuPlayer::DecoderPassThrough::signalFlush() {
+    (new AMessage(kWhatFlush, id()))->post();
+}
+
+void NuPlayer::DecoderPassThrough::signalResume() {
+    (new AMessage(kWhatResume, id()))->post();
+}
+
+void NuPlayer::DecoderPassThrough::initiateShutdown() {
+    (new AMessage(kWhatShutdown, id()))->post();
+}
+
+bool NuPlayer::DecoderPassThrough::supportsSeamlessFormatChange(
+        const sp<AMessage> & /* targetFormat */) const {
+    return true;
+}
+
+void NuPlayer::DecoderPassThrough::onConfigure(const sp<AMessage> &format) {
+    ALOGV("[%s] onConfigure", mComponentName.c_str());
+    mPendingBuffers = 0;
+    mReachedEOS = false;
+    ++mBufferGeneration;
+
+    requestABuffer();
+
+    sp<AMessage> notify = mNotify->dup();
+    notify->setInt32("what", kWhatOutputFormatChanged);
+    notify->setMessage("format", format);
+    notify->post();
+}
+
+bool NuPlayer::DecoderPassThrough::isStaleReply(const sp<AMessage> &msg) {
+    int32_t generation;
+    CHECK(msg->findInt32("generation", &generation));
+    return generation != mBufferGeneration;
+}
+
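+// Asks NuPlayer for another compressed access unit, unless we have reached
+// the pending-buffer cap or EOS, and re-posts itself so input keeps flowing.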
+void NuPlayer::DecoderPassThrough::requestABuffer() {
+    if (mPendingBuffers >= kMaxPendingBuffers || mReachedEOS) {
+        ALOGV("[%s] mReachedEOS=%d, max pending buffers(%d:%d)",
+                mComponentName.c_str(), (mReachedEOS ? 1 : 0),
+                mPendingBuffers, kMaxPendingBuffers);
+        return;
+    }
+
+    sp<AMessage> reply = new AMessage(kWhatInputBufferFilled, id());
+    reply->setInt32("generation", mBufferGeneration);
+
+    sp<AMessage> notify = mNotify->dup();
+    notify->setInt32("what", kWhatFillThisBuffer);
+    notify->setMessage("reply", reply);
+    notify->post();
+    mPendingBuffers++;
+
+    sp<AMessage> message = new AMessage(kWhatRequestABuffer, id());
+    message->setInt32("generation", mBufferGeneration);
+    message->post();
+    return;
+}
+
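+// Nothing is decoded here: the filled buffer (still compressed) is handed
+// back to NuPlayer as a buffer for the renderer to drain.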
+void NuPlayer::DecoderPassThrough::onInputBufferFilled(
+        const sp<AMessage> &msg) {
+    if (mReachedEOS) {
+        return;
+    }
+
+    sp<ABuffer> buffer;
+    msg->findBuffer("buffer", &buffer);
+    if (buffer == NULL) {
+        mReachedEOS = true;
+
+        sp<AMessage> notify = mNotify->dup();
+        notify->setInt32("what", kWhatEOS);
+        notify->setInt32("err", ERROR_END_OF_STREAM);
+        notify->post();
+        return;
+    }
+
+    sp<AMessage> reply = new AMessage(kWhatBufferConsumed, id());
+    reply->setInt32("generation", mBufferGeneration);
+
+    sp<AMessage> notify = mNotify->dup();
+    notify->setInt32("what", kWhatDrainThisBuffer);
+    notify->setBuffer("buffer", buffer);
+    notify->setMessage("reply", reply);
+    notify->post();
+}
+
+void NuPlayer::DecoderPassThrough::onBufferConsumed() {
+    mPendingBuffers--;
+    sp<AMessage> message = new AMessage(kWhatRequestABuffer, id());
+    message->setInt32("generation", mBufferGeneration);
+    message->post();
+}
+
+void NuPlayer::DecoderPassThrough::onFlush() {
+    ++mBufferGeneration;
+
+    sp<AMessage> notify = mNotify->dup();
+    notify->setInt32("what", kWhatFlushCompleted);
+    notify->post();
+    mPendingBuffers = 0;
+    mReachedEOS = false;
+}
+
+void NuPlayer::DecoderPassThrough::onShutdown() {
+    ++mBufferGeneration;
+
+    sp<AMessage> notify = mNotify->dup();
+    notify->setInt32("what", kWhatShutdownCompleted);
+    notify->post();
+    mReachedEOS = true;
+}
+
+void NuPlayer::DecoderPassThrough::onMessageReceived(const sp<AMessage> &msg) {
+    ALOGV("[%s] onMessage: %s", mComponentName.c_str(),
+            msg->debugString().c_str());
+
+    switch (msg->what()) {
+        case kWhatConfigure:
+        {
+            sp<AMessage> format;
+            CHECK(msg->findMessage("format", &format));
+            onConfigure(format);
+            break;
+        }
+
+        case kWhatRequestABuffer:
+        {
+            if (!isStaleReply(msg)) {
+                requestABuffer();
+            }
+
+            break;
+        }
+
+        case kWhatInputBufferFilled:
+        {
+            if (!isStaleReply(msg)) {
+                onInputBufferFilled(msg);
+            }
+            break;
+        }
+
+        case kWhatBufferConsumed:
+        {
+            if (!isStaleReply(msg)) {
+                onBufferConsumed();
+            }
+            break;
+        }
+
+        case kWhatFlush:
+        {
+            onFlush();
+            break;
+        }
+
+        case kWhatResume:
+        {
+            requestABuffer();
+            break;
+        }
+
+        case kWhatShutdown:
+        {
+            onShutdown();
+            break;
+        }
+
+        default:
+            TRESPASS();
+            break;
+    }
+}
+
+}  // namespace android
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDecoderPassThrough.h b/media/libmediaplayerservice/nuplayer/NuPlayerDecoderPassThrough.h
new file mode 100644
index 0000000..e9e5658
--- /dev/null
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDecoderPassThrough.h
@@ -0,0 +1,77 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef NUPLAYER_DECODER_PASS_THROUGH_H_
+
+#define NUPLAYER_DECODER_PASS_THROUGH_H_
+
+#include "NuPlayer.h"
+
+#include "NuPlayerDecoder.h"
+
+namespace android {
+
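+// A decoder stand-in used for offloaded audio: it performs no decoding and
+// simply shuttles compressed access units from the source to the renderer.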
+struct NuPlayer::DecoderPassThrough : public Decoder {
+    DecoderPassThrough(const sp<AMessage> &notify);
+
+    virtual void configure(const sp<AMessage> &format);
+    virtual void init();
+
+    virtual void signalFlush();
+    virtual void signalResume();
+    virtual void initiateShutdown();
+
+    virtual bool supportsSeamlessFormatChange(const sp<AMessage> &to) const;
+
+protected:
+
+    virtual ~DecoderPassThrough();
+
+    virtual void onMessageReceived(const sp<AMessage> &msg);
+
+private:
+    enum {
+        kWhatRequestABuffer     = 'reqB',
+        kWhatConfigure          = 'conf',
+        kWhatInputBufferFilled  = 'inpF',
+        kWhatBufferConsumed     = 'bufC',
+        kWhatFlush              = 'flus',
+        kWhatShutdown           = 'shuD',
+    };
+
+    sp<AMessage> mNotify;
+    sp<ALooper> mDecoderLooper;
+
+    void requestABuffer();
+    bool isStaleReply(const sp<AMessage> &msg);
+
+    void onConfigure(const sp<AMessage> &format);
+    void onFlush();
+    void onInputBufferFilled(const sp<AMessage> &msg);
+    void onBufferConsumed();
+    void onShutdown();
+
+    int32_t mBufferGeneration;
+    bool mReachedEOS;
+    int32_t mPendingBuffers;
+    AString mComponentName;
+
+    DISALLOW_EVIL_CONSTRUCTORS(DecoderPassThrough);
+};
+
+}  // namespace android
+
+#endif  // NUPLAYER_DECODER_PASS_THROUGH_H_
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
index a070c1a..f520ff7 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
@@ -23,6 +23,8 @@
 #include <media/stagefright/foundation/ABuffer.h>
 #include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/MediaErrors.h>
+#include <media/stagefright/MetaData.h>
 
 namespace android {
 
@@ -41,6 +43,7 @@
       mDrainVideoQueuePending(false),
       mAudioQueueGeneration(0),
       mVideoQueueGeneration(0),
+      mFirstAudioTimeUs(-1),
       mAnchorTimeMediaUs(-1),
       mAnchorTimeRealUs(-1),
       mFlushingAudio(false),
@@ -57,6 +60,11 @@
 }
 
 NuPlayer::Renderer::~Renderer() {
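+    // In offload mode the sink pulls data from this renderer via callback,
+    // so stop, flush and close it before the renderer goes away.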
+    if (offloadingAudio()) {
+        mAudioSink->stop();
+        mAudioSink->flush();
+        mAudioSink->close();
+    }
 }
 
 void NuPlayer::Renderer::queueBuffer(
@@ -97,6 +105,7 @@
 }
 
 void NuPlayer::Renderer::signalTimeDiscontinuity() {
+    Mutex::Autolock autoLock(mLock);
     // CHECK(mAudioQueue.empty());
     // CHECK(mVideoQueue.empty());
     mAnchorTimeMediaUs = -1;
@@ -114,6 +123,12 @@
 
 void NuPlayer::Renderer::onMessageReceived(const sp<AMessage> &msg) {
     switch (msg->what()) {
+        case kWhatStopAudioSink:
+        {
+            mAudioSink->stop();
+            break;
+        }
+
         case kWhatDrainAudioQueue:
         {
             int32_t generation;
@@ -140,7 +155,10 @@
 
                 // Let's give it more data after about half that time
                 // has elapsed.
-                postDrainAudioQueue(delayUs / 2);
+                // kWhatDrainAudioQueue is only posted in non-offload mode, and
+                // mLock only guards against the AudioSink callback thread used
+                // when offloading, so there is no need to hold it here.
+                postDrainAudioQueue_l(delayUs / 2);
             }
             break;
         }
@@ -185,6 +203,12 @@
             break;
         }
 
+        case kWhatDisableOffloadAudio:
+        {
+            onDisableOffloadAudio();
+            break;
+        }
+
         case kWhatPause:
         {
             onPause();
@@ -203,8 +227,9 @@
     }
 }
 
-void NuPlayer::Renderer::postDrainAudioQueue(int64_t delayUs) {
-    if (mDrainAudioQueuePending || mSyncQueues || mPaused) {
+void NuPlayer::Renderer::postDrainAudioQueue_l(int64_t delayUs) {
+    if (mDrainAudioQueuePending || mSyncQueues || mPaused
+            || offloadingAudio()) {
         return;
     }
 
@@ -222,6 +247,10 @@
     (new AMessage(kWhatAudioSinkChanged, id()))->post();
 }
 
+void NuPlayer::Renderer::signalDisableOffloadAudio() {
+    (new AMessage(kWhatDisableOffloadAudio, id()))->post();
+}
+
 void NuPlayer::Renderer::prepareForMediaRenderingStart() {
     mAudioRenderingStartGeneration = mAudioQueueGeneration;
     mVideoRenderingStartGeneration = mVideoQueueGeneration;
@@ -239,6 +268,109 @@
     }
 }
 
+// static
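+// Invoked by the offloaded AudioSink on its callback thread, either to pull
+// more compressed data or to report that the stream has ended.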
+size_t NuPlayer::Renderer::AudioSinkCallback(
+        MediaPlayerBase::AudioSink * /* audioSink */,
+        void *buffer,
+        size_t size,
+        void *cookie,
+        MediaPlayerBase::AudioSink::cb_event_t event) {
+    NuPlayer::Renderer *me = (NuPlayer::Renderer *)cookie;
+
+    switch (event) {
+        case MediaPlayerBase::AudioSink::CB_EVENT_FILL_BUFFER:
+        {
+            return me->fillAudioBuffer(buffer, size);
+        }
+
+        case MediaPlayerBase::AudioSink::CB_EVENT_STREAM_END:
+        {
+            me->notifyEOS(true /* audio */, ERROR_END_OF_STREAM);
+            break;
+        }
+
+        case MediaPlayerBase::AudioSink::CB_EVENT_TEAR_DOWN:
+        {
+            // TODO: send this to player.
+            break;
+        }
+    }
+
+    return 0;
+}
+
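+// Runs on the AudioSink callback thread: copies queued compressed access
+// units into the sink's buffer and updates the anchor times used for
+// position reporting.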
+size_t NuPlayer::Renderer::fillAudioBuffer(void *buffer, size_t size) {
+    Mutex::Autolock autoLock(mLock);
+
+    if (!offloadingAudio()) {
+        return 0;
+    }
+
+    bool hasEOS = false;
+
+    size_t sizeCopied = 0;
+    while (sizeCopied < size && !mAudioQueue.empty()) {
+        QueueEntry *entry = &*mAudioQueue.begin();
+
+        if (entry->mBuffer == NULL) { // EOS
+            hasEOS = true;
+            mAudioQueue.erase(mAudioQueue.begin());
+            entry = NULL;
+            break;
+        }
+
+        if (entry->mOffset == 0) {
+            int64_t mediaTimeUs;
+            CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
+            ALOGV("rendering audio at media time %.2f secs", mediaTimeUs / 1E6);
+            if (mFirstAudioTimeUs == -1) {
+                mFirstAudioTimeUs = mediaTimeUs;
+            }
+            mAnchorTimeMediaUs = mediaTimeUs;
+
+            uint32_t numFramesPlayed;
+            CHECK_EQ(mAudioSink->getPosition(&numFramesPlayed), (status_t)OK);
+
+            // TODO: figure out how to calculate initial latency.
+            // Otherwise, the initial time is not correct till the first sample
+            // is played.
+            mAnchorTimeMediaUs = mFirstAudioTimeUs
+                    + (numFramesPlayed * mAudioSink->msecsPerFrame()) * 1000ll;
+            mAnchorTimeRealUs = ALooper::GetNowUs();
+        }
+
+        size_t copy = entry->mBuffer->size() - entry->mOffset;
+        size_t sizeRemaining = size - sizeCopied;
+        if (copy > sizeRemaining) {
+            copy = sizeRemaining;
+        }
+
+        memcpy((char *)buffer + sizeCopied,
+               entry->mBuffer->data() + entry->mOffset,
+               copy);
+
+        entry->mOffset += copy;
+        if (entry->mOffset == entry->mBuffer->size()) {
+            entry->mNotifyConsumed->post();
+            mAudioQueue.erase(mAudioQueue.begin());
+            entry = NULL;
+        }
+        sizeCopied += copy;
+        notifyIfMediaRenderingStarted();
+    }
+
+    if (sizeCopied != 0) {
+        notifyPosition();
+    }
+
+    if (hasEOS) {
+        (new AMessage(kWhatStopAudioSink, id()))->post();
+    }
+
+    return sizeCopied;
+}
+
 bool NuPlayer::Renderer::onDrainAudioQueue() {
     uint32_t numFramesPlayed;
     if (mAudioSink->getPosition(&numFramesPlayed) != OK) {
@@ -474,13 +606,15 @@
     entry.mFinalResult = OK;
 
     if (audio) {
+        Mutex::Autolock autoLock(mLock);
         mAudioQueue.push_back(entry);
-        postDrainAudioQueue();
+        postDrainAudioQueue_l();
     } else {
         mVideoQueue.push_back(entry);
         postDrainVideoQueue();
     }
 
+    Mutex::Autolock autoLock(mLock);
     if (!mSyncQueues || mAudioQueue.empty() || mVideoQueue.empty()) {
         return;
     }
@@ -490,7 +624,7 @@
 
     if (firstAudioBuffer == NULL || firstVideoBuffer == NULL) {
         // EOS signalled on either queue.
-        syncQueuesDone();
+        syncQueuesDone_l();
         return;
     }
 
@@ -514,10 +648,10 @@
         return;
     }
 
-    syncQueuesDone();
+    syncQueuesDone_l();
 }
 
-void NuPlayer::Renderer::syncQueuesDone() {
+void NuPlayer::Renderer::syncQueuesDone_l() {
     if (!mSyncQueues) {
         return;
     }
@@ -525,7 +659,7 @@
     mSyncQueues = false;
 
     if (!mAudioQueue.empty()) {
-        postDrainAudioQueue();
+        postDrainAudioQueue_l();
     }
 
     if (!mVideoQueue.empty()) {
@@ -549,14 +683,16 @@
     entry.mFinalResult = finalResult;
 
     if (audio) {
+        Mutex::Autolock autoLock(mLock);
         if (mAudioQueue.empty() && mSyncQueues) {
-            syncQueuesDone();
+            syncQueuesDone_l();
         }
         mAudioQueue.push_back(entry);
-        postDrainAudioQueue();
+        postDrainAudioQueue_l();
     } else {
         if (mVideoQueue.empty() && mSyncQueues) {
-            syncQueuesDone();
+            Mutex::Autolock autoLock(mLock);
+            syncQueuesDone_l();
         }
         mVideoQueue.push_back(entry);
         postDrainVideoQueue();
@@ -575,11 +711,17 @@
     // corresponding discontinuity on the other queue.
     // Therefore we'll stop syncing the queues if at least one of them
     // is flushed.
-    syncQueuesDone();
+    {
+        Mutex::Autolock autoLock(mLock);
+        syncQueuesDone_l();
+    }
 
     ALOGV("flushing %s", audio ? "audio" : "video");
     if (audio) {
-        flushQueue(&mAudioQueue);
+        {
+            Mutex::Autolock autoLock(mLock);
+            flushQueue(&mAudioQueue);
+        }
 
         Mutex::Autolock autoLock(mFlushLock);
         mFlushingAudio = false;
@@ -588,6 +730,12 @@
         ++mAudioQueueGeneration;
 
         prepareForMediaRenderingStart();
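+        // The offloaded sink buffers compressed data internally, so it has
+        // to be flushed as well before playback resumes.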
+        if (offloadingAudio()) {
+            mFirstAudioTimeUs = -1;
+            mAudioSink->pause();
+            mAudioSink->flush();
+            mAudioSink->start();
+        }
     } else {
         flushQueue(&mVideoQueue);
 
@@ -649,6 +797,9 @@
 }
 
 void NuPlayer::Renderer::onAudioSinkChanged() {
+    if (offloadingAudio()) {
+        return;
+    }
     CHECK(!mDrainAudioQueuePending);
     mNumFramesWritten = 0;
     uint32_t written;
@@ -657,6 +808,11 @@
     }
 }
 
+void NuPlayer::Renderer::onDisableOffloadAudio() {
+    Mutex::Autolock autoLock(mLock);
+    mFlags &= ~FLAG_OFFLOAD_AUDIO;
+}
+
 void NuPlayer::Renderer::notifyPosition() {
     if (mAnchorTimeRealUs < 0 || mAnchorTimeMediaUs < 0) {
         return;
@@ -711,8 +867,9 @@
 
     mPaused = false;
 
+    Mutex::Autolock autoLock(mLock);
     if (!mAudioQueue.empty()) {
-        postDrainAudioQueue();
+        postDrainAudioQueue_l();
     }
 
     if (!mVideoQueue.empty()) {
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.h b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.h
index 94a05ea..6e86a8f 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.h
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.h
@@ -27,11 +27,17 @@
 struct NuPlayer::Renderer : public AHandler {
     enum Flags {
         FLAG_REAL_TIME = 1,
+        FLAG_OFFLOAD_AUDIO = 2,
     };
     Renderer(const sp<MediaPlayerBase::AudioSink> &sink,
              const sp<AMessage> &notify,
              uint32_t flags = 0);
 
+    static size_t AudioSinkCallback(
+            MediaPlayerBase::AudioSink *audioSink,
+            void *data, size_t size, void *me,
+            MediaPlayerBase::AudioSink::cb_event_t event);
+
     void queueBuffer(
             bool audio,
             const sp<ABuffer> &buffer,
@@ -45,6 +51,8 @@
 
     void signalAudioSinkChanged();
 
+    void signalDisableOffloadAudio();
+
     void pause();
     void resume();
 
@@ -63,14 +71,16 @@
 
 private:
     enum {
-        kWhatDrainAudioQueue    = 'draA',
-        kWhatDrainVideoQueue    = 'draV',
-        kWhatQueueBuffer        = 'queB',
-        kWhatQueueEOS           = 'qEOS',
-        kWhatFlush              = 'flus',
-        kWhatAudioSinkChanged   = 'auSC',
-        kWhatPause              = 'paus',
-        kWhatResume             = 'resm',
+        kWhatDrainAudioQueue     = 'draA',
+        kWhatDrainVideoQueue     = 'draV',
+        kWhatQueueBuffer         = 'queB',
+        kWhatQueueEOS            = 'qEOS',
+        kWhatFlush               = 'flus',
+        kWhatAudioSinkChanged    = 'auSC',
+        kWhatPause               = 'paus',
+        kWhatResume              = 'resm',
+        kWhatStopAudioSink       = 'stpA',
+        kWhatDisableOffloadAudio = 'noOA',
     };
 
     struct QueueEntry {
@@ -84,6 +94,7 @@
 
     sp<MediaPlayerBase::AudioSink> mAudioSink;
     sp<AMessage> mNotify;
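+    // Protects the queues and anchor times, which the AudioSink callback
+    // thread also touches while audio is offloaded.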
+    Mutex mLock;
     uint32_t mFlags;
     List<QueueEntry> mAudioQueue;
     List<QueueEntry> mVideoQueue;
@@ -94,6 +105,7 @@
     int32_t mAudioQueueGeneration;
     int32_t mVideoQueueGeneration;
 
+    int64_t mFirstAudioTimeUs;
     int64_t mAnchorTimeMediaUs;
     int64_t mAnchorTimeRealUs;
 
@@ -113,8 +125,10 @@
     int64_t mLastPositionUpdateUs;
     int64_t mVideoLateByUs;
 
+    size_t fillAudioBuffer(void *buffer, size_t size);
+
     bool onDrainAudioQueue();
-    void postDrainAudioQueue(int64_t delayUs = 0);
+    void postDrainAudioQueue_l(int64_t delayUs = 0);
 
     void onDrainVideoQueue();
     void postDrainVideoQueue();
@@ -126,6 +140,7 @@
     void onQueueEOS(const sp<AMessage> &msg);
     void onFlush(const sp<AMessage> &msg);
     void onAudioSinkChanged();
+    void onDisableOffloadAudio();
     void onPause();
     void onResume();
 
@@ -137,7 +152,9 @@
 
     void flushQueue(List<QueueEntry> *queue);
     bool dropBufferWhileFlushing(bool audio, const sp<AMessage> &msg);
-    void syncQueuesDone();
+    void syncQueuesDone_l();
+
+    bool offloadingAudio() const { return (mFlags & FLAG_OFFLOAD_AUDIO) != 0; }
 
     DISALLOW_EVIL_CONSTRUCTORS(Renderer);
 };
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerSource.h b/media/libmediaplayerservice/nuplayer/NuPlayerSource.h
index f5a1d6d..632c4a6 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerSource.h
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerSource.h
@@ -64,6 +64,7 @@
     virtual status_t feedMoreTSData() = 0;
 
     virtual sp<AMessage> getFormat(bool audio);
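+    // Raw MetaData for a track; NuPlayer uses the audio side to decide on
+    // offloading and to send stream metadata to the HAL.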
+    virtual sp<MetaData> getFormatMeta(bool /* audio */) { return NULL; }
 
     virtual status_t dequeueAccessUnit(
             bool audio, sp<ABuffer> *accessUnit) = 0;
@@ -97,8 +98,6 @@
 
     virtual void onMessageReceived(const sp<AMessage> &msg);
 
-    virtual sp<MetaData> getFormatMeta(bool /* audio */) { return NULL; }
-
     sp<AMessage> dupNotify() const { return mNotify->dup(); }
 
     void notifyFlagsChanged(uint32_t flags);
diff --git a/media/libstagefright/Utils.cpp b/media/libstagefright/Utils.cpp
index d53051e..5771c6c 100644
--- a/media/libstagefright/Utils.cpp
+++ b/media/libstagefright/Utils.cpp
@@ -84,6 +84,11 @@
         msg->setInt64("durationUs", durationUs);
     }
 
+    // Exposed to NuPlayer as "bit-rate" so it can fill in the offload info.
+    int avgBitRate;
+    if (meta->findInt32(kKeyBitRate, &avgBitRate)) {
+        msg->setInt32("bit-rate", avgBitRate);
+    }
+
     int32_t isSync;
     if (meta->findInt32(kKeyIsSyncFrame, &isSync) && isSync != 0) {
         msg->setInt32("is-sync-frame", 1);