NuPlayer: set anchor time for each audio buffer.

Use the anchor time to compute the current playback position.
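
The core of the change: keep a single media-time/real-time anchor pair and extrapolate the current position from it. A minimal sketch of that computation, assuming the field names used in this patch (the standalone struct is illustrative only, not part of the change):

    #include <cstdint>

    struct AnchorClock {
        int64_t mAnchorTimeMediaUs = -1;           // media time at the anchor point
        int64_t mAnchorTimeRealUs = -1;            // real (system) time at the anchor point
        int64_t mAudioFirstAnchorTimeMediaUs = -1; // media time of the first audio buffer
        int64_t mPauseStartedTimeRealUs = -1;      // real time when paused, -1 while playing

        // Mirrors Renderer::getCurrentPosition(): extrapolate from the anchor,
        // subtract any time spent paused, and never report a position earlier
        // than the first audio anchor.
        bool getCurrentPosition(int64_t nowUs, int64_t *mediaUs) const {
            if (mAnchorTimeMediaUs < 0) {
                return false;  // no anchor yet, e.g. right after a flush or seek
            }
            int64_t positionUs = (nowUs - mAnchorTimeRealUs) + mAnchorTimeMediaUs;
            if (mPauseStartedTimeRealUs != -1) {
                positionUs -= (nowUs - mPauseStartedTimeRealUs);
            }
            if (positionUs < mAudioFirstAnchorTimeMediaUs) {
                positionUs = mAudioFirstAnchorTimeMediaUs;
            }
            *mediaUs = (positionUs <= 0) ? 0 : positionUs;
            return true;
        }
    };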

Bug: 17999949
Bug: 18008307
Bug: 18032127
Change-Id: Ie493c9a1d45d7b788aef65d863f710da6326fcc1
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
index e5c83dd..46a2590 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
@@ -66,8 +66,8 @@
       mAudioQueueGeneration(0),
       mVideoQueueGeneration(0),
       mAudioFirstAnchorTimeMediaUs(-1),
-      mVideoAnchorTimeMediaUs(-1),
-      mVideoAnchorTimeRealUs(-1),
+      mAnchorTimeMediaUs(-1),
+      mAnchorTimeRealUs(-1),
       mVideoLateByUs(0ll),
       mHasAudio(false),
       mHasVideo(false),
@@ -140,7 +140,7 @@
     // CHECK(mAudioQueue.empty());
     // CHECK(mVideoQueue.empty());
     setAudioFirstAnchorTime(-1);
-    setVideoAnchorTime(-1, -1);
+    setAnchorTime(-1, -1);
     setVideoLateByUs(0);
     mSyncQueues = false;
 }
@@ -177,22 +177,19 @@
         return NO_INIT;
     }
 
-    int64_t positionUs = 0;
-    if (!mHasAudio) {
-        if (mVideoAnchorTimeMediaUs < 0) {
-            return NO_INIT;
-        }
-        positionUs = (nowUs - mVideoAnchorTimeRealUs) + mVideoAnchorTimeMediaUs;
-
-        if (mPauseStartedTimeRealUs != -1) {
-            positionUs -= (nowUs - mPauseStartedTimeRealUs);
-        }
-    } else {
-        if (mAudioFirstAnchorTimeMediaUs < 0) {
-            return NO_INIT;
-        }
-        positionUs = mAudioFirstAnchorTimeMediaUs + getPlayedOutAudioDurationUs(nowUs);
+    if (mAnchorTimeMediaUs < 0) {
+        return NO_INIT;
     }
+    int64_t positionUs = (nowUs - mAnchorTimeRealUs) + mAnchorTimeMediaUs;
+
+    if (mPauseStartedTimeRealUs != -1) {
+        positionUs -= (nowUs - mPauseStartedTimeRealUs);
+    }
+
+    if (positionUs < mAudioFirstAnchorTimeMediaUs) {
+        positionUs = mAudioFirstAnchorTimeMediaUs;
+    }
+
     *mediaUs = (positionUs <= 0) ? 0 : positionUs;
     return OK;
 }
@@ -218,10 +215,13 @@
     }
 }
 
-void NuPlayer::Renderer::setVideoAnchorTime(int64_t mediaUs, int64_t realUs) {
+void NuPlayer::Renderer::setAnchorTime(int64_t mediaUs, int64_t realUs, bool resume) {
     Mutex::Autolock autoLock(mTimeLock);
-    mVideoAnchorTimeMediaUs = mediaUs;
-    mVideoAnchorTimeRealUs = realUs;
+    mAnchorTimeMediaUs = mediaUs;
+    mAnchorTimeRealUs = realUs;
+    if (resume) {
+        mPauseStartedTimeRealUs = -1;
+    }
 }
 
 void NuPlayer::Renderer::setVideoLateByUs(int64_t lateUs) {
@@ -526,7 +526,7 @@
             int64_t mediaTimeUs;
             CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
             ALOGV("rendering audio at media time %.2f secs", mediaTimeUs / 1E6);
-            setAudioFirstAnchorTimeIfNeeded(mediaTimeUs);
+            onNewAudioMediaTime(mediaTimeUs);
         }
 
         size_t copy = entry->mBuffer->size() - entry->mOffset;
@@ -597,8 +597,7 @@
             int64_t mediaTimeUs;
             CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
             ALOGV("rendering audio at media time %.2f secs", mediaTimeUs / 1E6);
-
-            setAudioFirstAnchorTimeIfNeeded(mediaTimeUs);
+            onNewAudioMediaTime(mediaTimeUs);
         }
 
         size_t copy = entry->mBuffer->size() - entry->mOffset;
@@ -659,11 +658,24 @@
 int64_t NuPlayer::Renderer::getRealTimeUs(int64_t mediaTimeUs, int64_t nowUs) {
     int64_t currentPositionUs;
     if (getCurrentPosition(&currentPositionUs, nowUs) != OK) {
-        currentPositionUs = 0;
+        // If we failed to get the current position, e.g. because the audio
+        // clock is not ready yet, just play out the video immediately without delay.
+        return nowUs;
     }
     return (mediaTimeUs - currentPositionUs) + nowUs;
 }
 
+void NuPlayer::Renderer::onNewAudioMediaTime(int64_t mediaTimeUs) {
+    // TRICKY: the vorbis decoder generates multiple frames with the same
+    // timestamp, so only update the anchor on the first frame with a given timestamp.
+    if (mediaTimeUs == mAnchorTimeMediaUs) {
+        return;
+    }
+    setAudioFirstAnchorTimeIfNeeded(mediaTimeUs);
+    int64_t nowUs = ALooper::GetNowUs();
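+    // Anchor at the real time when this buffer will actually be heard: now plus
+    // the duration of audio already written to the sink but not yet played out.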
+    setAnchorTime(mediaTimeUs, nowUs + getPendingAudioPlayoutDurationUs(nowUs));
+}
+
 void NuPlayer::Renderer::postDrainVideoQueue() {
     if (mDrainVideoQueuePending
             || mSyncQueues
@@ -698,8 +710,8 @@
         int64_t mediaTimeUs;
         CHECK(entry.mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
 
-        if (mVideoAnchorTimeMediaUs < 0) {
-            setVideoAnchorTime(mediaTimeUs, nowUs);
+        if (mAnchorTimeMediaUs < 0) {
+            setAnchorTime(mediaTimeUs, nowUs);
             realTimeUs = nowUs;
         } else {
             realTimeUs = getRealTimeUs(mediaTimeUs, nowUs);
@@ -769,14 +781,14 @@
         } else {
             ALOGV("rendering video at media time %.2f secs",
                     (mFlags & FLAG_REAL_TIME ? realTimeUs :
-                    (realTimeUs + mVideoAnchorTimeMediaUs - mVideoAnchorTimeRealUs)) / 1E6);
+                    (realTimeUs + mAnchorTimeMediaUs - mAnchorTimeRealUs)) / 1E6);
         }
     } else {
         setVideoLateByUs(0);
-        if (!mVideoSampleReceived) {
+        if (!mVideoSampleReceived && !mHasAudio) {
             // This will ensure that the first frame after a flush won't be used as anchor
             // when renderer is in paused state, because resume can happen any time after seek.
-            setVideoAnchorTime(-1, -1);
+            setAnchorTime(-1, -1);
         }
     }
 
@@ -1108,9 +1120,8 @@
     mPaused = false;
     if (mPauseStartedTimeRealUs != -1) {
         int64_t newAnchorRealUs =
-            mVideoAnchorTimeRealUs + ALooper::GetNowUs() - mPauseStartedTimeRealUs;
-        setVideoAnchorTime(mVideoAnchorTimeMediaUs, newAnchorRealUs);
-        setPauseStartedTimeRealUs(-1);
+            mAnchorTimeRealUs + ALooper::GetNowUs() - mPauseStartedTimeRealUs;
+        setAnchorTime(mAnchorTimeMediaUs, newAnchorRealUs, true /* resume */);
     }
 
     if (!mAudioQueue.empty()) {
@@ -1175,7 +1186,7 @@
 
     // TODO: remove the (int32_t) casting below as it may overflow at 12.4 hours.
     //CHECK_EQ(numFramesPlayed & (1 << 31), 0);  // can't be negative until 12.4 hrs, test
-    int64_t durationUs = (int32_t)numFramesPlayed * 1000LL * mAudioSink->msecsPerFrame()
+    int64_t durationUs = (int64_t)((int32_t)numFramesPlayed * 1000LL * mAudioSink->msecsPerFrame())
             + nowUs - numFramesPlayedAt;
     if (durationUs < 0) {
         // Occurs when numFramesPlayed position is very small and the following: