blob: 57a652cfc32fc3ac7166e06df08794ce8ffb1952 [file] [log] [blame]
Andreas Huberf9334412010-12-15 15:17:42 -08001/*
2 * Copyright (C) 2010 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17//#define LOG_NDEBUG 0
18#define LOG_TAG "NuPlayerRenderer"
19#include <utils/Log.h>
20
21#include "NuPlayerRenderer.h"
22
23#include <media/stagefright/foundation/ABuffer.h>
24#include <media/stagefright/foundation/ADebug.h>
25
26namespace android {
27
// Renders decoded audio and video buffers, pacing video against the
// audio clock (or against the system clock when no audio sink exists).
// All members are manipulated on the renderer's looper thread, except
// the mFlushing* flags which are additionally guarded by mFlushLock.
NuPlayer::Renderer::Renderer(
        const sp<MediaPlayerBase::AudioSink> &sink,
        const sp<AMessage> &notify)
    : mAudioSink(sink),
      mNotify(notify),
      mNumFramesWritten(0),
      mDrainAudioQueuePending(false),
      mDrainVideoQueuePending(false),
      mAudioQueueGeneration(0),
      mVideoQueueGeneration(0),
      mAnchorTimeMediaUs(-1),          // -1 => no media-time anchor yet
      mAnchorTimeRealUs(-1),           // -1 => no real-time anchor yet
      mFlushingAudio(false),
      mFlushingVideo(false),
      mHasAudio(mAudioSink != NULL),
      // NOTE(review): assumes a video track is always present — confirm
      // against the callers that construct the Renderer.
      mHasVideo(true),
      // Only synchronize queue start times when both streams exist.
      // NOTE(review): this reads mHasAudio/mHasVideo; actual member
      // initialization order follows declaration order in the header
      // (not visible here) — confirm they are declared before mSyncQueues.
      mSyncQueues(mHasAudio && mHasVideo) {
}
46
// Nothing to release explicitly; all members are reference-counted.
NuPlayer::Renderer::~Renderer() {
}
49
50void NuPlayer::Renderer::queueBuffer(
51 bool audio,
52 const sp<ABuffer> &buffer,
53 const sp<AMessage> &notifyConsumed) {
54 sp<AMessage> msg = new AMessage(kWhatQueueBuffer, id());
55 msg->setInt32("audio", static_cast<int32_t>(audio));
56 msg->setObject("buffer", buffer);
57 msg->setMessage("notifyConsumed", notifyConsumed);
58 msg->post();
59}
60
61void NuPlayer::Renderer::queueEOS(bool audio, status_t finalResult) {
62 CHECK_NE(finalResult, (status_t)OK);
63
64 sp<AMessage> msg = new AMessage(kWhatQueueEOS, id());
65 msg->setInt32("audio", static_cast<int32_t>(audio));
66 msg->setInt32("finalResult", finalResult);
67 msg->post();
68}
69
70void NuPlayer::Renderer::flush(bool audio) {
71 {
72 Mutex::Autolock autoLock(mFlushLock);
73 if (audio) {
74 CHECK(!mFlushingAudio);
75 mFlushingAudio = true;
76 } else {
77 CHECK(!mFlushingVideo);
78 mFlushingVideo = true;
79 }
80 }
81
82 sp<AMessage> msg = new AMessage(kWhatFlush, id());
83 msg->setInt32("audio", static_cast<int32_t>(audio));
84 msg->post();
85}
86
// Resets the rendering clock after a time discontinuity. Both queues
// must already have been drained (e.g. by a preceding flush).
// NOTE(review): this touches looper-thread state without posting a
// message — presumably only called from a safe context; verify callers.
void NuPlayer::Renderer::signalTimeDiscontinuity() {
    CHECK(mAudioQueue.empty());
    CHECK(mVideoQueue.empty());
    // Invalidate the media-time -> real-time anchor; it will be
    // re-established by the next rendered buffer.
    mAnchorTimeMediaUs = -1;
    mAnchorTimeRealUs = -1;
    // Re-arm initial A/V queue synchronization, same as at construction.
    mSyncQueues = mHasAudio && mHasVideo;
}
94
// Looper-thread dispatch for all renderer events. Drain messages carry
// a generation number so that messages scheduled before a flush (which
// bumps the generation) are recognized as stale and ignored.
void NuPlayer::Renderer::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        case kWhatDrainAudioQueue:
        {
            int32_t generation;
            CHECK(msg->findInt32("generation", &generation));
            if (generation != mAudioQueueGeneration) {
                // Stale drain request from before a flush — ignore.
                break;
            }

            mDrainAudioQueuePending = false;

            onDrainAudioQueue();

            // Re-arm the periodic drain while data remains queued.
            postDrainAudioQueue();
            break;
        }

        case kWhatDrainVideoQueue:
        {
            int32_t generation;
            CHECK(msg->findInt32("generation", &generation));
            if (generation != mVideoQueueGeneration) {
                // Stale drain request from before a flush — ignore.
                break;
            }

            mDrainVideoQueuePending = false;

            onDrainVideoQueue();

            // Schedule rendering of the next video buffer, if any.
            postDrainVideoQueue();
            break;
        }

        case kWhatQueueBuffer:
        {
            onQueueBuffer(msg);
            break;
        }

        case kWhatQueueEOS:
        {
            onQueueEOS(msg);
            break;
        }

        case kWhatFlush:
        {
            onFlush(msg);
            break;
        }

        case kWhatAudioSinkChanged:
        {
            onAudioSinkChanged();
            break;
        }

        default:
            TRESPASS();
            break;
    }
}
158
159void NuPlayer::Renderer::postDrainAudioQueue() {
160 if (mDrainAudioQueuePending || mSyncQueues) {
161 return;
162 }
163
164 if (mAudioQueue.empty()) {
165 return;
166 }
167
168 mDrainAudioQueuePending = true;
169 sp<AMessage> msg = new AMessage(kWhatDrainAudioQueue, id());
170 msg->setInt32("generation", mAudioQueueGeneration);
171 msg->post(10000);
172}
173
Andreas Huber3831a062010-12-21 10:22:33 -0800174void NuPlayer::Renderer::signalAudioSinkChanged() {
175 (new AMessage(kWhatAudioSinkChanged, id()))->post();
176}
177
Andreas Huberf9334412010-12-15 15:17:42 -0800178void NuPlayer::Renderer::onDrainAudioQueue() {
179 uint32_t numFramesPlayed;
180 CHECK_EQ(mAudioSink->getPosition(&numFramesPlayed), (status_t)OK);
181
182 ssize_t numFramesAvailableToWrite =
183 mAudioSink->frameCount() - (mNumFramesWritten - numFramesPlayed);
184
185 CHECK_GE(numFramesAvailableToWrite, 0);
186
187 size_t numBytesAvailableToWrite =
188 numFramesAvailableToWrite * mAudioSink->frameSize();
189
190 while (numBytesAvailableToWrite > 0) {
191 if (mAudioQueue.empty()) {
192 break;
193 }
194
195 QueueEntry *entry = &*mAudioQueue.begin();
196
197 if (entry->mBuffer == NULL) {
198 // EOS
199
200 notifyEOS(true /* audio */);
201
202 mAudioQueue.erase(mAudioQueue.begin());
203 entry = NULL;
204 return;
205 }
206
207 if (entry->mOffset == 0) {
208 int64_t mediaTimeUs;
209 CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
210
211 LOGV("rendering audio at media time %.2f secs", mediaTimeUs / 1E6);
212
213 mAnchorTimeMediaUs = mediaTimeUs;
214
215 uint32_t numFramesPlayed;
216 CHECK_EQ(mAudioSink->getPosition(&numFramesPlayed), (status_t)OK);
217
218 uint32_t numFramesPendingPlayout =
219 mNumFramesWritten - numFramesPlayed;
220
221 int64_t realTimeOffsetUs =
222 (mAudioSink->latency() / 2 /* XXX */
223 + numFramesPendingPlayout
224 * mAudioSink->msecsPerFrame()) * 1000ll;
225
226 // LOGI("realTimeOffsetUs = %lld us", realTimeOffsetUs);
227
228 mAnchorTimeRealUs =
229 ALooper::GetNowUs() + realTimeOffsetUs;
230 }
231
232 size_t copy = entry->mBuffer->size() - entry->mOffset;
233 if (copy > numBytesAvailableToWrite) {
234 copy = numBytesAvailableToWrite;
235 }
236
237 CHECK_EQ(mAudioSink->write(
238 entry->mBuffer->data() + entry->mOffset, copy),
239 (ssize_t)copy);
240
241 entry->mOffset += copy;
242 if (entry->mOffset == entry->mBuffer->size()) {
243 entry->mNotifyConsumed->post();
244 mAudioQueue.erase(mAudioQueue.begin());
245 entry = NULL;
246 }
247
248 numBytesAvailableToWrite -= copy;
249 mNumFramesWritten += copy / mAudioSink->frameSize();
250 }
251}
252
// Schedules rendering of the video buffer at the head of the queue,
// delayed so it is presented at its media time relative to the clock
// anchor (established by audio, or by the first video frame when there
// is no audio). Runs on the looper thread.
void NuPlayer::Renderer::postDrainVideoQueue() {
    if (mDrainVideoQueuePending || mSyncQueues) {
        return;
    }

    if (mVideoQueue.empty()) {
        return;
    }

    QueueEntry &entry = *mVideoQueue.begin();

    sp<AMessage> msg = new AMessage(kWhatDrainVideoQueue, id());
    msg->setInt32("generation", mVideoQueueGeneration);

    int64_t delayUs;

    if (entry.mBuffer == NULL) {
        // EOS doesn't carry a timestamp.
        delayUs = 0;
    } else {
        int64_t mediaTimeUs;
        CHECK(entry.mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));

        if (mAnchorTimeMediaUs < 0) {
            // No clock anchor yet: render immediately.
            delayUs = 0;

            if (!mHasAudio) {
                // Without audio, the first video frame establishes the
                // anchor itself.
                mAnchorTimeMediaUs = mediaTimeUs;
                mAnchorTimeRealUs = ALooper::GetNowUs();
            }
        } else {
            // Map media time to real time via the anchor and wait out
            // the difference.
            int64_t realTimeUs =
                (mediaTimeUs - mAnchorTimeMediaUs) + mAnchorTimeRealUs;

            delayUs = realTimeUs - ALooper::GetNowUs();
        }
    }

    msg->post(delayUs);

    mDrainVideoQueuePending = true;
}
295
296void NuPlayer::Renderer::onDrainVideoQueue() {
297 if (mVideoQueue.empty()) {
298 return;
299 }
300
301 QueueEntry *entry = &*mVideoQueue.begin();
302
303 if (entry->mBuffer == NULL) {
304 // EOS
305
306 notifyEOS(false /* audio */);
307
308 mVideoQueue.erase(mVideoQueue.begin());
309 entry = NULL;
310 return;
311 }
312
313#if 0
314 int64_t mediaTimeUs;
315 CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
316
317 LOGI("rendering video at media time %.2f secs", mediaTimeUs / 1E6);
318#endif
319
320 entry->mNotifyConsumed->setInt32("render", true);
321 entry->mNotifyConsumed->post();
322 mVideoQueue.erase(mVideoQueue.begin());
323 entry = NULL;
324}
325
326void NuPlayer::Renderer::notifyEOS(bool audio) {
327 sp<AMessage> notify = mNotify->dup();
328 notify->setInt32("what", kWhatEOS);
329 notify->setInt32("audio", static_cast<int32_t>(audio));
330 notify->post();
331}
332
333void NuPlayer::Renderer::onQueueBuffer(const sp<AMessage> &msg) {
334 int32_t audio;
335 CHECK(msg->findInt32("audio", &audio));
336
337 if (dropBufferWhileFlushing(audio, msg)) {
338 return;
339 }
340
341 sp<RefBase> obj;
342 CHECK(msg->findObject("buffer", &obj));
343 sp<ABuffer> buffer = static_cast<ABuffer *>(obj.get());
344
345 sp<AMessage> notifyConsumed;
346 CHECK(msg->findMessage("notifyConsumed", &notifyConsumed));
347
348 QueueEntry entry;
349 entry.mBuffer = buffer;
350 entry.mNotifyConsumed = notifyConsumed;
351 entry.mOffset = 0;
352 entry.mFinalResult = OK;
353
354 if (audio) {
355 mAudioQueue.push_back(entry);
356 postDrainAudioQueue();
357 } else {
358 mVideoQueue.push_back(entry);
359 postDrainVideoQueue();
360 }
361
362 if (mSyncQueues && !mAudioQueue.empty() && !mVideoQueue.empty()) {
363 int64_t firstAudioTimeUs;
364 int64_t firstVideoTimeUs;
365 CHECK((*mAudioQueue.begin()).mBuffer->meta()
366 ->findInt64("timeUs", &firstAudioTimeUs));
367 CHECK((*mVideoQueue.begin()).mBuffer->meta()
368 ->findInt64("timeUs", &firstVideoTimeUs));
369
370 int64_t diff = firstVideoTimeUs - firstAudioTimeUs;
371
372 LOGV("queueDiff = %.2f secs", diff / 1E6);
373
374 if (diff > 100000ll) {
375 // Audio data starts More than 0.1 secs before video.
376 // Drop some audio.
377
378 (*mAudioQueue.begin()).mNotifyConsumed->post();
379 mAudioQueue.erase(mAudioQueue.begin());
380 return;
381 }
382
383 syncQueuesDone();
384 }
385}
386
// Ends the initial audio/video queue synchronization phase and kicks
// off drains that were suppressed while mSyncQueues was set. Safe to
// call redundantly. Runs on the looper thread.
void NuPlayer::Renderer::syncQueuesDone() {
    if (!mSyncQueues) {
        return;
    }

    mSyncQueues = false;

    // postDrain*Queue() were no-ops while syncing; re-issue them now.
    if (!mAudioQueue.empty()) {
        postDrainAudioQueue();
    }

    if (!mVideoQueue.empty()) {
        postDrainVideoQueue();
    }
}
402
// Queues an end-of-stream marker (an entry with a NULL mBuffer) on the
// looper thread; the drain functions translate it into notifyEOS().
void NuPlayer::Renderer::onQueueEOS(const sp<AMessage> &msg) {
    int32_t audio;
    CHECK(msg->findInt32("audio", &audio));

    if (dropBufferWhileFlushing(audio, msg)) {
        // Stream is being flushed; the EOS marker is dropped.
        return;
    }

    int32_t finalResult;
    CHECK(msg->findInt32("finalResult", &finalResult));

    // mBuffer intentionally left NULL — that is the EOS sentinel.
    QueueEntry entry;
    entry.mOffset = 0;
    entry.mFinalResult = finalResult;

    // NOTE(review): if mSyncQueues is still set, postDrain*Queue() below
    // is a no-op and this EOS sits unprocessed until the other queue
    // receives data — verify EOS-during-sync is handled upstream.
    if (audio) {
        mAudioQueue.push_back(entry);
        postDrainAudioQueue();
    } else {
        mVideoQueue.push_back(entry);
        postDrainVideoQueue();
    }
}
426
// Performs the flush requested via flush(): empties the stream's queue
// (recycling buffers to the decoder), clears the flushing flag, and
// bumps the queue generation so in-flight drain messages become stale.
// Runs on the looper thread.
void NuPlayer::Renderer::onFlush(const sp<AMessage> &msg) {
    int32_t audio;
    CHECK(msg->findInt32("audio", &audio));

    // If we're currently syncing the queues, i.e. dropping audio while
    // aligning the first audio/video buffer times and only one of the
    // two queues has data, we may starve that queue by not requesting
    // more buffers from the decoder. If the other source then encounters
    // a discontinuity that leads to flushing, we'll never find the
    // corresponding discontinuity on the other queue.
    // Therefore we'll stop syncing the queues if at least one of them
    // is flushed.
    syncQueuesDone();

    if (audio) {
        flushQueue(&mAudioQueue);

        // Clear the flag under mFlushLock so queueBuffer/queueEOS from
        // other threads stop being dropped.
        Mutex::Autolock autoLock(mFlushLock);
        mFlushingAudio = false;

        mDrainAudioQueuePending = false;
        // Invalidate any drain messages already posted for this queue.
        ++mAudioQueueGeneration;
    } else {
        flushQueue(&mVideoQueue);

        Mutex::Autolock autoLock(mFlushLock);
        mFlushingVideo = false;

        mDrainVideoQueuePending = false;
        // Invalidate any drain messages already posted for this queue.
        ++mVideoQueueGeneration;
    }

    notifyFlushComplete(audio);
}
461
462void NuPlayer::Renderer::flushQueue(List<QueueEntry> *queue) {
463 while (!queue->empty()) {
464 QueueEntry *entry = &*queue->begin();
465
466 if (entry->mBuffer != NULL) {
467 entry->mNotifyConsumed->post();
468 }
469
470 queue->erase(queue->begin());
471 entry = NULL;
472 }
473}
474
475void NuPlayer::Renderer::notifyFlushComplete(bool audio) {
476 sp<AMessage> notify = mNotify->dup();
477 notify->setInt32("what", kWhatFlushComplete);
478 notify->setInt32("audio", static_cast<int32_t>(audio));
479 notify->post();
480}
481
482bool NuPlayer::Renderer::dropBufferWhileFlushing(
483 bool audio, const sp<AMessage> &msg) {
484 bool flushing = false;
485
486 {
487 Mutex::Autolock autoLock(mFlushLock);
488 if (audio) {
489 flushing = mFlushingAudio;
490 } else {
491 flushing = mFlushingVideo;
492 }
493 }
494
495 if (!flushing) {
496 return false;
497 }
498
499 sp<AMessage> notifyConsumed;
500 if (msg->findMessage("notifyConsumed", &notifyConsumed)) {
501 notifyConsumed->post();
502 }
503
504 return true;
505}
506
Andreas Huber3831a062010-12-21 10:22:33 -0800507void NuPlayer::Renderer::onAudioSinkChanged() {
508 CHECK(!mDrainAudioQueuePending);
509 mNumFramesWritten = 0;
510}
511
Andreas Huberf9334412010-12-15 15:17:42 -0800512} // namespace android
513