/*
 * Copyright (C) 2010 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
#define LOG_TAG "NuPlayer"
#include <utils/Log.h>

#include "NuPlayer.h"
#include "NuPlayerDecoder.h"
#include "NuPlayerRenderer.h"
#include "NuPlayerStreamListener.h"

#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>
#include <media/stagefright/ACodec.h>
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/MetaData.h>
#include <surfaceflinger/Surface.h>

namespace android {

////////////////////////////////////////////////////////////////////////////////

NuPlayer::NuPlayer()
    : mEOS(false),
      mAudioEOS(false),
      mVideoEOS(false),
      mFlushingAudio(NONE),
      mFlushingVideo(NONE) {
}

NuPlayer::~NuPlayer() {
}

void NuPlayer::setListener(const wp<MediaPlayerBase> &listener) {
    mListener = listener;
}

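// setDataSource() is asynchronous: the manual incStrong() keeps the stream
// source alive while its raw pointer travels inside the message; the matching
// decStrong() happens when kWhatSetDataSource is handled below.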
void NuPlayer::setDataSource(const sp<IStreamSource> &source) {
    sp<AMessage> msg = new AMessage(kWhatSetDataSource, id());

    source->incStrong(this);
    msg->setPointer("source", source.get());  // XXX unsafe.

    msg->post();
}

void NuPlayer::setVideoSurface(const sp<Surface> &surface) {
    sp<AMessage> msg = new AMessage(kWhatSetVideoSurface, id());
    msg->setObject("surface", surface);
    msg->post();
}

void NuPlayer::setAudioSink(const sp<MediaPlayerBase::AudioSink> &sink) {
    sp<AMessage> msg = new AMessage(kWhatSetAudioSink, id());
    msg->setObject("sink", sink);
    msg->post();
}

void NuPlayer::start() {
    (new AMessage(kWhatStart, id()))->post();
}

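// The public calls above merely post messages; the corresponding state
// changes are applied here, on the looper thread that owns this handler.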
void NuPlayer::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        case kWhatSetDataSource:
        {
            LOGI("kWhatSetDataSource");

            CHECK(mSource == NULL);

            void *ptr;
            CHECK(msg->findPointer("source", &ptr));

            mSource = static_cast<IStreamSource *>(ptr);
            mSource->decStrong(this);

            mStreamListener = new NuPlayerStreamListener(mSource, id());
            mTSParser = new ATSParser;
            break;
        }

        case kWhatSetVideoSurface:
        {
            LOGI("kWhatSetVideoSurface");

            sp<RefBase> obj;
            CHECK(msg->findObject("surface", &obj));

            mSurface = static_cast<Surface *>(obj.get());
            break;
        }

        case kWhatSetAudioSink:
        {
            LOGI("kWhatSetAudioSink");

            sp<RefBase> obj;
            CHECK(msg->findObject("sink", &obj));

            mAudioSink = static_cast<MediaPlayerBase::AudioSink *>(obj.get());
            break;
        }

        case kWhatStart:
        {
            mStreamListener->start();

            mRenderer = new Renderer(
                    mAudioSink,
                    new AMessage(kWhatRendererNotify, id()));

            looper()->registerHandler(mRenderer);

            (new AMessage(kWhatScanSources, id()))->post();
            break;
        }

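        // Decoders cannot be created until the transport stream parser has
        // seen the corresponding elementary stream, so kWhatScanSources keeps
        // retrying (and feeding more TS data) every 100ms until both decoders
        // exist or the input hits EOS.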
        case kWhatScanSources:
        {
            instantiateDecoder(false, &mVideoDecoder);

            if (mAudioSink != NULL) {
                instantiateDecoder(true, &mAudioDecoder);
            }

            if (mEOS) {
                break;
            }

            feedMoreTSData();

            if (mAudioDecoder == NULL || mVideoDecoder == NULL) {
                msg->post(100000ll);
            }
            break;
        }

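        // Notifications from the audio/video decoders: requests for more
        // input data, end of stream, flush completion and decoded buffers
        // that are ready to be handed to the renderer.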
        case kWhatVideoNotify:
        case kWhatAudioNotify:
        {
            bool audio = msg->what() == kWhatAudioNotify;

            sp<AMessage> codecRequest;
            CHECK(msg->findMessage("codec-request", &codecRequest));

            int32_t what;
            CHECK(codecRequest->findInt32("what", &what));

            if (what == ACodec::kWhatFillThisBuffer) {
                status_t err = feedDecoderInputData(
                        audio, codecRequest);

                if (err == -EWOULDBLOCK && !mEOS) {
                    feedMoreTSData();
                    msg->post();
                }
            } else if (what == ACodec::kWhatEOS) {
                mRenderer->queueEOS(audio, ERROR_END_OF_STREAM);
            } else if (what == ACodec::kWhatFlushCompleted) {
                if (audio) {
                    CHECK_EQ((int)mFlushingAudio, (int)FLUSHING_DECODER);
                    mFlushingAudio = FLUSHED;
                } else {
                    CHECK_EQ((int)mFlushingVideo, (int)FLUSHING_DECODER);
                    mFlushingVideo = FLUSHED;
                }

                LOGI("decoder %s flush completed", audio ? "audio" : "video");

                if (mFlushingAudio == FLUSHED && mFlushingVideo == FLUSHED) {
                    LOGI("both audio and video are flushed now.");

                    mRenderer->signalTimeDiscontinuity();

                    if (mAudioDecoder != NULL) {
                        mAudioDecoder->signalResume();
                    }

                    if (mVideoDecoder != NULL) {
                        mVideoDecoder->signalResume();
                    }

                    mFlushingAudio = NONE;
                    mFlushingVideo = NONE;
                }
            } else {
                CHECK_EQ((int)what, (int)ACodec::kWhatDrainThisBuffer);

                renderBuffer(audio, codecRequest);
            }

            break;
        }

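        // Notifications from the renderer: per-stream EOS and flush completion.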
        case kWhatRendererNotify:
        {
            int32_t what;
            CHECK(msg->findInt32("what", &what));

            if (what == Renderer::kWhatEOS) {
                int32_t audio;
                CHECK(msg->findInt32("audio", &audio));

                if (audio) {
                    mAudioEOS = true;
                } else {
                    mVideoEOS = true;
                }

                LOGI("reached %s EOS", audio ? "audio" : "video");

                if ((mAudioEOS || mAudioDecoder == NULL)
                        && (mVideoEOS || mVideoDecoder == NULL)) {
                    notifyListener(MEDIA_PLAYBACK_COMPLETE, 0, 0);
                }
            } else {
                CHECK_EQ(what, (int32_t)Renderer::kWhatFlushComplete);

                int32_t audio;
                CHECK(msg->findInt32("audio", &audio));

                LOGI("renderer %s flush completed.", audio ? "audio" : "video");
            }
            break;
        }

        case kWhatMoreDataQueued:
        {
            break;
        }

        default:
            TRESPASS();
            break;
    }
}

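// Pull up to ten 188-byte transport stream packets per call from the stream
// listener and feed them to the TS parser. A read of zero bytes means the
// input has reached EOS; a packet whose first byte is 0x00 is treated as a
// (legacy) discontinuity marker rather than a TS packet.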
void NuPlayer::feedMoreTSData() {
    CHECK(!mEOS);

    for (int32_t i = 0; i < 10; ++i) {
        char buffer[188];
        ssize_t n = mStreamListener->read(buffer, sizeof(buffer));

        if (n == 0) {
            LOGI("input data EOS reached.");
            mTSParser->signalEOS(ERROR_END_OF_STREAM);
            mEOS = true;
            break;
        } else if (n == INFO_DISCONTINUITY) {
            mTSParser->signalDiscontinuity(ATSParser::DISCONTINUITY_SEEK);
        } else if (n < 0) {
            CHECK_EQ(n, -EWOULDBLOCK);
            break;
        } else {
            if (buffer[0] == 0x00) {
                // XXX legacy
                mTSParser->signalDiscontinuity(ATSParser::DISCONTINUITY_SEEK);
            } else {
                mTSParser->feedTSPacket(buffer, sizeof(buffer));
            }
        }
    }
}

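// Dequeue whichever access unit (audio or video) carries the earlier
// timestamp; returns -EWOULDBLOCK while either stream is still unable to
// report the time of its next buffer.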
status_t NuPlayer::dequeueNextAccessUnit(
        ATSParser::SourceType *type, sp<ABuffer> *accessUnit) {
    accessUnit->clear();

    status_t audioErr = -EWOULDBLOCK;
    int64_t audioTimeUs;

    sp<AnotherPacketSource> audioSource =
        static_cast<AnotherPacketSource *>(
                mTSParser->getSource(ATSParser::MPEG2ADTS_AUDIO).get());

    if (audioSource != NULL) {
        audioErr = audioSource->nextBufferTime(&audioTimeUs);
    }

    status_t videoErr = -EWOULDBLOCK;
    int64_t videoTimeUs;

    sp<AnotherPacketSource> videoSource =
        static_cast<AnotherPacketSource *>(
                mTSParser->getSource(ATSParser::AVC_VIDEO).get());

    if (videoSource != NULL) {
        videoErr = videoSource->nextBufferTime(&videoTimeUs);
    }

    if (audioErr == -EWOULDBLOCK || videoErr == -EWOULDBLOCK) {
        return -EWOULDBLOCK;
    }

    if (audioErr != OK && videoErr != OK) {
        return audioErr;
    }

    if (videoErr != OK || (audioErr == OK && audioTimeUs < videoTimeUs)) {
        *type = ATSParser::MPEG2ADTS_AUDIO;
        return audioSource->dequeueAccessUnit(accessUnit);
    } else {
        *type = ATSParser::AVC_VIDEO;
        return videoSource->dequeueAccessUnit(accessUnit);
    }
}

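// Dequeue the next access unit of the requested type; returns -EWOULDBLOCK
// if the source does not exist yet or has no buffer available.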
status_t NuPlayer::dequeueAccessUnit(
        ATSParser::SourceType type, sp<ABuffer> *accessUnit) {
    sp<AnotherPacketSource> source =
        static_cast<AnotherPacketSource *>(mTSParser->getSource(type).get());

    if (source == NULL) {
        return -EWOULDBLOCK;
    }

    status_t finalResult;
    if (!source->hasBufferAvailable(&finalResult)) {
        return finalResult == OK ? -EWOULDBLOCK : finalResult;
    }

    return source->dequeueAccessUnit(accessUnit);
}

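// Lazily create the audio or video decoder once the TS parser exposes the
// corresponding packet source; the audio path also opens and starts the
// audio sink.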
status_t NuPlayer::instantiateDecoder(
        bool audio, sp<Decoder> *decoder) {
    if (*decoder != NULL) {
        return OK;
    }

    ATSParser::SourceType type =
        audio ? ATSParser::MPEG2ADTS_AUDIO : ATSParser::AVC_VIDEO;

    sp<AnotherPacketSource> source =
        static_cast<AnotherPacketSource *>(
                mTSParser->getSource(type).get());

    if (source == NULL) {
        return -EWOULDBLOCK;
    }

    sp<AMessage> notify =
        new AMessage(audio ? kWhatAudioNotify : kWhatVideoNotify,
                     id());

    *decoder = new Decoder(notify, audio ? NULL : mSurface);
    looper()->registerHandler(*decoder);

    const sp<MetaData> &meta = source->getFormat();
    (*decoder)->configure(meta);

    if (audio) {
        int32_t sampleRate;
        int32_t channelCount;
        CHECK(meta->findInt32(kKeySampleRate, &sampleRate));
        CHECK(meta->findInt32(kKeyChannelCount, &channelCount));

        channelCount = 2;  // XXX

        CHECK_EQ(mAudioSink->open(sampleRate, channelCount), (status_t)OK);
        mAudioSink->start();
    }

    return OK;
}

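// Answer a decoder's "fill this buffer" request. On a stream discontinuity
// the affected decoder and its renderer channel are flushed and the
// mFlushingAudio / mFlushingVideo state machine is advanced; otherwise the
// dequeued access unit is attached to the reply message.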
status_t NuPlayer::feedDecoderInputData(bool audio, const sp<AMessage> &msg) {
    sp<AMessage> reply;
    CHECK(msg->findMessage("reply", &reply));

    if ((audio && mFlushingAudio == FLUSHING_DECODER)
            || (!audio && mFlushingVideo == FLUSHING_DECODER)) {
        reply->setInt32("err", INFO_DISCONTINUITY);
        reply->post();
        return OK;
    }

    sp<ABuffer> accessUnit;
    status_t err = dequeueAccessUnit(
            audio ? ATSParser::MPEG2ADTS_AUDIO : ATSParser::AVC_VIDEO,
            &accessUnit);

    if (err == -EWOULDBLOCK) {
        return err;
    } else if (err != OK) {
        if (err == INFO_DISCONTINUITY) {
            LOGI("%s discontinuity", audio ? "audio" : "video");
            (audio ? mAudioDecoder : mVideoDecoder)->signalFlush();
            mRenderer->flush(audio);

            if (audio) {
                CHECK(mFlushingAudio == NONE
                        || mFlushingAudio == AWAITING_DISCONTINUITY);
                mFlushingAudio = FLUSHING_DECODER;
                if (mFlushingVideo == NONE) {
                    mFlushingVideo = (mVideoDecoder != NULL)
                        ? AWAITING_DISCONTINUITY
                        : FLUSHED;
                }
            } else {
                CHECK(mFlushingVideo == NONE
                        || mFlushingVideo == AWAITING_DISCONTINUITY);
                mFlushingVideo = FLUSHING_DECODER;
                if (mFlushingAudio == NONE) {
                    mFlushingAudio = (mAudioDecoder != NULL)
                        ? AWAITING_DISCONTINUITY
                        : FLUSHED;
                }
            }
        }

        reply->setInt32("err", err);
        reply->post();
        return OK;
    }

    LOGV("returned a valid buffer of %s data", audio ? "audio" : "video");

#if 0
    int64_t mediaTimeUs;
    CHECK(accessUnit->meta()->findInt64("timeUs", &mediaTimeUs));
    LOGI("feeding %s input buffer at media time %.2f secs",
         audio ? "audio" : "video",
         mediaTimeUs / 1E6);
#endif

    reply->setObject("buffer", accessUnit);
    reply->post();

    return OK;
}

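// Hand a decoded buffer, together with the decoder's reply message, over to
// the renderer.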
void NuPlayer::renderBuffer(bool audio, const sp<AMessage> &msg) {
    LOGV("renderBuffer %s", audio ? "audio" : "video");

    sp<AMessage> reply;
    CHECK(msg->findMessage("reply", &reply));

    sp<RefBase> obj;
    CHECK(msg->findObject("buffer", &obj));

    sp<ABuffer> buffer = static_cast<ABuffer *>(obj.get());

    mRenderer->queueBuffer(audio, buffer, reply);
}

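// Forward an event to the client's MediaPlayerBase listener, if it is still
// alive; the weak reference has to be promoted before it can be used.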
void NuPlayer::notifyListener(int msg, int ext1, int ext2) {
    if (mListener == NULL) {
        return;
    }

    sp<MediaPlayerBase> listener = mListener.promote();

    if (listener == NULL) {
        return;
    }

    listener->sendEvent(msg, ext1, ext2);
}

}  // namespace android