/*
 * Copyright (C) 2011 NXP Software
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#define LOG_NDEBUG 1
#define LOG_TAG "PreviewPlayer"
#include <utils/Log.h>

#include <dlfcn.h>

#include "include/ARTSPController.h"
#include "PreviewPlayer.h"
#include "DummyAudioSource.h"
#include "DummyVideoSource.h"
#include "VideoEditorSRC.h"
#include "include/LiveSession.h"
#include "include/NuCachedSource2.h"
#include "include/ThrottledSource.h"


#include "PreviewRenderer.h"

#include <binder/IPCThreadState.h>
#include <media/stagefright/DataSource.h>
#include <media/stagefright/FileSource.h>
#include <media/stagefright/MediaBuffer.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MediaExtractor.h>
#include <media/stagefright/MediaDebug.h>
#include <media/stagefright/MediaSource.h>
#include <media/stagefright/MetaData.h>
#include <media/stagefright/OMXCodec.h>

#include <surfaceflinger/Surface.h>
#include <media/stagefright/foundation/ALooper.h>

namespace android {


struct PreviewPlayerEvent : public TimedEventQueue::Event {
    PreviewPlayerEvent(
            PreviewPlayer *player,
            void (PreviewPlayer::*method)())
        : mPlayer(player),
          mMethod(method) {
    }

protected:
    virtual ~PreviewPlayerEvent() {}

    virtual void fire(TimedEventQueue *queue, int64_t /* now_us */) {
        (mPlayer->*mMethod)();
    }

private:
    PreviewPlayer *mPlayer;
    void (PreviewPlayer::*mMethod)();

    PreviewPlayerEvent(const PreviewPlayerEvent &);
    PreviewPlayerEvent &operator=(const PreviewPlayerEvent &);
};


struct PreviewLocalRenderer : public PreviewPlayerRenderer {
    PreviewLocalRenderer(
            bool previewOnly,
            OMX_COLOR_FORMATTYPE colorFormat,
            const sp<Surface> &surface,
            size_t displayWidth, size_t displayHeight,
            size_t decodedWidth, size_t decodedHeight,
            int32_t rotationDegrees = 0)
        : mTarget(NULL) {
        init(previewOnly,
             colorFormat, surface,
             displayWidth, displayHeight,
             decodedWidth, decodedHeight,
             rotationDegrees);
    }

    virtual void render(MediaBuffer *buffer) {
        render((const uint8_t *)buffer->data() + buffer->range_offset(),
               buffer->range_length());
    }

    void render(const void *data, size_t size) {
        mTarget->render(data, size, NULL);
    }
    void render() {
        mTarget->renderYV12();
    }
    void getBuffer(uint8_t **data, size_t *stride) {
        mTarget->getBufferYV12(data, stride);
    }

protected:
    virtual ~PreviewLocalRenderer() {
        delete mTarget;
        mTarget = NULL;
    }

private:
    PreviewRenderer *mTarget;

    void init(
            bool previewOnly,
            OMX_COLOR_FORMATTYPE colorFormat,
            const sp<Surface> &surface,
            size_t displayWidth, size_t displayHeight,
            size_t decodedWidth, size_t decodedHeight,
            int32_t rotationDegrees = 0);

    PreviewLocalRenderer(const PreviewLocalRenderer &);
    PreviewLocalRenderer &operator=(const PreviewLocalRenderer &);
};

void PreviewLocalRenderer::init(
        bool previewOnly,
        OMX_COLOR_FORMATTYPE colorFormat,
        const sp<Surface> &surface,
        size_t displayWidth, size_t displayHeight,
        size_t decodedWidth, size_t decodedHeight,
        int32_t rotationDegrees) {
    mTarget = new PreviewRenderer(
            colorFormat, surface, displayWidth, displayHeight,
            decodedWidth, decodedHeight, rotationDegrees);
}

PreviewPlayer::PreviewPlayer()
    : AwesomePlayer(),
      mFrameRGBBuffer(NULL),
      mFrameYUVBuffer(NULL),
      mReportedWidth(0),
      mReportedHeight(0),
      mCurrFramingEffectIndex(0) {

    mVideoRenderer = NULL;
    mLastVideoBuffer = NULL;
    mSuspensionState = NULL;
    mEffectsSettings = NULL;
    mAudioMixStoryBoardTS = 0;
    mCurrentMediaBeginCutTime = 0;
    mCurrentMediaVolumeValue = 0;
    mNumberEffects = 0;
    mDecodedVideoTs = 0;
    mDecVideoTsStoryBoard = 0;
    mCurrentVideoEffect = VIDEO_EFFECT_NONE;
    mProgressCbInterval = 0;
    mNumberDecVideoFrames = 0;
    mOverlayUpdateEventPosted = false;

    mVideoEvent = new PreviewPlayerEvent(this, &PreviewPlayer::onVideoEvent);
    mVideoEventPending = false;
    mStreamDoneEvent = new PreviewPlayerEvent(this,
            &AwesomePlayer::onStreamDone);

    mStreamDoneEventPending = false;

    mCheckAudioStatusEvent = new PreviewPlayerEvent(
            this, &AwesomePlayer::onCheckAudioStatus);

    mAudioStatusEventPending = false;

    mProgressCbEvent = new PreviewPlayerEvent(this,
            &PreviewPlayer::onProgressCbEvent);

    mOverlayUpdateEvent = new PreviewPlayerEvent(this,
            &PreviewPlayer::onUpdateOverlayEvent);
    mProgressCbEventPending = false;

    mOverlayUpdateEventPending = false;
    mResizedVideoBuffer = NULL;
    mVideoResizedOrCropped = false;
    mRenderingMode = (M4xVSS_MediaRendering)MEDIA_RENDERING_INVALID;
    mIsFiftiesEffectStarted = false;
    reset();
}

PreviewPlayer::~PreviewPlayer() {

    if (mQueueStarted) {
        mQueue.stop();
    }

    reset();

    if(mResizedVideoBuffer != NULL) {
        M4OSA_free((M4OSA_MemAddr32)(mResizedVideoBuffer->data()));
        mResizedVideoBuffer = NULL;
    }

    mVideoRenderer.clear();
    mVideoRenderer = NULL;
}

void PreviewPlayer::cancelPlayerEvents(bool keepBufferingGoing) {
    mQueue.cancelEvent(mVideoEvent->eventID());
    mVideoEventPending = false;
    mQueue.cancelEvent(mStreamDoneEvent->eventID());
    mStreamDoneEventPending = false;
    mQueue.cancelEvent(mCheckAudioStatusEvent->eventID());
    mAudioStatusEventPending = false;

    mQueue.cancelEvent(mProgressCbEvent->eventID());
    mProgressCbEventPending = false;
}

status_t PreviewPlayer::setDataSource(
        const char *uri, const KeyedVector<String8, String8> *headers) {
    Mutex::Autolock autoLock(mLock);
    return setDataSource_l(uri, headers);
}

status_t PreviewPlayer::setDataSource_l(
        const char *uri, const KeyedVector<String8, String8> *headers) {
    reset_l();

    mUri = uri;

    if (headers) {
        mUriHeaders = *headers;
    }

    // The actual work will be done during preparation in the call to
    // ::finishSetDataSource_l to avoid blocking the calling thread in
    // setDataSource for any significant time.
    return OK;
}

status_t PreviewPlayer::setDataSource_l(const sp<MediaExtractor> &extractor) {
    bool haveAudio = false;
    bool haveVideo = false;
    for (size_t i = 0; i < extractor->countTracks(); ++i) {
        sp<MetaData> meta = extractor->getTrackMetaData(i);

        const char *mime;
        CHECK(meta->findCString(kKeyMIMEType, &mime));

        if (!haveVideo && !strncasecmp(mime, "video/", 6)) {
            setVideoSource(extractor->getTrack(i));
            haveVideo = true;
        } else if (!haveAudio && !strncasecmp(mime, "audio/", 6)) {
            setAudioSource(extractor->getTrack(i));
            haveAudio = true;

            if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_VORBIS)) {
                // Only do this for vorbis audio, none of the other audio
                // formats even support this ringtone specific hack and
                // retrieving the metadata on some extractors may turn out
                // to be very expensive.
                sp<MetaData> fileMeta = extractor->getMetaData();
                int32_t loop;
                if (fileMeta != NULL
                        && fileMeta->findInt32(kKeyAutoLoop, &loop)
                        && loop != 0) {
                    mFlags |= AUTO_LOOPING;
                }
            }
        }

        if (haveAudio && haveVideo) {
            break;
        }
    }

    /* Add the support for Dummy audio */
    if( !haveAudio ){
        LOGV("PreviewPlayer: setDataSource_l Dummyaudiocreation started");

        mAudioTrack = DummyAudioSource::Create(32000, 2, 20000,
                ((mPlayEndTimeMsec)*1000));
        LOGV("PreviewPlayer: setDataSource_l Dummyaudiosource created");
        if(mAudioTrack != NULL) {
            haveAudio = true;
        }
    }

    if (!haveAudio && !haveVideo) {
        return UNKNOWN_ERROR;
    }

    mExtractorFlags = extractor->flags();
    return OK;
}

status_t PreviewPlayer::setDataSource_l_jpg() {
    LOGV("PreviewPlayer: setDataSource_l_jpg started");

    mAudioSource = DummyAudioSource::Create(32000, 2, 20000,
            ((mPlayEndTimeMsec)*1000));
    LOGV("PreviewPlayer: setDataSource_l_jpg Dummyaudiosource created");
    if(mAudioSource != NULL) {
        setAudioSource(mAudioSource);
    }
    status_t error = mAudioSource->start();
    if (error != OK) {
        LOGV("Error starting dummy audio source");
        mAudioSource.clear();
        return error;
    }

    mDurationUs = (mPlayEndTimeMsec - mPlayBeginTimeMsec)*1000;

    mVideoSource = DummyVideoSource::Create(mVideoWidth, mVideoHeight,
            mDurationUs, mUri);
    mReportedWidth = mVideoWidth;
    mReportedHeight = mVideoHeight;

    setVideoSource(mVideoSource);
    status_t err1 = mVideoSource->start();
    if (err1 != OK) {
        mVideoSource.clear();
        return err1;
    }

    mIsVideoSourceJpg = true;
    return OK;
}

void PreviewPlayer::reset() {
    Mutex::Autolock autoLock(mLock);
    reset_l();
}

void PreviewPlayer::reset_l() {

    if (mFlags & PREPARING) {
        mFlags |= PREPARE_CANCELLED;
    }

    while (mFlags & PREPARING) {
        mPreparedCondition.wait(mLock);
    }

    cancelPlayerEvents();
    mAudioTrack.clear();
    mVideoTrack.clear();

    // Shutdown audio first, so that the response to the reset request
    // appears to happen instantaneously as far as the user is concerned.
    // If we did this later, audio would continue playing while we
    // shut down the video-related resources and the player would appear
    // not to be as responsive to a reset request.
    if (mAudioPlayer == NULL && mAudioSource != NULL) {
        // If we had an audio player, it would have effectively
        // taken possession of the audio source and stopped it when
        // _it_ is stopped. Otherwise this is still our responsibility.
        mAudioSource->stop();
    }
    mAudioSource.clear();

    mTimeSource = NULL;

    delete mAudioPlayer;
    mAudioPlayer = NULL;

    if (mLastVideoBuffer) {
        mLastVideoBuffer->release();
        mLastVideoBuffer = NULL;
    }

    if (mVideoBuffer) {
        mVideoBuffer->release();
        mVideoBuffer = NULL;
    }

    if (mVideoSource != NULL) {
        mVideoSource->stop();

        // The following hack is necessary to ensure that the OMX
        // component is completely released by the time we may try
        // to instantiate it again.
        wp<MediaSource> tmp = mVideoSource;
        mVideoSource.clear();
        while (tmp.promote() != NULL) {
            usleep(1000);
        }
        IPCThreadState::self()->flushCommands();
    }

    mDurationUs = -1;
    mFlags = 0;
    mExtractorFlags = 0;
    mVideoWidth = mVideoHeight = -1;
    mTimeSourceDeltaUs = 0;
    mVideoTimeUs = 0;

    mSeeking = false;
    mSeekNotificationSent = false;
    mSeekTimeUs = 0;

    mUri.setTo("");
    mUriHeaders.clear();

    mFileSource.clear();

    delete mSuspensionState;
    mSuspensionState = NULL;

    mCurrentVideoEffect = VIDEO_EFFECT_NONE;
    mIsVideoSourceJpg = false;
    mFrameRGBBuffer = NULL;
    if(mFrameYUVBuffer != NULL) {
        M4OSA_free((M4OSA_MemAddr32)mFrameYUVBuffer);
        mFrameYUVBuffer = NULL;
    }
}

void PreviewPlayer::partial_reset_l() {

    if (mLastVideoBuffer) {
        mLastVideoBuffer->release();
        mLastVideoBuffer = NULL;
    }

    /* call base struct */
    AwesomePlayer::partial_reset_l();

}

status_t PreviewPlayer::play() {
    Mutex::Autolock autoLock(mLock);

    mFlags &= ~CACHE_UNDERRUN;

    return play_l();
}

status_t PreviewPlayer::play_l() {
    VideoEditorAudioPlayer *mVePlayer = (VideoEditorAudioPlayer *)mAudioPlayer;
    if (mFlags & PLAYING) {
        return OK;
    }
    mStartNextPlayer = false;

    if (!(mFlags & PREPARED)) {
        status_t err = prepare_l();

        if (err != OK) {
            return err;
        }
    }

    mFlags |= PLAYING;
    mFlags |= FIRST_FRAME;

    bool deferredAudioSeek = false;

    if (mAudioSource != NULL) {
        if (mAudioPlayer == NULL) {
            if (mAudioSink != NULL) {

                mAudioPlayer = new VideoEditorAudioPlayer(mAudioSink, this);
                mVePlayer =
                        (VideoEditorAudioPlayer*)mAudioPlayer;

                mAudioPlayer->setSource(mAudioSource);

                mVePlayer->setAudioMixSettings(
                        mPreviewPlayerAudioMixSettings);

                mVePlayer->setAudioMixPCMFileHandle(
                        mAudioMixPCMFileHandle);

                mVePlayer->setAudioMixStoryBoardSkimTimeStamp(
                        mAudioMixStoryBoardTS, mCurrentMediaBeginCutTime,
                        mCurrentMediaVolumeValue);

                // We've already started the MediaSource in order to enable
                // the prefetcher to read its data.
                status_t err = mVePlayer->start(
                        true /* sourceAlreadyStarted */);

                if (err != OK) {
                    delete mAudioPlayer;
                    mAudioPlayer = NULL;

                    mFlags &= ~(PLAYING | FIRST_FRAME);
                    return err;
                }

                mTimeSource = mVePlayer; //mAudioPlayer;

                deferredAudioSeek = true;
                mWatchForAudioSeekComplete = false;
                mWatchForAudioEOS = true;
            }
        } else {
            mVePlayer->resume();
        }

    }

    if (mTimeSource == NULL && mAudioPlayer == NULL) {
        mTimeSource = &mSystemTimeSource;
    }

    if (mVideoSource != NULL) {
        // Kick off video playback
        postVideoEvent_l();
    }

    if (deferredAudioSeek) {
        // If there was a seek request while we were paused
        // and we're just starting up again, honor the request now.
        seekAudioIfNecessary_l();
    }

    if (mFlags & AT_EOS) {
        // Legacy behaviour, if a stream finishes playing and then
        // is started again, we play from the start...
        seekTo_l(0);
    }

    return OK;
}


void PreviewPlayer::initRenderer_l() {
    if (mSurface != NULL || mISurface != NULL) {
        sp<MetaData> meta = mVideoSource->getFormat();

        int32_t format;
        const char *component;
        int32_t decodedWidth, decodedHeight;
        CHECK(meta->findInt32(kKeyColorFormat, &format));
        CHECK(meta->findCString(kKeyDecoderComponent, &component));
        CHECK(meta->findInt32(kKeyWidth, &decodedWidth));
        CHECK(meta->findInt32(kKeyHeight, &decodedHeight));

        // Must ensure that mVideoRenderer's destructor is actually executed
        // before creating a new one.
        IPCThreadState::self()->flushCommands();

        // Always use the local renderer, since the decoded buffers are
        // modified by the postprocessing module.
        // Other decoders are instantiated locally and as a consequence
        // allocate their buffers in local address space.
        if(mVideoRenderer == NULL) {

            mVideoRenderer = new PreviewLocalRenderer(
                false, // previewOnly
                (OMX_COLOR_FORMATTYPE)format,
                mSurface,
                mOutputVideoWidth, mOutputVideoHeight,
                mOutputVideoWidth, mOutputVideoHeight);
        }
    }
}


void PreviewPlayer::setISurface(const sp<ISurface> &isurface) {
    Mutex::Autolock autoLock(mLock);
    mISurface = isurface;
}


status_t PreviewPlayer::seekTo(int64_t timeUs) {

    if ((mExtractorFlags & MediaExtractor::CAN_SEEK) || (mIsVideoSourceJpg)) {
        Mutex::Autolock autoLock(mLock);
        return seekTo_l(timeUs);
    }

    return OK;
}


status_t PreviewPlayer::getVideoDimensions(
        int32_t *width, int32_t *height) const {
    Mutex::Autolock autoLock(mLock);

    if (mVideoWidth < 0 || mVideoHeight < 0) {
        return UNKNOWN_ERROR;
    }

    *width = mVideoWidth;
    *height = mVideoHeight;

    return OK;
}


status_t PreviewPlayer::initAudioDecoder() {
    sp<MetaData> meta = mAudioTrack->getFormat();
    const char *mime;
    CHECK(meta->findCString(kKeyMIMEType, &mime));

    if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_RAW)) {
        mAudioSource = mAudioTrack;
    } else {
        sp<MediaSource> aRawSource;
        aRawSource = OMXCodec::Create(
                mClient.interface(), mAudioTrack->getFormat(),
                false, // createEncoder
                mAudioTrack);

        if(aRawSource != NULL) {
            LOGV("initAudioDecoder: new VideoEditorSRC");
            mAudioSource = new VideoEditorSRC(aRawSource);
        }
    }

    if (mAudioSource != NULL) {
        int64_t durationUs;
        if (mAudioTrack->getFormat()->findInt64(kKeyDuration, &durationUs)) {
            Mutex::Autolock autoLock(mMiscStateLock);
            if (mDurationUs < 0 || durationUs > mDurationUs) {
                mDurationUs = durationUs;
            }
        }
        status_t err = mAudioSource->start();

        if (err != OK) {
            mAudioSource.clear();
            return err;
        }
    } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_QCELP)) {
        // For legacy reasons we're simply going to ignore the absence
        // of an audio decoder for QCELP instead of aborting playback
        // altogether.
        return OK;
    }

    return mAudioSource != NULL ? OK : UNKNOWN_ERROR;
}


status_t PreviewPlayer::initVideoDecoder(uint32_t flags) {

    mVideoSource = OMXCodec::Create(
            mClient.interface(), mVideoTrack->getFormat(),
            false,
            mVideoTrack,
            NULL, flags);

    if (mVideoSource != NULL) {
        int64_t durationUs;
        if (mVideoTrack->getFormat()->findInt64(kKeyDuration, &durationUs)) {
            Mutex::Autolock autoLock(mMiscStateLock);
            if (mDurationUs < 0 || durationUs > mDurationUs) {
                mDurationUs = durationUs;
            }
        }

        CHECK(mVideoTrack->getFormat()->findInt32(kKeyWidth, &mVideoWidth));
        CHECK(mVideoTrack->getFormat()->findInt32(kKeyHeight, &mVideoHeight));

        mReportedWidth = mVideoWidth;
        mReportedHeight = mVideoHeight;

        status_t err = mVideoSource->start();

        if (err != OK) {
            mVideoSource.clear();
            return err;
        }
    }

    return mVideoSource != NULL ? OK : UNKNOWN_ERROR;
}


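// onVideoEvent is the per-frame work loop of the preview player. It reads the
// next decoded buffer from the video source (handling seeks and format
// changes), drops frames that fall outside the clip's begin/end cut times,
// performs A/V sync against the audio clock, applies any storyboard effects
// and media rendering, renders the frame, and finally schedules the next
// video event or posts end-of-stream.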
void PreviewPlayer::onVideoEvent() {
    uint32_t i=0;
    bool bAppliedVideoEffect = false;
    M4OSA_ERR err1 = M4NO_ERROR;
    int64_t imageFrameTimeUs = 0;

    Mutex::Autolock autoLock(mLock);
    if (!mVideoEventPending) {
        // The event has been cancelled in reset_l() but had already
        // been scheduled for execution at that time.
        return;
    }
    mVideoEventPending = false;

    TimeSource *ts_st = &mSystemTimeSource;
    int64_t timeStartUs = ts_st->getRealTimeUs();

    if (mSeeking) {
        if (mLastVideoBuffer) {
            mLastVideoBuffer->release();
            mLastVideoBuffer = NULL;
        }


        if(mAudioSource != NULL) {

            // We're going to seek the video source first, followed by
            // the audio source.
            // In order to avoid jumps in the DataSource offset caused by
            // the audio codec prefetching data from the old locations
            // while the video codec is already reading data from the new
            // locations, we'll "pause" the audio source, causing it to
            // stop reading input data until a subsequent seek.

            if (mAudioPlayer != NULL) {
                mAudioPlayer->pause();
            }
            mAudioSource->pause();
        }
    }

    if (!mVideoBuffer) {
        MediaSource::ReadOptions options;
        if (mSeeking) {
            LOGV("LV PLAYER seeking to %lld us (%.2f secs)", mSeekTimeUs,
                 mSeekTimeUs / 1E6);

            options.setSeekTo(
                    mSeekTimeUs, MediaSource::ReadOptions::SEEK_CLOSEST);
        }
        for (;;) {
            status_t err = mVideoSource->read(&mVideoBuffer, &options);
            options.clearSeekTo();

            if (err != OK) {
                CHECK_EQ(mVideoBuffer, NULL);

                if (err == INFO_FORMAT_CHANGED) {
                    LOGV("LV PLAYER VideoSource signalled format change");
                    notifyVideoSize_l();
                    sp<MetaData> meta = mVideoSource->getFormat();

                    CHECK(meta->findInt32(kKeyWidth, &mReportedWidth));
                    CHECK(meta->findInt32(kKeyHeight, &mReportedHeight));
                    if (mVideoRenderer != NULL) {
                        mVideoRendererIsPreview = false;
                        initRenderer_l();
                    }
                    continue;
                }
                // So video playback is complete, but we may still have
                // a seek request pending that needs to be applied
                // to the audio track.
                if (mSeeking) {
                    LOGV("video stream ended while seeking!");
                }
                finishSeekIfNecessary(-1);
                LOGV("PreviewPlayer: onVideoEvent EOS reached.");
                mFlags |= VIDEO_AT_EOS;
                if (mOverlayUpdateEventPosted) {
                    mOverlayUpdateEventPosted = false;
                    postOverlayUpdateEvent_l();
                }
                postStreamDoneEvent_l(err);
                return;
            }

            if (mVideoBuffer->range_length() == 0) {
                // Some decoders, notably the PV AVC software decoder
                // return spurious empty buffers that we just want to ignore.

                mVideoBuffer->release();
                mVideoBuffer = NULL;
                continue;
            }

            int64_t videoTimeUs;
            CHECK(mVideoBuffer->meta_data()->findInt64(kKeyTime, &videoTimeUs));

            if((videoTimeUs/1000) < mPlayBeginTimeMsec) {
                // Frames are before the begin cut time,
                // do not render them.
                mVideoBuffer->release();
                mVideoBuffer = NULL;
                continue;
            }

            break;
        }
    }

    mNumberDecVideoFrames++;

    int64_t timeUs;
    CHECK(mVideoBuffer->meta_data()->findInt64(kKeyTime, &timeUs));

    {
        Mutex::Autolock autoLock(mMiscStateLock);
        mVideoTimeUs = timeUs;
    }

    mDecodedVideoTs = timeUs;

    if(!mStartNextPlayer) {
        int64_t playbackTimeRemaining = (mPlayEndTimeMsec*1000) - timeUs;
        if(playbackTimeRemaining <= 1500000) {
            // When less than 1.5 sec of playback is left,
            // send a notification to start the next player.

            mStartNextPlayer = true;
            notifyListener_l(0xAAAAAAAA);
        }
    }

    bool wasSeeking = mSeeking;
    finishSeekIfNecessary(timeUs);

    TimeSource *ts = (mFlags & AUDIO_AT_EOS) ? &mSystemTimeSource : mTimeSource;

    if(ts == NULL) {
        mVideoBuffer->release();
        mVideoBuffer = NULL;
        return;
    }

    if(!mIsVideoSourceJpg) {
        if (mFlags & FIRST_FRAME) {
            mFlags &= ~FIRST_FRAME;

            mTimeSourceDeltaUs = ts->getRealTimeUs() - timeUs;
        }

        int64_t realTimeUs, mediaTimeUs;
        if (!(mFlags & AUDIO_AT_EOS) && mAudioPlayer != NULL
            && mAudioPlayer->getMediaTimeMapping(&realTimeUs, &mediaTimeUs)) {
            mTimeSourceDeltaUs = realTimeUs - mediaTimeUs;
        }

        int64_t nowUs = ts->getRealTimeUs() - mTimeSourceDeltaUs;

        int64_t latenessUs = nowUs - timeUs;

        if (wasSeeking) {
            // Let's display the first frame after seeking right away.
            latenessUs = 0;
        }
        LOGV("Audio time stamp = %lld and video time stamp = %lld",
             ts->getRealTimeUs(),timeUs);
        if (latenessUs > 40000) {
            // We're more than 40ms late.

            LOGV("LV PLAYER we're late by %lld us (%.2f secs)",
                 latenessUs, latenessUs / 1E6);

            mVideoBuffer->release();
            mVideoBuffer = NULL;

            postVideoEvent_l();
            return;
        }

        if (latenessUs < -10000) {
            // We're more than 10ms early.
            LOGV("We're more than 10ms early, lateness %lld", latenessUs);

            postVideoEvent_l(10000);
            return;
        }
    }

    if (mVideoRendererIsPreview || mVideoRenderer == NULL) {
        mVideoRendererIsPreview = false;

        initRenderer_l();
    }

    // If the timestamp exceeds the endCutTime of the clip, do not render
    if((timeUs/1000) > mPlayEndTimeMsec) {
        if (mLastVideoBuffer) {
            mLastVideoBuffer->release();
            mLastVideoBuffer = NULL;
        }
        mLastVideoBuffer = mVideoBuffer;
        mVideoBuffer = NULL;
        mFlags |= VIDEO_AT_EOS;
        mFlags |= AUDIO_AT_EOS;
        LOGI("PreviewPlayer: onVideoEvent timeUs > mPlayEndTime; send EOS..");
        if (mOverlayUpdateEventPosted) {
            mOverlayUpdateEventPosted = false;
            postOverlayUpdateEvent_l();
        }
        postStreamDoneEvent_l(ERROR_END_OF_STREAM);
        return;
    }

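    // Effect times are expressed in storyboard time (milliseconds from the
    // start of the whole storyboard), while timeUs is the decoded timestamp
    // within the current clip. mDecVideoTsStoryBoard carries the storyboard
    // offset of this clip and mPlayBeginTimeMsec its begin cut time, so both
    // are folded in before comparing against each effect's start time and
    // duration.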
    // Post processing to apply video effects
    for (i = 0; i < mNumberEffects; i++) {
        // First check if the effect start time matches the clip being previewed
        if((mEffectsSettings[i].uiStartTime < (mDecVideoTsStoryBoard/1000)) ||
            (mEffectsSettings[i].uiStartTime >=
            ((mDecVideoTsStoryBoard/1000) + mPlayEndTimeMsec - mPlayBeginTimeMsec))) {
            // This effect doesn't belong to this clip, check the next one
            continue;
        }
        // Check if the effect applies to this particular frame timestamp
        if((mEffectsSettings[i].uiStartTime <=
            (((timeUs+mDecVideoTsStoryBoard)/1000)-mPlayBeginTimeMsec)) &&
            ((mEffectsSettings[i].uiStartTime+mEffectsSettings[i].uiDuration) >=
            (((timeUs+mDecVideoTsStoryBoard)/1000)-mPlayBeginTimeMsec))
            && (mEffectsSettings[i].uiDuration != 0)) {

            setVideoPostProcessingNode(
                mEffectsSettings[i].VideoEffectType, TRUE);
        }
        else {
            setVideoPostProcessingNode(
                mEffectsSettings[i].VideoEffectType, FALSE);
        }
    }

    // Provide the overlay update indication when there is an overlay effect
    if (mCurrentVideoEffect & VIDEO_EFFECT_FRAMING) {
        mCurrentVideoEffect &= ~VIDEO_EFFECT_FRAMING;  // never apply framing here
        if (!mOverlayUpdateEventPosted) {

            // Find the effect in the effectSettings array
            int index;
            for (index = 0; index < mNumberEffects; index++) {
                M4OSA_UInt32 timeMs = mDecodedVideoTs/1000;
                M4OSA_UInt32 timeOffset = mDecVideoTsStoryBoard/1000;
                if(mEffectsSettings[index].VideoEffectType ==
                    M4xVSS_kVideoEffectType_Framing) {
                    if (((mEffectsSettings[index].uiStartTime + 1) <= timeMs + timeOffset) &&
                        ((mEffectsSettings[index].uiStartTime - 1 +
                        mEffectsSettings[index].uiDuration) >= timeMs + timeOffset)) {
                        break;
                    }
                }
            }
            if (index < mNumberEffects) {
                mCurrFramingEffectIndex = index;
                mOverlayUpdateEventPosted = true;
                postOverlayUpdateEvent_l();
                LOGV("Framing index = %d", mCurrFramingEffectIndex);
            } else {
                LOGV("No framing effects found");
            }
        }

    } else if (mOverlayUpdateEventPosted) {
        // Post the event when the overlay is no longer valid
        LOGV("Overlay is Done");
        mOverlayUpdateEventPosted = false;
        postOverlayUpdateEvent_l();
    }


    if (mCurrentVideoEffect != VIDEO_EFFECT_NONE) {
        err1 = doVideoPostProcessing();
        if(err1 != M4NO_ERROR) {
            LOGE("doVideoPostProcessing returned err");
            bAppliedVideoEffect = false;
        }
        else {
            bAppliedVideoEffect = true;
        }
    }
    else {
        bAppliedVideoEffect = false;
        if(mRenderingMode != MEDIA_RENDERING_INVALID) {
            // No effects to be applied, but media rendering to be done
            err1 = doMediaRendering();
            if(err1 != M4NO_ERROR) {
                LOGE("doMediaRendering returned err");
                // Use the original mVideoBuffer for rendering
                mVideoResizedOrCropped = false;
            }
        }
    }

    if (mVideoRenderer != NULL) {
        LOGV("mVideoRenderer CALL render()");
        mVideoRenderer->render();
    }

    if (mLastVideoBuffer) {
        mLastVideoBuffer->release();
        mLastVideoBuffer = NULL;
    }

    mLastVideoBuffer = mVideoBuffer;
    mVideoBuffer = NULL;

    // Post progress callback based on the callback interval set
    if(mNumberDecVideoFrames >= mProgressCbInterval) {
        postProgressCallbackEvent_l();
        mNumberDecVideoFrames = 0;  // reset counter
    }

    // If the EndCutTime of the clip is reached, post an EOS event
    if((timeUs/1000) >= mPlayEndTimeMsec) {
        LOGV("PreviewPlayer: onVideoEvent EOS.");
        mFlags |= VIDEO_AT_EOS;
        mFlags |= AUDIO_AT_EOS;
        if (mOverlayUpdateEventPosted) {
            mOverlayUpdateEventPosted = false;
            postOverlayUpdateEvent_l();
        }
        postStreamDoneEvent_l(ERROR_END_OF_STREAM);
    }
    else {
        if(!mIsVideoSourceJpg) {
            postVideoEvent_l();
        }
        else {
            postVideoEvent_l(33000);
        }
    }
}

status_t PreviewPlayer::prepare() {
    Mutex::Autolock autoLock(mLock);
    return prepare_l();
}

status_t PreviewPlayer::prepare_l() {
    if (mFlags & PREPARED) {
        return OK;
    }

    if (mFlags & PREPARING) {
        return UNKNOWN_ERROR;
    }

    mIsAsyncPrepare = false;
    status_t err = prepareAsync_l();

    if (err != OK) {
        return err;
    }

    while (mFlags & PREPARING) {
        mPreparedCondition.wait(mLock);
    }

    return mPrepareResult;
}

status_t PreviewPlayer::prepareAsync_l() {
    if (mFlags & PREPARING) {
        return UNKNOWN_ERROR;  // async prepare already pending
    }

    if (!mQueueStarted) {
        mQueue.start();
        mQueueStarted = true;
    }

    mFlags |= PREPARING;
    mAsyncPrepareEvent = new PreviewPlayerEvent(
            this, &PreviewPlayer::onPrepareAsyncEvent);

    mQueue.postEvent(mAsyncPrepareEvent);

    return OK;
}

status_t PreviewPlayer::finishSetDataSource_l() {
    sp<DataSource> dataSource;
    sp<MediaExtractor> extractor;

    dataSource = DataSource::CreateFromURI(mUri.string(), &mUriHeaders);

    if (dataSource == NULL) {
        return UNKNOWN_ERROR;
    }

    // If the file type is .rgb, then there is no need to check for an extractor
    int uriLen = strlen(mUri);
    int startOffset = uriLen - 4;
    if(!strncasecmp(mUri+startOffset, ".rgb", 4)) {
        extractor = NULL;
    }
    else {
        extractor = MediaExtractor::Create(dataSource,
            MEDIA_MIMETYPE_CONTAINER_MPEG4);
    }

    if (extractor == NULL) {
        LOGV("PreviewPlayer::finishSetDataSource_l extractor == NULL");
        return setDataSource_l_jpg();
    }

    return setDataSource_l(extractor);
}


// static
bool PreviewPlayer::ContinuePreparation(void *cookie) {
    PreviewPlayer *me = static_cast<PreviewPlayer *>(cookie);

    return (me->mFlags & PREPARE_CANCELLED) == 0;
}

void PreviewPlayer::onPrepareAsyncEvent() {
    Mutex::Autolock autoLock(mLock);
    LOGV("onPrepareAsyncEvent");

    if (mFlags & PREPARE_CANCELLED) {
        LOGI("LV PLAYER prepare was cancelled before doing anything");
        abortPrepare(UNKNOWN_ERROR);
        return;
    }

    if (mUri.size() > 0) {
        status_t err = finishSetDataSource_l();

        if (err != OK) {
            abortPrepare(err);
            return;
        }
    }

    if (mVideoTrack != NULL && mVideoSource == NULL) {
        status_t err = initVideoDecoder(OMXCodec::kHardwareCodecsOnly);

        if (err != OK) {
            abortPrepare(err);
            return;
        }
    }

    if (mAudioTrack != NULL && mAudioSource == NULL) {
        status_t err = initAudioDecoder();

        if (err != OK) {
            abortPrepare(err);
            return;
        }
    }
    finishAsyncPrepare_l();

}

void PreviewPlayer::finishAsyncPrepare_l() {
    if (mIsAsyncPrepare) {
        if (mVideoSource == NULL) {
            LOGV("finishAsyncPrepare_l: MEDIA_SET_VIDEO_SIZE 0 0 ");
            notifyListener_l(MEDIA_SET_VIDEO_SIZE, 0, 0);
        } else {
            LOGV("finishAsyncPrepare_l: MEDIA_SET_VIDEO_SIZE");
            notifyVideoSize_l();
        }
        LOGV("finishAsyncPrepare_l: MEDIA_PREPARED");
        notifyListener_l(MEDIA_PREPARED);
    }

    mPrepareResult = OK;
    mFlags &= ~(PREPARING|PREPARE_CANCELLED);
    mFlags |= PREPARED;
    mAsyncPrepareEvent = NULL;
    mPreparedCondition.broadcast();
}

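// suspend() tears the player down while snapshotting enough state (URI,
// headers, file source, flags, playback position and, when readable, a copy
// of the last decoded frame plus its format) into mSuspensionState so that
// resume() can rebuild the data source, seek back to the saved position and
// re-display the last frame while the pipeline restarts.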
status_t PreviewPlayer::suspend() {
    LOGV("suspend");
    Mutex::Autolock autoLock(mLock);

    if (mSuspensionState != NULL) {
        if (mLastVideoBuffer == NULL) {
            // We get here if video is suspended again after
            // resuming, without having been played in between.
            SuspensionState *state = mSuspensionState;
            mSuspensionState = NULL;
            reset_l();
            mSuspensionState = state;
            return OK;
        }

        delete mSuspensionState;
        mSuspensionState = NULL;
    }

    if (mFlags & PREPARING) {
        mFlags |= PREPARE_CANCELLED;
    }

    while (mFlags & PREPARING) {
        mPreparedCondition.wait(mLock);
    }

    SuspensionState *state = new SuspensionState;
    state->mUri = mUri;
    state->mUriHeaders = mUriHeaders;
    state->mFileSource = mFileSource;

    state->mFlags = mFlags & (PLAYING | AUTO_LOOPING | LOOPING | AT_EOS);
    getPosition(&state->mPositionUs);

    if (mLastVideoBuffer) {
        size_t size = mLastVideoBuffer->range_length();
        if (size) {
            int32_t unreadable;
            if (!mLastVideoBuffer->meta_data()->findInt32(
                        kKeyIsUnreadable, &unreadable)
                    || unreadable == 0) {
                state->mLastVideoFrameSize = size;
                state->mLastVideoFrame = malloc(size);
                memcpy(state->mLastVideoFrame,
                       (const uint8_t *)mLastVideoBuffer->data()
                            + mLastVideoBuffer->range_offset(),
                       size);

                state->mVideoWidth = mVideoWidth;
                state->mVideoHeight = mVideoHeight;

                sp<MetaData> meta = mVideoSource->getFormat();
                CHECK(meta->findInt32(kKeyColorFormat, &state->mColorFormat));
                CHECK(meta->findInt32(kKeyWidth, &state->mDecodedWidth));
                CHECK(meta->findInt32(kKeyHeight, &state->mDecodedHeight));
            } else {
                LOGV("Unable to save last video frame, we have no access to "
                     "the decoded video data.");
            }
        }
    }

    reset_l();

    mSuspensionState = state;

    return OK;
}

status_t PreviewPlayer::resume() {
    LOGV("resume");
    Mutex::Autolock autoLock(mLock);

    if (mSuspensionState == NULL) {
        return INVALID_OPERATION;
    }

    SuspensionState *state = mSuspensionState;
    mSuspensionState = NULL;

    status_t err;
    if (state->mFileSource != NULL) {
        err = AwesomePlayer::setDataSource_l(state->mFileSource);

        if (err == OK) {
            mFileSource = state->mFileSource;
        }
    } else {
        err = AwesomePlayer::setDataSource_l(state->mUri, &state->mUriHeaders);
    }

    if (err != OK) {
        delete state;
        state = NULL;

        return err;
    }

    seekTo_l(state->mPositionUs);

    mFlags = state->mFlags & (AUTO_LOOPING | LOOPING | AT_EOS);

    if (state->mLastVideoFrame && (mSurface != NULL || mISurface != NULL)) {
        mVideoRenderer =
            new PreviewLocalRenderer(
                true, // previewOnly
                (OMX_COLOR_FORMATTYPE)state->mColorFormat,
                mSurface,
                state->mVideoWidth,
                state->mVideoHeight,
                state->mDecodedWidth,
                state->mDecodedHeight);

        mVideoRendererIsPreview = true;

        ((PreviewLocalRenderer *)mVideoRenderer.get())->render(
            state->mLastVideoFrame, state->mLastVideoFrameSize);
    }

    if (state->mFlags & PLAYING) {
        play_l();
    }

    mSuspensionState = state;
    state = NULL;

    return OK;
}


status_t PreviewPlayer::loadEffectsSettings(
        M4VSS3GPP_EffectSettings* pEffectSettings, int nEffects) {
    M4OSA_UInt32 i = 0, rgbSize = 0;
    M4VIFI_UInt8 *tmp = M4OSA_NULL;

    mNumberEffects = nEffects;
    mEffectsSettings = pEffectSettings;
    return OK;
}

status_t PreviewPlayer::loadAudioMixSettings(
        M4xVSS_AudioMixingSettings* pAudioMixSettings) {

    LOGV("PreviewPlayer: loadAudioMixSettings: ");
    mPreviewPlayerAudioMixSettings = pAudioMixSettings;
    return OK;
}

status_t PreviewPlayer::setAudioMixPCMFileHandle(
        M4OSA_Context pAudioMixPCMFileHandle) {

    LOGV("PreviewPlayer: setAudioMixPCMFileHandle: ");
    mAudioMixPCMFileHandle = pAudioMixPCMFileHandle;
    return OK;
}

status_t PreviewPlayer::setAudioMixStoryBoardParam(
        M4OSA_UInt32 audioMixStoryBoardTS,
        M4OSA_UInt32 currentMediaBeginCutTime,
        M4OSA_UInt32 primaryTrackVolValue) {

    mAudioMixStoryBoardTS = audioMixStoryBoardTS;
    mCurrentMediaBeginCutTime = currentMediaBeginCutTime;
    mCurrentMediaVolumeValue = primaryTrackVolValue;
    return OK;
}

status_t PreviewPlayer::setPlaybackBeginTime(uint32_t msec) {

    mPlayBeginTimeMsec = msec;
    return OK;
}

status_t PreviewPlayer::setPlaybackEndTime(uint32_t msec) {

    mPlayEndTimeMsec = msec;
    return OK;
}

status_t PreviewPlayer::setStoryboardStartTime(uint32_t msec) {

    mStoryboardStartTimeMsec = msec;
    mDecVideoTsStoryBoard = mStoryboardStartTimeMsec*1000;
    return OK;
}

status_t PreviewPlayer::setProgressCallbackInterval(uint32_t cbInterval) {

    mProgressCbInterval = cbInterval;
    return OK;
}


status_t PreviewPlayer::setMediaRenderingMode(
        M4xVSS_MediaRendering mode,
        M4VIDEOEDITING_VideoFrameSize outputVideoSize) {

    mRenderingMode = mode;

    /* Reset the boolean for each clip */
    mVideoResizedOrCropped = false;

    switch(outputVideoSize) {
        case M4VIDEOEDITING_kSQCIF:
            mOutputVideoWidth = 128;
            mOutputVideoHeight = 96;
            break;

        case M4VIDEOEDITING_kQQVGA:
            mOutputVideoWidth = 160;
            mOutputVideoHeight = 120;
            break;

        case M4VIDEOEDITING_kQCIF:
            mOutputVideoWidth = 176;
            mOutputVideoHeight = 144;
            break;

        case M4VIDEOEDITING_kQVGA:
            mOutputVideoWidth = 320;
            mOutputVideoHeight = 240;
            break;

        case M4VIDEOEDITING_kCIF:
            mOutputVideoWidth = 352;
            mOutputVideoHeight = 288;
            break;

        case M4VIDEOEDITING_kVGA:
            mOutputVideoWidth = 640;
            mOutputVideoHeight = 480;
            break;

        case M4VIDEOEDITING_kWVGA:
            mOutputVideoWidth = 800;
            mOutputVideoHeight = 480;
            break;

        case M4VIDEOEDITING_kNTSC:
            mOutputVideoWidth = 720;
            mOutputVideoHeight = 480;
            break;

        case M4VIDEOEDITING_k640_360:
            mOutputVideoWidth = 640;
            mOutputVideoHeight = 360;
            break;

        case M4VIDEOEDITING_k854_480:
            mOutputVideoWidth = 854;
            mOutputVideoHeight = 480;
            break;

        case M4VIDEOEDITING_kHD1280:
            mOutputVideoWidth = 1280;
            mOutputVideoHeight = 720;
            break;

        case M4VIDEOEDITING_kHD1080:
            mOutputVideoWidth = 1080;
            mOutputVideoHeight = 720;
            break;

        case M4VIDEOEDITING_kHD960:
            mOutputVideoWidth = 960;
            mOutputVideoHeight = 720;
            break;

        default:
            LOGE("unsupported output video size set");
            return BAD_VALUE;
    }

    return OK;
}

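// doMediaRendering converts the current decoded YUV420 planar frame into the
// renderer's YV12 output buffer, applying the configured rendering mode
// (typically resizing, cropping or adding black borders, depending on
// mRenderingMode) so that the frame matches the output video size.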
M4OSA_ERR PreviewPlayer::doMediaRendering() {
    M4OSA_ERR err = M4NO_ERROR;
    M4VIFI_ImagePlane planeIn[3], planeOut[3];
    M4VIFI_UInt8 *inBuffer = M4OSA_NULL, *finalOutputBuffer = M4OSA_NULL;
    M4VIFI_UInt8 *tempOutputBuffer = M4OSA_NULL;
    size_t videoBufferSize = 0;
    M4OSA_UInt32 frameSize = 0, i = 0, index = 0, nFrameCount = 0, bufferOffset = 0;
    int32_t colorFormat = 0;

    if(!mIsVideoSourceJpg) {
        sp<MetaData> meta = mVideoSource->getFormat();
        CHECK(meta->findInt32(kKeyColorFormat, &colorFormat));
    }
    else {
        colorFormat = OMX_COLOR_FormatYUV420Planar;
    }

    videoBufferSize = mVideoBuffer->size();
    frameSize = (mVideoWidth*mVideoHeight*3) >> 1;

    uint8_t* outBuffer;
    size_t outBufferStride = 0;

    mVideoRenderer->getBuffer(&outBuffer, &outBufferStride);

    bufferOffset = index*frameSize;
    inBuffer = (M4OSA_UInt8 *)mVideoBuffer->data()+
        mVideoBuffer->range_offset()+bufferOffset;


    /* In plane */
    prepareYUV420ImagePlane(planeIn, mVideoWidth,
        mVideoHeight, (M4VIFI_UInt8 *)inBuffer, mReportedWidth, mReportedHeight);

    // Set the output YUV420 plane to be compatible with YV12 format
    // W & H even
    // YVU instead of YUV
    // align buffers on 32 bits

    // In YV12 format, sizes must be even
    M4OSA_UInt32 yv12PlaneWidth = ((mOutputVideoWidth +1)>>1)<<1;
    M4OSA_UInt32 yv12PlaneHeight = ((mOutputVideoHeight+1)>>1)<<1;

    prepareYV12ImagePlane(planeOut, yv12PlaneWidth, yv12PlaneHeight,
        (M4OSA_UInt32)outBufferStride, (M4VIFI_UInt8 *)outBuffer);


    err = applyRenderingMode(planeIn, planeOut, mRenderingMode);

    if(err != M4NO_ERROR) {
        LOGE("doMediaRendering: applyRenderingMode returned err=0x%x", err);
        return err;
    }
    mVideoResizedOrCropped = true;

    return err;
}

status_t PreviewPlayer::resetJniCallbackTimeStamp() {

    mDecVideoTsStoryBoard = mStoryboardStartTimeMsec*1000;
    return OK;
}

void PreviewPlayer::postProgressCallbackEvent_l() {
    if (mProgressCbEventPending) {
        return;
    }
    mProgressCbEventPending = true;

    mQueue.postEvent(mProgressCbEvent);
}


void PreviewPlayer::onProgressCbEvent() {
    Mutex::Autolock autoLock(mLock);
    if (!mProgressCbEventPending) {
        return;
    }
    mProgressCbEventPending = false;
    // If playback starts from a previous I-frame,
    // then send the frame storyboard duration
    if((mDecodedVideoTs/1000) < mPlayBeginTimeMsec) {
        notifyListener_l(MEDIA_INFO, 0, mDecVideoTsStoryBoard/1000);
    }
    else {
        notifyListener_l(MEDIA_INFO, 0,
            (((mDecodedVideoTs+mDecVideoTsStoryBoard)/1000)-mPlayBeginTimeMsec));
    }
}

void PreviewPlayer::postOverlayUpdateEvent_l() {
    if (mOverlayUpdateEventPending) {
        return;
    }
    mOverlayUpdateEventPending = true;
    mQueue.postEvent(mOverlayUpdateEvent);
}

void PreviewPlayer::onUpdateOverlayEvent() {
    Mutex::Autolock autoLock(mLock);

    if (!mOverlayUpdateEventPending) {
        return;
    }
    mOverlayUpdateEventPending = false;

    int updateState;
    if (mOverlayUpdateEventPosted) {
        updateState = 1;
    } else {
        updateState = 0;
    }
    notifyListener_l(0xBBBBBBBB, updateState, mCurrFramingEffectIndex);
}


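// setVideoPostProcessingNode maps an M4VSS3GPP/M4xVSS effect type onto the
// player's internal VIDEO_EFFECT_* bitmask and either sets or clears that bit
// in mCurrentVideoEffect, so several effects can be active on a frame at once.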
void PreviewPlayer::setVideoPostProcessingNode(
        M4VSS3GPP_VideoEffectType type, M4OSA_Bool enable) {

    uint32_t effect = VIDEO_EFFECT_NONE;

    // Map M4VSS3GPP_VideoEffectType to the local enum
    switch(type) {
        case M4VSS3GPP_kVideoEffectType_FadeFromBlack:
            effect = VIDEO_EFFECT_FADEFROMBLACK;
            break;

        case M4VSS3GPP_kVideoEffectType_FadeToBlack:
            effect = VIDEO_EFFECT_FADETOBLACK;
            break;

        case M4VSS3GPP_kVideoEffectType_CurtainOpening:
            effect = VIDEO_EFFECT_CURTAINOPEN;
            break;

        case M4VSS3GPP_kVideoEffectType_CurtainClosing:
            effect = VIDEO_EFFECT_CURTAINCLOSE;
            break;

        case M4xVSS_kVideoEffectType_BlackAndWhite:
            effect = VIDEO_EFFECT_BLACKANDWHITE;
            break;

        case M4xVSS_kVideoEffectType_Pink:
            effect = VIDEO_EFFECT_PINK;
            break;

        case M4xVSS_kVideoEffectType_Green:
            effect = VIDEO_EFFECT_GREEN;
            break;

        case M4xVSS_kVideoEffectType_Sepia:
            effect = VIDEO_EFFECT_SEPIA;
            break;

        case M4xVSS_kVideoEffectType_Negative:
            effect = VIDEO_EFFECT_NEGATIVE;
            break;

        case M4xVSS_kVideoEffectType_Framing:
            effect = VIDEO_EFFECT_FRAMING;
            break;

        case M4xVSS_kVideoEffectType_Fifties:
            effect = VIDEO_EFFECT_FIFTIES;
            break;

        case M4xVSS_kVideoEffectType_ColorRGB16:
            effect = VIDEO_EFFECT_COLOR_RGB16;
            break;

        case M4xVSS_kVideoEffectType_Gradient:
            effect = VIDEO_EFFECT_GRADIENT;
            break;

        default:
            effect = VIDEO_EFFECT_NONE;
            break;
    }

    if(enable == M4OSA_TRUE) {
        // If already set, then there is no need to set it again
        if(!(mCurrentVideoEffect & effect)) {
            mCurrentVideoEffect |= effect;
            if(effect == VIDEO_EFFECT_FIFTIES) {
                mIsFiftiesEffectStarted = true;
            }
        }
    }
    else {
        // Reset only if already set
        if(mCurrentVideoEffect & effect) {
            mCurrentVideoEffect &= ~effect;
        }
    }
}

status_t PreviewPlayer::setImageClipProperties(uint32_t width, uint32_t height) {
    mVideoWidth = width;
    mVideoHeight = height;
    return OK;
}


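// doVideoPostProcessing hands the current decoded frame, the active effect
// settings and the renderer's output buffer to applyEffectsAndRenderingMode,
// which writes the effect-processed, rendering-mode-adjusted result directly
// into the buffer that will be displayed. Only planar YUV420 input is
// supported here; semi-planar sources are rejected.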
M4OSA_ERR PreviewPlayer::doVideoPostProcessing() {
    M4OSA_ERR err = M4NO_ERROR;
    vePostProcessParams postProcessParams;
    int32_t colorFormat = 0;


    if(!mIsVideoSourceJpg) {
        sp<MetaData> meta = mVideoSource->getFormat();
        CHECK(meta->findInt32(kKeyColorFormat, &colorFormat));
    }
    else {
        colorFormat = OMX_COLOR_FormatYUV420Planar;
    }

    if((colorFormat == OMX_COLOR_FormatYUV420SemiPlanar) ||
        (colorFormat == 0x7FA30C00)) {
        LOGE("doVideoPostProcessing: colorFormat YUV420Sp not supported");
        return M4ERR_UNSUPPORTED_MEDIA_TYPE;
    }

    postProcessParams.vidBuffer = (M4VIFI_UInt8*)mVideoBuffer->data()
        + mVideoBuffer->range_offset();

    postProcessParams.videoWidth = mVideoWidth;
    postProcessParams.videoHeight = mVideoHeight;
    postProcessParams.timeMs = mDecodedVideoTs/1000;
    postProcessParams.timeOffset = mDecVideoTsStoryBoard/1000;
    postProcessParams.effectsSettings = mEffectsSettings;
    postProcessParams.numberEffects = mNumberEffects;
    postProcessParams.outVideoWidth = mOutputVideoWidth;
    postProcessParams.outVideoHeight = mOutputVideoHeight;
    postProcessParams.currentVideoEffect = mCurrentVideoEffect;
    postProcessParams.renderingMode = mRenderingMode;
    if(mIsFiftiesEffectStarted == M4OSA_TRUE) {
        postProcessParams.isFiftiesEffectStarted = M4OSA_TRUE;
        mIsFiftiesEffectStarted = M4OSA_FALSE;
    }
    else {
        postProcessParams.isFiftiesEffectStarted = M4OSA_FALSE;
    }

    postProcessParams.overlayFrameRGBBuffer = mFrameRGBBuffer;
    postProcessParams.overlayFrameYUVBuffer = mFrameYUVBuffer;
    mVideoRenderer->getBuffer(&(postProcessParams.pOutBuffer),
        &(postProcessParams.outBufferStride));
    err = applyEffectsAndRenderingMode(&postProcessParams,
        mReportedWidth, mReportedHeight);

    return err;
}

status_t PreviewPlayer::readFirstVideoFrame() {
    LOGV("PreviewPlayer::readFirstVideoFrame");

    if (!mVideoBuffer) {
        MediaSource::ReadOptions options;
        if (mSeeking) {
            LOGV("LV PLAYER seeking to %lld us (%.2f secs)", mSeekTimeUs,
                 mSeekTimeUs / 1E6);

            options.setSeekTo(
                    mSeekTimeUs, MediaSource::ReadOptions::SEEK_CLOSEST);
        }
        for (;;) {
            status_t err = mVideoSource->read(&mVideoBuffer, &options);
            options.clearSeekTo();

            if (err != OK) {
                CHECK_EQ(mVideoBuffer, NULL);

                if (err == INFO_FORMAT_CHANGED) {
                    LOGV("LV PLAYER VideoSource signalled format change");
                    notifyVideoSize_l();
                    sp<MetaData> meta = mVideoSource->getFormat();

                    CHECK(meta->findInt32(kKeyWidth, &mReportedWidth));
                    CHECK(meta->findInt32(kKeyHeight, &mReportedHeight));

                    if (mVideoRenderer != NULL) {
                        mVideoRendererIsPreview = false;
                        initRenderer_l();
                    }
                    continue;
                }
                LOGV("PreviewPlayer: readFirstVideoFrame EOS reached.");
                mFlags |= VIDEO_AT_EOS;
                postStreamDoneEvent_l(err);
                return OK;
            }

            if (mVideoBuffer->range_length() == 0) {
                // Some decoders, notably the PV AVC software decoder
                // return spurious empty buffers that we just want to ignore.

                mVideoBuffer->release();
                mVideoBuffer = NULL;
                continue;
            }

            int64_t videoTimeUs;
            CHECK(mVideoBuffer->meta_data()->findInt64(kKeyTime, &videoTimeUs));

            if((videoTimeUs/1000) < mPlayBeginTimeMsec) {
                // Buffers are before the begin cut time,
                // ignore them.
                //LOGI("PreviewPlayer: Ignoring buffers before begin cut time");
                mVideoBuffer->release();
                mVideoBuffer = NULL;
                continue;
            }

            break;
        }
    }

    int64_t timeUs;
    CHECK(mVideoBuffer->meta_data()->findInt64(kKeyTime, &timeUs));

    {
        Mutex::Autolock autoLock(mMiscStateLock);
        mVideoTimeUs = timeUs;
    }

    mDecodedVideoTs = timeUs;

    return OK;

}

} // namespace android