/*
 * Copyright (C) 2011 NXP Software
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#define LOG_NDEBUG 1
#define LOG_TAG "PreviewPlayer"
#include <utils/Log.h>

#include <dlfcn.h>

#include "include/ARTSPController.h"
#include "PreviewPlayer.h"
#include "DummyAudioSource.h"
#include "DummyVideoSource.h"
#include "VideoEditorSRC.h"
#include "include/LiveSession.h"
#include "include/NuCachedSource2.h"
#include "include/ThrottledSource.h"

#include "PreviewRenderer.h"

#include <binder/IPCThreadState.h>
#include <media/stagefright/DataSource.h>
#include <media/stagefright/FileSource.h>
#include <media/stagefright/MediaBuffer.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MediaExtractor.h>
#include <media/stagefright/MediaDebug.h>
#include <media/stagefright/MediaSource.h>
#include <media/stagefright/MetaData.h>
#include <media/stagefright/OMXCodec.h>

#include <surfaceflinger/Surface.h>
#include <media/stagefright/foundation/ALooper.h>

namespace android {

struct PreviewPlayerEvent : public TimedEventQueue::Event {
    PreviewPlayerEvent(
            PreviewPlayer *player,
            void (PreviewPlayer::*method)())
        : mPlayer(player),
          mMethod(method) {
    }

protected:
    virtual ~PreviewPlayerEvent() {}

    virtual void fire(TimedEventQueue *queue, int64_t /* now_us */) {
        (mPlayer->*mMethod)();
    }

private:
    PreviewPlayer *mPlayer;
    void (PreviewPlayer::*mMethod)();

    PreviewPlayerEvent(const PreviewPlayerEvent &);
    PreviewPlayerEvent &operator=(const PreviewPlayerEvent &);
};
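
// Illustrative sketch (not part of the original file): PreviewPlayerEvent is a
// thin adapter that lets PreviewPlayer post one of its own member functions to
// the TimedEventQueue. The pattern used throughout this file looks roughly
// like the following, assuming the stagefright TimedEventQueue API of this
// release (postEventWithDelay/cancelEvent); the 10 ms delay is only an example:
//
//     sp<TimedEventQueue::Event> ev =
//             new PreviewPlayerEvent(this, &PreviewPlayer::onVideoEvent);
//     mQueue.postEventWithDelay(ev, 10000);   // fire roughly 10 ms from now
//     ...
//     mQueue.cancelEvent(ev->eventID());      // revoke it if no longer needed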

struct PreviewLocalRenderer : public PreviewPlayerRenderer {
    PreviewLocalRenderer(
            bool previewOnly,
            OMX_COLOR_FORMATTYPE colorFormat,
            const sp<Surface> &surface,
            size_t displayWidth, size_t displayHeight,
            size_t decodedWidth, size_t decodedHeight,
            int32_t rotationDegrees = 0)
        : mTarget(NULL) {
        init(previewOnly,
             colorFormat, surface,
             displayWidth, displayHeight,
             decodedWidth, decodedHeight,
             rotationDegrees);
    }

    virtual void render(MediaBuffer *buffer) {
        render((const uint8_t *)buffer->data() + buffer->range_offset(),
               buffer->range_length());
    }

    void render(const void *data, size_t size) {
        mTarget->render(data, size, NULL);
    }

    void render() {
        mTarget->renderYV12();
    }

    void getBuffer(uint8_t **data, size_t *stride) {
        mTarget->getBufferYV12(data, stride);
    }

protected:
    virtual ~PreviewLocalRenderer() {
        delete mTarget;
        mTarget = NULL;
    }

private:
    PreviewRenderer *mTarget;

    void init(
            bool previewOnly,
            OMX_COLOR_FORMATTYPE colorFormat,
            const sp<Surface> &surface,
            size_t displayWidth, size_t displayHeight,
            size_t decodedWidth, size_t decodedHeight,
            int32_t rotationDegrees = 0);

    PreviewLocalRenderer(const PreviewLocalRenderer &);
    PreviewLocalRenderer &operator=(const PreviewLocalRenderer &);
};

void PreviewLocalRenderer::init(
        bool previewOnly,
        OMX_COLOR_FORMATTYPE colorFormat,
        const sp<Surface> &surface,
        size_t displayWidth, size_t displayHeight,
        size_t decodedWidth, size_t decodedHeight,
        int32_t rotationDegrees) {
    mTarget = new PreviewRenderer(
            colorFormat, surface, displayWidth, displayHeight,
            decodedWidth, decodedHeight, rotationDegrees);
}
PreviewPlayer::PreviewPlayer()
    : AwesomePlayer(),
      mFrameRGBBuffer(NULL),
      mFrameYUVBuffer(NULL) {

    mVideoRenderer = NULL;
    mLastVideoBuffer = NULL;
    mSuspensionState = NULL;
    mEffectsSettings = NULL;
    mAudioMixStoryBoardTS = 0;
    mCurrentMediaBeginCutTime = 0;
    mCurrentMediaVolumeValue = 0;
    mNumberEffects = 0;
    mDecodedVideoTs = 0;
    mDecVideoTsStoryBoard = 0;
    mCurrentVideoEffect = VIDEO_EFFECT_NONE;
    mProgressCbInterval = 0;
    mNumberDecVideoFrames = 0;

    mVideoEvent = new PreviewPlayerEvent(this, &PreviewPlayer::onVideoEvent);
    mVideoEventPending = false;

    mStreamDoneEvent = new PreviewPlayerEvent(this,
            &AwesomePlayer::onStreamDone);
    mStreamDoneEventPending = false;

    mCheckAudioStatusEvent = new PreviewPlayerEvent(
            this, &AwesomePlayer::onCheckAudioStatus);
    mAudioStatusEventPending = false;

    mProgressCbEvent = new PreviewPlayerEvent(this,
            &PreviewPlayer::onProgressCbEvent);
    mProgressCbEventPending = false;

    mResizedVideoBuffer = NULL;
    mVideoResizedOrCropped = false;
    mRenderingMode = (M4xVSS_MediaRendering)MEDIA_RENDERING_INVALID;
    mIsFiftiesEffectStarted = false;
    reset();
}

PreviewPlayer::~PreviewPlayer() {

    if (mQueueStarted) {
        mQueue.stop();
    }

    reset();

    if (mResizedVideoBuffer != NULL) {
        M4OSA_free((M4OSA_MemAddr32)(mResizedVideoBuffer->data()));
        mResizedVideoBuffer = NULL;
    }

    mVideoRenderer.clear();
    mVideoRenderer = NULL;
}

void PreviewPlayer::cancelPlayerEvents(bool keepBufferingGoing) {
    mQueue.cancelEvent(mVideoEvent->eventID());
    mVideoEventPending = false;
    mQueue.cancelEvent(mStreamDoneEvent->eventID());
    mStreamDoneEventPending = false;
    mQueue.cancelEvent(mCheckAudioStatusEvent->eventID());
    mAudioStatusEventPending = false;

    mQueue.cancelEvent(mProgressCbEvent->eventID());
    mProgressCbEventPending = false;
}

status_t PreviewPlayer::setDataSource(
        const char *uri, const KeyedVector<String8, String8> *headers) {
    Mutex::Autolock autoLock(mLock);
    return setDataSource_l(uri, headers);
}

status_t PreviewPlayer::setDataSource_l(
        const char *uri, const KeyedVector<String8, String8> *headers) {
    reset_l();

    mUri = uri;

    if (headers) {
        mUriHeaders = *headers;
    }

    // The actual work will be done during preparation in the call to
    // ::finishSetDataSource_l to avoid blocking the calling thread in
    // setDataSource for any significant time.
    return OK;
}

status_t PreviewPlayer::setDataSource_l(const sp<MediaExtractor> &extractor) {
    bool haveAudio = false;
    bool haveVideo = false;
    for (size_t i = 0; i < extractor->countTracks(); ++i) {
        sp<MetaData> meta = extractor->getTrackMetaData(i);

        const char *mime;
        CHECK(meta->findCString(kKeyMIMEType, &mime));

        if (!haveVideo && !strncasecmp(mime, "video/", 6)) {
            setVideoSource(extractor->getTrack(i));
            haveVideo = true;
        } else if (!haveAudio && !strncasecmp(mime, "audio/", 6)) {
            setAudioSource(extractor->getTrack(i));
            haveAudio = true;

            if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_VORBIS)) {
                // Only do this for vorbis audio, none of the other audio
                // formats even support this ringtone specific hack and
                // retrieving the metadata on some extractors may turn out
                // to be very expensive.
                sp<MetaData> fileMeta = extractor->getMetaData();
                int32_t loop;
                if (fileMeta != NULL
                        && fileMeta->findInt32(kKeyAutoLoop, &loop)
                        && loop != 0) {
                    mFlags |= AUTO_LOOPING;
                }
            }
        }

        if (haveAudio && haveVideo) {
            break;
        }
    }

    // Add support for a dummy audio track when the clip has no audio.
    if (!haveAudio) {
        LOGV("PreviewPlayer: setDataSource_l Dummyaudiocreation started");

        mAudioTrack = DummyAudioSource::Create(32000, 2, 20000,
                ((mPlayEndTimeMsec)*1000));
        LOGV("PreviewPlayer: setDataSource_l Dummyaudiosource created");
        if (mAudioTrack != NULL) {
            haveAudio = true;
        }
    }

    if (!haveAudio && !haveVideo) {
        return UNKNOWN_ERROR;
    }

    mExtractorFlags = extractor->flags();
    return OK;
}

status_t PreviewPlayer::setDataSource_l_jpg() {
    LOGV("PreviewPlayer: setDataSource_l_jpg started");

    mAudioSource = DummyAudioSource::Create(32000, 2, 20000,
            ((mPlayEndTimeMsec)*1000));
    LOGV("PreviewPlayer: setDataSource_l_jpg Dummyaudiosource created");
    if (mAudioSource != NULL) {
        setAudioSource(mAudioSource);
    }
    status_t error = mAudioSource->start();
    if (error != OK) {
        LOGV("Error starting dummy audio source");
        mAudioSource.clear();
        return error;
    }

    mDurationUs = (mPlayEndTimeMsec - mPlayBeginTimeMsec)*1000;

    mVideoSource = DummyVideoSource::Create(mVideoWidth, mVideoHeight,
            mDurationUs, mUri);
    setVideoSource(mVideoSource);
    status_t err1 = mVideoSource->start();
    if (err1 != OK) {
        mVideoSource.clear();
        return err1;
    }

    mIsVideoSourceJpg = true;
    return OK;
}
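
// Note (illustrative, not from the original sources): for still-image clips
// there are no real tracks to decode, so the two dummy sources above stand in
// for them. The call shapes used here read as
//
//     DummyAudioSource::Create(samplingRate, channelCount, frameDurationUs,
//                              endTimeUs)     // silent PCM, by this reading
//     DummyVideoSource::Create(width, height, durationUs, imageUri)
//
// The parameter names are an interpretation of the arguments passed above
// (32000 Hz, 2 channels, 20 ms frames, clip end time in microseconds), not a
// documented API contract.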

void PreviewPlayer::reset() {
    Mutex::Autolock autoLock(mLock);
    reset_l();
}

void PreviewPlayer::reset_l() {

    if (mFlags & PREPARING) {
        mFlags |= PREPARE_CANCELLED;
    }

    while (mFlags & PREPARING) {
        mPreparedCondition.wait(mLock);
    }

    cancelPlayerEvents();
    mAudioTrack.clear();
    mVideoTrack.clear();

    // Shutdown audio first, so that the response to the reset request
    // appears to happen instantaneously as far as the user is concerned.
    // If we did this later, audio would continue playing while we
    // shut down the video-related resources and the player would appear
    // less responsive to the reset request.
    if (mAudioPlayer == NULL && mAudioSource != NULL) {
        // If we had an audio player, it would have effectively
        // taken possession of the audio source and stopped it when
        // _it_ is stopped. Otherwise this is still our responsibility.
        mAudioSource->stop();
    }
    mAudioSource.clear();

    mTimeSource = NULL;

    delete mAudioPlayer;
    mAudioPlayer = NULL;

    if (mLastVideoBuffer) {
        mLastVideoBuffer->release();
        mLastVideoBuffer = NULL;
    }

    if (mVideoBuffer) {
        mVideoBuffer->release();
        mVideoBuffer = NULL;
    }

    if (mVideoSource != NULL) {
        mVideoSource->stop();

        // The following hack is necessary to ensure that the OMX
        // component is completely released by the time we may try
        // to instantiate it again.
        wp<MediaSource> tmp = mVideoSource;
        mVideoSource.clear();
        while (tmp.promote() != NULL) {
            usleep(1000);
        }
        IPCThreadState::self()->flushCommands();
    }

    mDurationUs = -1;
    mFlags = 0;
    mExtractorFlags = 0;
    mVideoWidth = mVideoHeight = -1;
    mTimeSourceDeltaUs = 0;
    mVideoTimeUs = 0;

    mSeeking = false;
    mSeekNotificationSent = false;
    mSeekTimeUs = 0;

    mUri.setTo("");
    mUriHeaders.clear();

    mFileSource.clear();

    delete mSuspensionState;
    mSuspensionState = NULL;

    mCurrentVideoEffect = VIDEO_EFFECT_NONE;
    mIsVideoSourceJpg = false;
    mFrameRGBBuffer = NULL;
    if (mFrameYUVBuffer != NULL) {
        M4OSA_free((M4OSA_MemAddr32)mFrameYUVBuffer);
        mFrameYUVBuffer = NULL;
    }
}

void PreviewPlayer::partial_reset_l() {

    if (mLastVideoBuffer) {
        mLastVideoBuffer->release();
        mLastVideoBuffer = NULL;
    }

    /* call base struct */
    AwesomePlayer::partial_reset_l();
}

status_t PreviewPlayer::play() {
    Mutex::Autolock autoLock(mLock);

    mFlags &= ~CACHE_UNDERRUN;

    return play_l();
}

status_t PreviewPlayer::play_l() {
    VideoEditorAudioPlayer *mVePlayer = NULL;

    if (mFlags & PLAYING) {
        return OK;
    }
    mStartNextPlayer = false;

    if (!(mFlags & PREPARED)) {
        status_t err = prepare_l();

        if (err != OK) {
            return err;
        }
    }

    mFlags |= PLAYING;
    mFlags |= FIRST_FRAME;

    bool deferredAudioSeek = false;

    if (mAudioSource != NULL) {
        if (mAudioPlayer == NULL) {
            if (mAudioSink != NULL) {

                mAudioPlayer = new VideoEditorAudioPlayer(mAudioSink, this);
                mVePlayer = (VideoEditorAudioPlayer*)mAudioPlayer;

                mAudioPlayer->setSource(mAudioSource);

                mVePlayer->setAudioMixSettings(
                        mPreviewPlayerAudioMixSettings);

                mVePlayer->setAudioMixPCMFileHandle(
                        mAudioMixPCMFileHandle);

                mVePlayer->setAudioMixStoryBoardSkimTimeStamp(
                        mAudioMixStoryBoardTS, mCurrentMediaBeginCutTime,
                        mCurrentMediaVolumeValue);

                // We've already started the MediaSource in order to enable
                // the prefetcher to read its data.
                status_t err = mVePlayer->start(
                        true /* sourceAlreadyStarted */);

                if (err != OK) {
                    delete mAudioPlayer;
                    mAudioPlayer = NULL;

                    mFlags &= ~(PLAYING | FIRST_FRAME);
                    return err;
                }

                mTimeSource = mVePlayer;

                deferredAudioSeek = true;
                mWatchForAudioSeekComplete = false;
                mWatchForAudioEOS = true;
            }
        } else {
            // Resuming from pause: the existing audio player is the
            // VideoEditorAudioPlayer created above on the first start.
            mVePlayer = (VideoEditorAudioPlayer*)mAudioPlayer;
            mVePlayer->resume();
        }
    }

    if (mTimeSource == NULL && mAudioPlayer == NULL) {
        mTimeSource = &mSystemTimeSource;
    }

    if (mVideoSource != NULL) {
        // Kick off video playback
        postVideoEvent_l();
    }

    if (deferredAudioSeek) {
        // If there was a seek request while we were paused
        // and we're just starting up again, honor the request now.
        seekAudioIfNecessary_l();
    }

    if (mFlags & AT_EOS) {
        // Legacy behaviour, if a stream finishes playing and then
        // is started again, we play from the start...
        seekTo_l(0);
    }

    return OK;
}
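
// Illustrative call sequence (a sketch, not taken from the original sources):
// a caller such as the preview controller is expected to configure the player
// before starting it, roughly along these lines, using only methods defined
// in this file:
//
//     player->setDataSource(clipPath, NULL);
//     player->setPlaybackBeginTime(beginCutMs);
//     player->setPlaybackEndTime(endCutMs);
//     player->setStoryboardStartTime(storyboardMs);
//     player->loadEffectsSettings(effects, nEffects);
//     player->setMediaRenderingMode(mode, outputSize);
//     player->prepare();
//     player->play();
//
// The variable names (clipPath, beginCutMs, ...) are placeholders; the actual
// caller and ordering live outside this file.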

void PreviewPlayer::initRenderer_l() {
    if (mSurface != NULL || mISurface != NULL) {
        sp<MetaData> meta = mVideoSource->getFormat();

        int32_t format;
        const char *component;
        int32_t decodedWidth, decodedHeight;
        CHECK(meta->findInt32(kKeyColorFormat, &format));
        CHECK(meta->findCString(kKeyDecoderComponent, &component));
        CHECK(meta->findInt32(kKeyWidth, &decodedWidth));
        CHECK(meta->findInt32(kKeyHeight, &decodedHeight));

        // Must ensure that mVideoRenderer's destructor is actually executed
        // before creating a new one.
        IPCThreadState::self()->flushCommands();

        // Always use the local renderer since the decoded buffers are
        // modified by the postprocessing module. Decoders are instantiated
        // locally and as a consequence allocate their buffers in local
        // address space.
        if (mVideoRenderer == NULL) {

            mVideoRenderer = new PreviewLocalRenderer(
                    false,  // previewOnly
                    (OMX_COLOR_FORMATTYPE)format,
                    mSurface,
                    mOutputVideoWidth, mOutputVideoHeight,
                    mOutputVideoWidth, mOutputVideoHeight);
        }
    }
}

void PreviewPlayer::setISurface(const sp<ISurface> &isurface) {
    Mutex::Autolock autoLock(mLock);
    mISurface = isurface;
}

status_t PreviewPlayer::seekTo(int64_t timeUs) {

    if ((mExtractorFlags & MediaExtractor::CAN_SEEK) || (mIsVideoSourceJpg)) {
        Mutex::Autolock autoLock(mLock);
        return seekTo_l(timeUs);
    }

    return OK;
}

status_t PreviewPlayer::getVideoDimensions(
        int32_t *width, int32_t *height) const {
    Mutex::Autolock autoLock(mLock);

    if (mVideoWidth < 0 || mVideoHeight < 0) {
        return UNKNOWN_ERROR;
    }

    *width = mVideoWidth;
    *height = mVideoHeight;

    return OK;
}

status_t PreviewPlayer::initAudioDecoder() {
    sp<MetaData> meta = mAudioTrack->getFormat();
    const char *mime;
    CHECK(meta->findCString(kKeyMIMEType, &mime));

    if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_RAW)) {
        mAudioSource = mAudioTrack;
    } else {
        sp<MediaSource> aRawSource;
        aRawSource = OMXCodec::Create(
                mClient.interface(), mAudioTrack->getFormat(),
                false,  // createEncoder
                mAudioTrack);

        if (aRawSource != NULL) {
            LOGV("initAudioDecoder: new VideoEditorSRC");
            mAudioSource = new VideoEditorSRC(aRawSource);
        }
    }

    if (mAudioSource != NULL) {
        int64_t durationUs;
        if (mAudioTrack->getFormat()->findInt64(kKeyDuration, &durationUs)) {
            Mutex::Autolock autoLock(mMiscStateLock);
            if (mDurationUs < 0 || durationUs > mDurationUs) {
                mDurationUs = durationUs;
            }
        }
        status_t err = mAudioSource->start();

        if (err != OK) {
            mAudioSource.clear();
            return err;
        }
    } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_QCELP)) {
        // For legacy reasons we're simply going to ignore the absence
        // of an audio decoder for QCELP instead of aborting playback
        // altogether.
        return OK;
    }

    return mAudioSource != NULL ? OK : UNKNOWN_ERROR;
}

status_t PreviewPlayer::initVideoDecoder(uint32_t flags) {

    mVideoSource = OMXCodec::Create(
            mClient.interface(), mVideoTrack->getFormat(),
            false,
            mVideoTrack,
            NULL, flags);

    if (mVideoSource != NULL) {
        int64_t durationUs;
        if (mVideoTrack->getFormat()->findInt64(kKeyDuration, &durationUs)) {
            Mutex::Autolock autoLock(mMiscStateLock);
            if (mDurationUs < 0 || durationUs > mDurationUs) {
                mDurationUs = durationUs;
            }
        }

        CHECK(mVideoTrack->getFormat()->findInt32(kKeyWidth, &mVideoWidth));
        CHECK(mVideoTrack->getFormat()->findInt32(kKeyHeight, &mVideoHeight));

        status_t err = mVideoSource->start();

        if (err != OK) {
            mVideoSource.clear();
            return err;
        }
    }

    return mVideoSource != NULL ? OK : UNKNOWN_ERROR;
}

void PreviewPlayer::onVideoEvent() {
    uint32_t i = 0;
    bool bAppliedVideoEffect = false;
    M4OSA_ERR err1 = M4NO_ERROR;
    int64_t imageFrameTimeUs = 0;

    Mutex::Autolock autoLock(mLock);
    if (!mVideoEventPending) {
        // The event has been cancelled in reset_l() but had already
        // been scheduled for execution at that time.
        return;
    }
    mVideoEventPending = false;

    TimeSource *ts_st = &mSystemTimeSource;
    int64_t timeStartUs = ts_st->getRealTimeUs();

    if (mSeeking) {
        if (mLastVideoBuffer) {
            mLastVideoBuffer->release();
            mLastVideoBuffer = NULL;
        }

        if (mAudioSource != NULL) {
            // We're going to seek the video source first, followed by
            // the audio source.
            // In order to avoid jumps in the DataSource offset caused by
            // the audio codec prefetching data from the old locations
            // while the video codec is already reading data from the new
            // locations, we'll "pause" the audio source, causing it to
            // stop reading input data until a subsequent seek.
            if (mAudioPlayer != NULL) {
                mAudioPlayer->pause();
            }
            mAudioSource->pause();
        }
    }

    if (!mVideoBuffer) {
        MediaSource::ReadOptions options;
        if (mSeeking) {
            LOGV("LV PLAYER seeking to %lld us (%.2f secs)", mSeekTimeUs,
                    mSeekTimeUs / 1E6);

            options.setSeekTo(
                    mSeekTimeUs, MediaSource::ReadOptions::SEEK_CLOSEST);
        }
        for (;;) {
            status_t err = mVideoSource->read(&mVideoBuffer, &options);
            options.clearSeekTo();

            if (err != OK) {
                CHECK_EQ(mVideoBuffer, NULL);

                if (err == INFO_FORMAT_CHANGED) {
                    LOGV("LV PLAYER VideoSource signalled format change");
                    notifyVideoSize_l();

                    if (mVideoRenderer != NULL) {
                        mVideoRendererIsPreview = false;
                        initRenderer_l();
                    }
                    continue;
                }

                // So video playback is complete, but we may still have
                // a seek request pending that needs to be applied
                // to the audio track.
                if (mSeeking) {
                    LOGV("video stream ended while seeking!");
                }
                finishSeekIfNecessary(-1);

                LOGV("PreviewPlayer: onVideoEvent EOS reached.");
                mFlags |= VIDEO_AT_EOS;
                postStreamDoneEvent_l(err);
                return;
            }

            if (mVideoBuffer->range_length() == 0) {
                // Some decoders, notably the PV AVC software decoder
                // return spurious empty buffers that we just want to ignore.
                mVideoBuffer->release();
                mVideoBuffer = NULL;
                continue;
            }

            int64_t videoTimeUs;
            CHECK(mVideoBuffer->meta_data()->findInt64(kKeyTime, &videoTimeUs));

            if ((videoTimeUs/1000) < mPlayBeginTimeMsec) {
                // Frames are before the begin cut time; do not render.
                mVideoBuffer->release();
                mVideoBuffer = NULL;
                continue;
            }

            break;
        }
    }

    mNumberDecVideoFrames++;

    int64_t timeUs;
    CHECK(mVideoBuffer->meta_data()->findInt64(kKeyTime, &timeUs));

    {
        Mutex::Autolock autoLock(mMiscStateLock);
        mVideoTimeUs = timeUs;
    }

    mDecodedVideoTs = timeUs;

    if (!mStartNextPlayer) {
        int64_t playbackTimeRemaining = (mPlayEndTimeMsec*1000) - timeUs;
        if (playbackTimeRemaining <= 1500000) {
            // Less than 1.5 sec of playback left; send a notification so
            // that the next player can be started.
            mStartNextPlayer = true;
            notifyListener_l(0xAAAAAAAA);
        }
    }

    bool wasSeeking = mSeeking;
    finishSeekIfNecessary(timeUs);

    TimeSource *ts = (mFlags & AUDIO_AT_EOS) ? &mSystemTimeSource : mTimeSource;

    if (ts == NULL) {
        mVideoBuffer->release();
        mVideoBuffer = NULL;
        return;
    }

    if (!mIsVideoSourceJpg) {
        if (mFlags & FIRST_FRAME) {
            mFlags &= ~FIRST_FRAME;
            mTimeSourceDeltaUs = ts->getRealTimeUs() - timeUs;
        }

        int64_t realTimeUs, mediaTimeUs;
        if (!(mFlags & AUDIO_AT_EOS) && mAudioPlayer != NULL
                && mAudioPlayer->getMediaTimeMapping(&realTimeUs, &mediaTimeUs)) {
            mTimeSourceDeltaUs = realTimeUs - mediaTimeUs;
        }

        int64_t nowUs = ts->getRealTimeUs() - mTimeSourceDeltaUs;

        int64_t latenessUs = nowUs - timeUs;

        if (wasSeeking) {
            // Let's display the first frame after seeking right away.
            latenessUs = 0;
        }

        LOGV("Audio time stamp = %lld and video time stamp = %lld",
                ts->getRealTimeUs(), timeUs);

        if (latenessUs > 40000) {
            // We're more than 40ms late.
            LOGV("LV PLAYER we're late by %lld us (%.2f secs)",
                    latenessUs, latenessUs / 1E6);

            mVideoBuffer->release();
            mVideoBuffer = NULL;

            postVideoEvent_l();
            return;
        }

        if (latenessUs < -10000) {
            // We're more than 10ms early.
            LOGV("We're more than 10ms early, lateness %lld", latenessUs);

            postVideoEvent_l(10000);
            return;
        }
    }
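
    // Worked example of the A/V sync decision above (illustrative numbers
    // only): suppose the time source reads 5,050,000 us, the media-time
    // mapping gives mTimeSourceDeltaUs = 1,000,000 and the frame is stamped
    // timeUs = 4,000,000. Then
    //     nowUs      = 5,050,000 - 1,000,000 = 4,050,000
    //     latenessUs = 4,050,000 - 4,000,000 =    50,000
    // which exceeds the 40,000 us threshold, so the frame is dropped and the
    // next video event is posted immediately. Had latenessUs come out below
    // -10,000 us, rendering would instead be retried 10 ms later.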

    if (mVideoRendererIsPreview || mVideoRenderer == NULL) {
        mVideoRendererIsPreview = false;

        initRenderer_l();
    }

    // If the timestamp exceeds the endCutTime of the clip, do not render.
    if ((timeUs/1000) > mPlayEndTimeMsec) {
        if (mLastVideoBuffer) {
            mLastVideoBuffer->release();
            mLastVideoBuffer = NULL;
        }
        mLastVideoBuffer = mVideoBuffer;
        mVideoBuffer = NULL;
        mFlags |= VIDEO_AT_EOS;
        mFlags |= AUDIO_AT_EOS;
        LOGI("PreviewPlayer: onVideoEvent timeUs > mPlayEndTime; send EOS..");
        postStreamDoneEvent_l(ERROR_END_OF_STREAM);
        return;
    }

    // Post processing to apply video effects
    for (i = 0; i < mNumberEffects; i++) {
        // First check if the effect's start time falls within the clip
        // being previewed.
        if ((mEffectsSettings[i].uiStartTime < (mDecVideoTsStoryBoard/1000)) ||
                (mEffectsSettings[i].uiStartTime >=
                ((mDecVideoTsStoryBoard/1000) + mPlayEndTimeMsec - mPlayBeginTimeMsec))) {
            // This effect doesn't belong to this clip, check the next one.
            continue;
        }

        // Check if the effect applies to this particular frame timestamp.
        if ((mEffectsSettings[i].uiStartTime <=
                (((timeUs+mDecVideoTsStoryBoard)/1000)-mPlayBeginTimeMsec)) &&
                ((mEffectsSettings[i].uiStartTime+mEffectsSettings[i].uiDuration) >=
                (((timeUs+mDecVideoTsStoryBoard)/1000)-mPlayBeginTimeMsec)) &&
                (mEffectsSettings[i].uiDuration != 0)) {
            setVideoPostProcessingNode(
                    mEffectsSettings[i].VideoEffectType, TRUE);
        } else {
            setVideoPostProcessingNode(
                    mEffectsSettings[i].VideoEffectType, FALSE);
        }
    }
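
    // Worked example of the window check above (illustrative numbers only):
    // with mDecVideoTsStoryBoard = 5,000,000 us, mPlayBeginTimeMsec = 2,000
    // and a frame at timeUs = 3,000,000 us, the frame's storyboard-relative
    // position is
    //     (3,000,000 + 5,000,000)/1000 - 2,000 = 8,000 - 2,000 = 6,000 ms,
    // so an effect with uiStartTime = 5,500 ms and uiDuration = 1,000 ms
    // (i.e. active until 6,500 ms) would be enabled for this frame.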

    if (mCurrentVideoEffect != VIDEO_EFFECT_NONE) {
        err1 = doVideoPostProcessing();
        if (err1 != M4NO_ERROR) {
            LOGE("doVideoPostProcessing returned err");
            bAppliedVideoEffect = false;
        } else {
            bAppliedVideoEffect = true;
        }
    } else {
        bAppliedVideoEffect = false;
        if (mRenderingMode != MEDIA_RENDERING_INVALID) {
            // No effects to be applied, but media rendering to be done
            err1 = doMediaRendering();
            if (err1 != M4NO_ERROR) {
                LOGE("doMediaRendering returned err");
                // Use the original mVideoBuffer for rendering
                mVideoResizedOrCropped = false;
            }
        }
    }

    if (mVideoRenderer != NULL) {
        LOGV("mVideoRenderer CALL render()");
        mVideoRenderer->render();
    }

    if (mLastVideoBuffer) {
        mLastVideoBuffer->release();
        mLastVideoBuffer = NULL;
    }

    mLastVideoBuffer = mVideoBuffer;
    mVideoBuffer = NULL;

    // Post a progress callback based on the callback interval set
    if (mNumberDecVideoFrames >= mProgressCbInterval) {
        postProgressCallbackEvent_l();
        mNumberDecVideoFrames = 0;  // reset counter
    }

    // If the endCutTime of the clip is reached, post an EOS event
    if ((timeUs/1000) >= mPlayEndTimeMsec) {
        LOGV("PreviewPlayer: onVideoEvent EOS.");
        mFlags |= VIDEO_AT_EOS;
        mFlags |= AUDIO_AT_EOS;
        postStreamDoneEvent_l(ERROR_END_OF_STREAM);
    } else {
        if (!mIsVideoSourceJpg) {
            postVideoEvent_l();
        } else {
            postVideoEvent_l(33000);
        }
    }
}

status_t PreviewPlayer::prepare() {
    Mutex::Autolock autoLock(mLock);
    return prepare_l();
}

status_t PreviewPlayer::prepare_l() {
    if (mFlags & PREPARED) {
        return OK;
    }

    if (mFlags & PREPARING) {
        return UNKNOWN_ERROR;
    }

    mIsAsyncPrepare = false;
    status_t err = prepareAsync_l();

    if (err != OK) {
        return err;
    }

    while (mFlags & PREPARING) {
        mPreparedCondition.wait(mLock);
    }

    return mPrepareResult;
}

status_t PreviewPlayer::prepareAsync_l() {
    if (mFlags & PREPARING) {
        return UNKNOWN_ERROR;  // async prepare already pending
    }

    if (!mQueueStarted) {
        mQueue.start();
        mQueueStarted = true;
    }

    mFlags |= PREPARING;
    mAsyncPrepareEvent = new PreviewPlayerEvent(
            this, &PreviewPlayer::onPrepareAsyncEvent);

    mQueue.postEvent(mAsyncPrepareEvent);

    return OK;
}

status_t PreviewPlayer::finishSetDataSource_l() {
    sp<DataSource> dataSource;
    sp<MediaExtractor> extractor;

    dataSource = DataSource::CreateFromURI(mUri.string(), &mUriHeaders);

    if (dataSource == NULL) {
        return UNKNOWN_ERROR;
    }

    // If the file type is .rgb, then there is no need to check for an
    // extractor.
    int uriLen = strlen(mUri);
    int startOffset = uriLen - 4;
    if (!strncasecmp(mUri+startOffset, ".rgb", 4)) {
        extractor = NULL;
    } else {
        extractor = MediaExtractor::Create(dataSource,
                MEDIA_MIMETYPE_CONTAINER_MPEG4);
    }

    if (extractor == NULL) {
        LOGV("PreviewPlayer::finishSetDataSource_l extractor == NULL");
        return setDataSource_l_jpg();
    }

    return setDataSource_l(extractor);
}

// static
bool PreviewPlayer::ContinuePreparation(void *cookie) {
    PreviewPlayer *me = static_cast<PreviewPlayer *>(cookie);

    return (me->mFlags & PREPARE_CANCELLED) == 0;
}

void PreviewPlayer::onPrepareAsyncEvent() {
    Mutex::Autolock autoLock(mLock);
    LOGV("onPrepareAsyncEvent");

    if (mFlags & PREPARE_CANCELLED) {
        LOGI("LV PLAYER prepare was cancelled before doing anything");
        abortPrepare(UNKNOWN_ERROR);
        return;
    }

    if (mUri.size() > 0) {
        status_t err = finishSetDataSource_l();

        if (err != OK) {
            abortPrepare(err);
            return;
        }
    }

    if (mVideoTrack != NULL && mVideoSource == NULL) {
        status_t err = initVideoDecoder(OMXCodec::kHardwareCodecsOnly);

        if (err != OK) {
            abortPrepare(err);
            return;
        }
    }

    if (mAudioTrack != NULL && mAudioSource == NULL) {
        status_t err = initAudioDecoder();

        if (err != OK) {
            abortPrepare(err);
            return;
        }
    }

    finishAsyncPrepare_l();
}

void PreviewPlayer::finishAsyncPrepare_l() {
    if (mIsAsyncPrepare) {
        if (mVideoSource == NULL) {
            LOGV("finishAsyncPrepare_l: MEDIA_SET_VIDEO_SIZE 0 0 ");
            notifyListener_l(MEDIA_SET_VIDEO_SIZE, 0, 0);
        } else {
            LOGV("finishAsyncPrepare_l: MEDIA_SET_VIDEO_SIZE");
            notifyVideoSize_l();
        }
        LOGV("finishAsyncPrepare_l: MEDIA_PREPARED");
        notifyListener_l(MEDIA_PREPARED);
    }

    mPrepareResult = OK;
    mFlags &= ~(PREPARING|PREPARE_CANCELLED);
    mFlags |= PREPARED;
    mAsyncPrepareEvent = NULL;
    mPreparedCondition.broadcast();
}

status_t PreviewPlayer::suspend() {
    LOGV("suspend");
    Mutex::Autolock autoLock(mLock);

    if (mSuspensionState != NULL) {
        if (mLastVideoBuffer == NULL) {
            // We get here if the player is suspended again after being
            // resumed without any playback in between: keep the previously
            // saved suspension state across the reset.
            SuspensionState *state = mSuspensionState;
            mSuspensionState = NULL;
            reset_l();
            mSuspensionState = state;
            return OK;
        }

        delete mSuspensionState;
        mSuspensionState = NULL;
    }

    if (mFlags & PREPARING) {
        mFlags |= PREPARE_CANCELLED;
    }

    while (mFlags & PREPARING) {
        mPreparedCondition.wait(mLock);
    }

    SuspensionState *state = new SuspensionState;
    state->mUri = mUri;
    state->mUriHeaders = mUriHeaders;
    state->mFileSource = mFileSource;

    state->mFlags = mFlags & (PLAYING | AUTO_LOOPING | LOOPING | AT_EOS);
    getPosition(&state->mPositionUs);

    if (mLastVideoBuffer) {
        size_t size = mLastVideoBuffer->range_length();
        if (size) {
            int32_t unreadable;
            if (!mLastVideoBuffer->meta_data()->findInt32(
                        kKeyIsUnreadable, &unreadable)
                    || unreadable == 0) {
                state->mLastVideoFrameSize = size;
                state->mLastVideoFrame = malloc(size);
                memcpy(state->mLastVideoFrame,
                        (const uint8_t *)mLastVideoBuffer->data()
                            + mLastVideoBuffer->range_offset(),
                        size);

                state->mVideoWidth = mVideoWidth;
                state->mVideoHeight = mVideoHeight;

                sp<MetaData> meta = mVideoSource->getFormat();
                CHECK(meta->findInt32(kKeyColorFormat, &state->mColorFormat));
                CHECK(meta->findInt32(kKeyWidth, &state->mDecodedWidth));
                CHECK(meta->findInt32(kKeyHeight, &state->mDecodedHeight));
            } else {
                LOGV("Unable to save last video frame, we have no access to "
                        "the decoded video data.");
            }
        }
    }

    reset_l();

    mSuspensionState = state;

    return OK;
}

status_t PreviewPlayer::resume() {
    LOGV("resume");
    Mutex::Autolock autoLock(mLock);

    if (mSuspensionState == NULL) {
        return INVALID_OPERATION;
    }

    SuspensionState *state = mSuspensionState;
    mSuspensionState = NULL;

    status_t err;
    if (state->mFileSource != NULL) {
        err = AwesomePlayer::setDataSource_l(state->mFileSource);

        if (err == OK) {
            mFileSource = state->mFileSource;
        }
    } else {
        err = AwesomePlayer::setDataSource_l(state->mUri, &state->mUriHeaders);
    }

    if (err != OK) {
        delete state;
        state = NULL;

        return err;
    }

    seekTo_l(state->mPositionUs);

    mFlags = state->mFlags & (AUTO_LOOPING | LOOPING | AT_EOS);

    if (state->mLastVideoFrame && (mSurface != NULL || mISurface != NULL)) {
        mVideoRenderer =
            new PreviewLocalRenderer(
                    true,  // previewOnly
                    (OMX_COLOR_FORMATTYPE)state->mColorFormat,
                    mSurface,
                    state->mVideoWidth,
                    state->mVideoHeight,
                    state->mDecodedWidth,
                    state->mDecodedHeight);

        mVideoRendererIsPreview = true;

        ((PreviewLocalRenderer *)mVideoRenderer.get())->render(
                state->mLastVideoFrame, state->mLastVideoFrameSize);
    }

    if (state->mFlags & PLAYING) {
        play_l();
    }

    mSuspensionState = state;
    state = NULL;

    return OK;
}

status_t PreviewPlayer::loadEffectsSettings(
        M4VSS3GPP_EffectSettings* pEffectSettings, int nEffects) {
    M4OSA_UInt32 i = 0, rgbSize = 0;
    M4VIFI_UInt8 *tmp = M4OSA_NULL;

    mNumberEffects = nEffects;
    mEffectsSettings = pEffectSettings;
    return OK;
}

status_t PreviewPlayer::loadAudioMixSettings(
        M4xVSS_AudioMixingSettings* pAudioMixSettings) {

    LOGV("PreviewPlayer: loadAudioMixSettings: ");
    mPreviewPlayerAudioMixSettings = pAudioMixSettings;
    return OK;
}

status_t PreviewPlayer::setAudioMixPCMFileHandle(
        M4OSA_Context pAudioMixPCMFileHandle) {

    LOGV("PreviewPlayer: setAudioMixPCMFileHandle: ");
    mAudioMixPCMFileHandle = pAudioMixPCMFileHandle;
    return OK;
}

status_t PreviewPlayer::setAudioMixStoryBoardParam(
        M4OSA_UInt32 audioMixStoryBoardTS,
        M4OSA_UInt32 currentMediaBeginCutTime,
        M4OSA_UInt32 primaryTrackVolValue) {

    mAudioMixStoryBoardTS = audioMixStoryBoardTS;
    mCurrentMediaBeginCutTime = currentMediaBeginCutTime;
    mCurrentMediaVolumeValue = primaryTrackVolValue;
    return OK;
}

status_t PreviewPlayer::setPlaybackBeginTime(uint32_t msec) {

    mPlayBeginTimeMsec = msec;
    return OK;
}

status_t PreviewPlayer::setPlaybackEndTime(uint32_t msec) {

    mPlayEndTimeMsec = msec;
    return OK;
}

status_t PreviewPlayer::setStoryboardStartTime(uint32_t msec) {

    mStoryboardStartTimeMsec = msec;
    mDecVideoTsStoryBoard = mStoryboardStartTimeMsec*1000;
    return OK;
}

status_t PreviewPlayer::setProgressCallbackInterval(uint32_t cbInterval) {

    mProgressCbInterval = cbInterval;
    return OK;
}

status_t PreviewPlayer::setMediaRenderingMode(
        M4xVSS_MediaRendering mode,
        M4VIDEOEDITING_VideoFrameSize outputVideoSize) {

    mRenderingMode = mode;

    /* Reset the boolean for each clip */
    mVideoResizedOrCropped = false;

    switch (outputVideoSize) {
        case M4VIDEOEDITING_kSQCIF:
            mOutputVideoWidth = 128;
            mOutputVideoHeight = 96;
            break;

        case M4VIDEOEDITING_kQQVGA:
            mOutputVideoWidth = 160;
            mOutputVideoHeight = 120;
            break;

        case M4VIDEOEDITING_kQCIF:
            mOutputVideoWidth = 176;
            mOutputVideoHeight = 144;
            break;

        case M4VIDEOEDITING_kQVGA:
            mOutputVideoWidth = 320;
            mOutputVideoHeight = 240;
            break;

        case M4VIDEOEDITING_kCIF:
            mOutputVideoWidth = 352;
            mOutputVideoHeight = 288;
            break;

        case M4VIDEOEDITING_kVGA:
            mOutputVideoWidth = 640;
            mOutputVideoHeight = 480;
            break;

        case M4VIDEOEDITING_kWVGA:
            mOutputVideoWidth = 800;
            mOutputVideoHeight = 480;
            break;

        case M4VIDEOEDITING_kNTSC:
            mOutputVideoWidth = 720;
            mOutputVideoHeight = 480;
            break;

        case M4VIDEOEDITING_k640_360:
            mOutputVideoWidth = 640;
            mOutputVideoHeight = 360;
            break;

        case M4VIDEOEDITING_k854_480:
            mOutputVideoWidth = 854;
            mOutputVideoHeight = 480;
            break;

        case M4VIDEOEDITING_kHD1280:
            mOutputVideoWidth = 1280;
            mOutputVideoHeight = 720;
            break;

        case M4VIDEOEDITING_kHD1080:
            mOutputVideoWidth = 1080;
            mOutputVideoHeight = 720;
            break;

        case M4VIDEOEDITING_kHD960:
            mOutputVideoWidth = 960;
            mOutputVideoHeight = 720;
            break;

        default:
            LOGE("unsupported output video size set");
            return BAD_VALUE;
    }

    return OK;
}

M4OSA_ERR PreviewPlayer::doMediaRendering() {
    M4OSA_ERR err = M4NO_ERROR;
    M4VIFI_ImagePlane planeIn[3], planeOut[3];
    M4VIFI_UInt8 *inBuffer = M4OSA_NULL, *finalOutputBuffer = M4OSA_NULL;
    M4VIFI_UInt8 *tempOutputBuffer = M4OSA_NULL;
    size_t videoBufferSize = 0;
    M4OSA_UInt32 frameSize = 0, i = 0, index = 0, nFrameCount = 0, bufferOffset = 0;
    int32_t colorFormat = 0;

    if (!mIsVideoSourceJpg) {
        sp<MetaData> meta = mVideoSource->getFormat();
        CHECK(meta->findInt32(kKeyColorFormat, &colorFormat));
    } else {
        colorFormat = OMX_COLOR_FormatYUV420Planar;
    }

    videoBufferSize = mVideoBuffer->size();
    frameSize = (mVideoWidth*mVideoHeight*3) >> 1;

    uint8_t* outBuffer;
    size_t outBufferStride = 0;

    mVideoRenderer->getBuffer(&outBuffer, &outBufferStride);

    bufferOffset = index*frameSize;
    inBuffer = (M4OSA_UInt8 *)mVideoBuffer->data() +
            mVideoBuffer->range_offset() + bufferOffset;

    /* Input plane */
    prepareYUV420ImagePlane(planeIn, mVideoWidth,
            mVideoHeight, (M4VIFI_UInt8 *)inBuffer);

    // Set the output YUV420 plane to be compatible with the YV12 format:
    //  - width and height must be even
    //  - YVU ordering instead of YUV
    //  - buffers aligned on 32 bits

    // In YV12 format, sizes must be even.
    M4OSA_UInt32 yv12PlaneWidth = ((mOutputVideoWidth + 1) >> 1) << 1;
    M4OSA_UInt32 yv12PlaneHeight = ((mOutputVideoHeight + 1) >> 1) << 1;
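    // The rounding above forces an even dimension (a sketch of the
    // arithmetic with example values not taken from the code): an odd width
    // such as 853 gives ((853 + 1) >> 1) << 1 = (854 >> 1) << 1 = 427 << 1
    // = 854, while an already even width such as 854 maps to itself:
    // ((854 + 1) >> 1) << 1 = (855 >> 1) << 1 = 427 << 1 = 854.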

    prepareYV12ImagePlane(planeOut, yv12PlaneWidth, yv12PlaneHeight,
            (M4OSA_UInt32)outBufferStride, (M4VIFI_UInt8 *)outBuffer);

    err = applyRenderingMode(planeIn, planeOut, mRenderingMode);

    if (err != M4NO_ERROR) {
        LOGE("doMediaRendering: applyRenderingMode returned err=0x%x", err);
        return err;
    }
    mVideoResizedOrCropped = true;

    return err;
}

status_t PreviewPlayer::resetJniCallbackTimeStamp() {

    mDecVideoTsStoryBoard = mStoryboardStartTimeMsec*1000;
    return OK;
}

void PreviewPlayer::postProgressCallbackEvent_l() {
    if (mProgressCbEventPending) {
        return;
    }
    mProgressCbEventPending = true;

    mQueue.postEvent(mProgressCbEvent);
}

void PreviewPlayer::onProgressCbEvent() {
    Mutex::Autolock autoLock(mLock);
    if (!mProgressCbEventPending) {
        return;
    }
    mProgressCbEventPending = false;

    // If playback starts from a previous I-frame,
    // then send the frame's storyboard duration.
    if ((mDecodedVideoTs/1000) < mPlayBeginTimeMsec) {
        notifyListener_l(MEDIA_INFO, 0, mDecVideoTsStoryBoard/1000);
    } else {
        notifyListener_l(MEDIA_INFO, 0,
                (((mDecodedVideoTs+mDecVideoTsStoryBoard)/1000)-mPlayBeginTimeMsec));
    }
}
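
// Worked example for the progress value above (illustrative numbers only):
// with mDecodedVideoTs = 3,000,000 us, mDecVideoTsStoryBoard = 5,000,000 us
// and mPlayBeginTimeMsec = 2,000, the listener receives
//     (3,000,000 + 5,000,000)/1000 - 2,000 = 6,000 ms,
// i.e., by this reading, the frame's position on the storyboard timeline
// adjusted for the clip's begin cut time.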

void PreviewPlayer::setVideoPostProcessingNode(
        M4VSS3GPP_VideoEffectType type, M4OSA_Bool enable) {

    uint32_t effect = VIDEO_EFFECT_NONE;

    // Map M4VSS3GPP_VideoEffectType to the local enum
    switch (type) {
        case M4VSS3GPP_kVideoEffectType_FadeFromBlack:
            effect = VIDEO_EFFECT_FADEFROMBLACK;
            break;

        case M4VSS3GPP_kVideoEffectType_FadeToBlack:
            effect = VIDEO_EFFECT_FADETOBLACK;
            break;

        case M4VSS3GPP_kVideoEffectType_CurtainOpening:
            effect = VIDEO_EFFECT_CURTAINOPEN;
            break;

        case M4VSS3GPP_kVideoEffectType_CurtainClosing:
            effect = VIDEO_EFFECT_CURTAINCLOSE;
            break;

        case M4xVSS_kVideoEffectType_BlackAndWhite:
            effect = VIDEO_EFFECT_BLACKANDWHITE;
            break;

        case M4xVSS_kVideoEffectType_Pink:
            effect = VIDEO_EFFECT_PINK;
            break;

        case M4xVSS_kVideoEffectType_Green:
            effect = VIDEO_EFFECT_GREEN;
            break;

        case M4xVSS_kVideoEffectType_Sepia:
            effect = VIDEO_EFFECT_SEPIA;
            break;

        case M4xVSS_kVideoEffectType_Negative:
            effect = VIDEO_EFFECT_NEGATIVE;
            break;

        case M4xVSS_kVideoEffectType_Framing:
            effect = VIDEO_EFFECT_FRAMING;
            break;

        case M4xVSS_kVideoEffectType_Fifties:
            effect = VIDEO_EFFECT_FIFTIES;
            break;

        case M4xVSS_kVideoEffectType_ColorRGB16:
            effect = VIDEO_EFFECT_COLOR_RGB16;
            break;

        case M4xVSS_kVideoEffectType_Gradient:
            effect = VIDEO_EFFECT_GRADIENT;
            break;

        default:
            effect = VIDEO_EFFECT_NONE;
            break;
    }

    if (enable == M4OSA_TRUE) {
        // If already set, then no need to set again
        if (!(mCurrentVideoEffect & effect)) {
            mCurrentVideoEffect |= effect;
            if (effect == VIDEO_EFFECT_FIFTIES) {
                mIsFiftiesEffectStarted = true;
            }
        }
    } else {
        // Reset only if already set
        if (mCurrentVideoEffect & effect) {
            mCurrentVideoEffect &= ~effect;
        }
    }
}
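
// Sketch of the bookkeeping above (assuming the VIDEO_EFFECT_* constants are
// distinct bit flags, which the |= / &= ~ masking relies on): enabling SEPIA
// and then FRAMING leaves both bits set in mCurrentVideoEffect, e.g.
//     mCurrentVideoEffect |= VIDEO_EFFECT_SEPIA;
//     mCurrentVideoEffect |= VIDEO_EFFECT_FRAMING;
//     mCurrentVideoEffect &= ~VIDEO_EFFECT_SEPIA;   // FRAMING stays active
// so several effects can be active on the same frame at once.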

status_t PreviewPlayer::setImageClipProperties(uint32_t width, uint32_t height) {
    mVideoWidth = width;
    mVideoHeight = height;
    return OK;
}

M4OSA_ERR PreviewPlayer::doVideoPostProcessing() {
    M4OSA_ERR err = M4NO_ERROR;
    vePostProcessParams postProcessParams;
    int32_t colorFormat = 0;

    if (!mIsVideoSourceJpg) {
        sp<MetaData> meta = mVideoSource->getFormat();
        CHECK(meta->findInt32(kKeyColorFormat, &colorFormat));
    } else {
        colorFormat = OMX_COLOR_FormatYUV420Planar;
    }

    if ((colorFormat == OMX_COLOR_FormatYUV420SemiPlanar) ||
            (colorFormat == 0x7FA30C00)) {
        LOGE("doVideoPostProcessing: colorFormat YUV420Sp not supported");
        return M4ERR_UNSUPPORTED_MEDIA_TYPE;
    }

    postProcessParams.vidBuffer = (M4VIFI_UInt8*)mVideoBuffer->data()
            + mVideoBuffer->range_offset();

    postProcessParams.videoWidth = mVideoWidth;
    postProcessParams.videoHeight = mVideoHeight;
    postProcessParams.timeMs = mDecodedVideoTs/1000;
    postProcessParams.timeOffset = mDecVideoTsStoryBoard/1000;
    postProcessParams.effectsSettings = mEffectsSettings;
    postProcessParams.numberEffects = mNumberEffects;
    postProcessParams.outVideoWidth = mOutputVideoWidth;
    postProcessParams.outVideoHeight = mOutputVideoHeight;
    postProcessParams.currentVideoEffect = mCurrentVideoEffect;
    postProcessParams.renderingMode = mRenderingMode;
    if (mIsFiftiesEffectStarted == M4OSA_TRUE) {
        postProcessParams.isFiftiesEffectStarted = M4OSA_TRUE;
        mIsFiftiesEffectStarted = M4OSA_FALSE;
    } else {
        postProcessParams.isFiftiesEffectStarted = M4OSA_FALSE;
    }

    postProcessParams.overlayFrameRGBBuffer = mFrameRGBBuffer;
    postProcessParams.overlayFrameYUVBuffer = mFrameYUVBuffer;
    mVideoRenderer->getBuffer(&(postProcessParams.pOutBuffer),
            &(postProcessParams.outBufferStride));
    err = applyEffectsAndRenderingMode(&postProcessParams);

    return err;
}

status_t PreviewPlayer::readFirstVideoFrame() {
    LOGV("PreviewPlayer::readFirstVideoFrame");

    if (!mVideoBuffer) {
        MediaSource::ReadOptions options;
        if (mSeeking) {
            LOGV("LV PLAYER seeking to %lld us (%.2f secs)", mSeekTimeUs,
                    mSeekTimeUs / 1E6);

            options.setSeekTo(
                    mSeekTimeUs, MediaSource::ReadOptions::SEEK_CLOSEST);
        }
        for (;;) {
            status_t err = mVideoSource->read(&mVideoBuffer, &options);
            options.clearSeekTo();

            if (err != OK) {
                CHECK_EQ(mVideoBuffer, NULL);

                if (err == INFO_FORMAT_CHANGED) {
                    LOGV("LV PLAYER VideoSource signalled format change");
                    notifyVideoSize_l();

                    if (mVideoRenderer != NULL) {
                        mVideoRendererIsPreview = false;
                        initRenderer_l();
                    }
                    continue;
                }
                LOGV("PreviewPlayer: onVideoEvent EOS reached.");
                mFlags |= VIDEO_AT_EOS;
                postStreamDoneEvent_l(err);
                return OK;
            }

            if (mVideoBuffer->range_length() == 0) {
                // Some decoders, notably the PV AVC software decoder
                // return spurious empty buffers that we just want to ignore.
                mVideoBuffer->release();
                mVideoBuffer = NULL;
                continue;
            }

            int64_t videoTimeUs;
            CHECK(mVideoBuffer->meta_data()->findInt64(kKeyTime, &videoTimeUs));

            if ((videoTimeUs/1000) < mPlayBeginTimeMsec) {
                // Buffers are before the begin cut time; ignore them.
                //LOGI("PreviewPlayer: Ignoring buffers before begin cut time");
                mVideoBuffer->release();
                mVideoBuffer = NULL;
                continue;
            }

            break;
        }
    }

    int64_t timeUs;
    CHECK(mVideoBuffer->meta_data()->findInt64(kKeyTime, &timeUs));

    {
        Mutex::Autolock autoLock(mMiscStateLock);
        mVideoTimeUs = timeUs;
    }

    mDecodedVideoTs = timeUs;

    return OK;
}

}  // namespace android