1/*
2 * Copyright (C) 2011 NXP Software
3 * Copyright (C) 2011 The Android Open Source Project
4 *
5 * Licensed under the Apache License, Version 2.0 (the "License");
6 * you may not use this file except in compliance with the License.
7 * You may obtain a copy of the License at
8 *
9 * http://www.apache.org/licenses/LICENSE-2.0
10 *
11 * Unless required by applicable law or agreed to in writing, software
12 * distributed under the License is distributed on an "AS IS" BASIS,
13 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 * See the License for the specific language governing permissions and
15 * limitations under the License.
16 */
17
18#define LOG_NDEBUG 1
19#define LOG_TAG "PreviewPlayer"
20#include <utils/Log.h>
21
22#include <dlfcn.h>
23
24#include "include/ARTSPController.h"
25#include "PreviewPlayer.h"
26#include "DummyAudioSource.h"
27#include "DummyVideoSource.h"
28#include "VideoEditorSRC.h"
29#include "include/LiveSession.h"
30#include "include/NuCachedSource2.h"
31#include "include/ThrottledSource.h"
32
33
34#include "PreviewRenderer.h"
35
36#include <binder/IPCThreadState.h>
37#include <media/stagefright/DataSource.h>
38#include <media/stagefright/FileSource.h>
39#include <media/stagefright/MediaBuffer.h>
40#include <media/stagefright/MediaDefs.h>
41#include <media/stagefright/MediaExtractor.h>
42#include <media/stagefright/MediaDebug.h>
43#include <media/stagefright/MediaSource.h>
44#include <media/stagefright/MetaData.h>
45#include <media/stagefright/OMXCodec.h>
46
47#include <surfaceflinger/Surface.h>
48#include <media/stagefright/foundation/ALooper.h>
49
50namespace android {
51
52
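// Helper TimedEventQueue event that simply invokes the given PreviewPlayer
// member function when it fires.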
53struct PreviewPlayerEvent : public TimedEventQueue::Event {
54 PreviewPlayerEvent(
55 PreviewPlayer *player,
56 void (PreviewPlayer::*method)())
57 : mPlayer(player),
58 mMethod(method) {
59 }
60
61protected:
62 virtual ~PreviewPlayerEvent() {}
63
64 virtual void fire(TimedEventQueue *queue, int64_t /* now_us */) {
65 (mPlayer->*mMethod)();
66 }
67
68private:
69 PreviewPlayer *mPlayer;
70 void (PreviewPlayer::*mMethod)();
71
72 PreviewPlayerEvent(const PreviewPlayerEvent &);
73 PreviewPlayerEvent &operator=(const PreviewPlayerEvent &);
74};
75
76
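// Local renderer wrapping a PreviewRenderer target: decoded (and possibly
// post-processed) frames are copied into its YV12 buffer and pushed to the
// preview Surface.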
77struct PreviewLocalRenderer : public PreviewPlayerRenderer {
78 PreviewLocalRenderer(
79 bool previewOnly,
80 OMX_COLOR_FORMATTYPE colorFormat,
81 const sp<Surface> &surface,
82 size_t displayWidth, size_t displayHeight,
83 size_t decodedWidth, size_t decodedHeight,
84 int32_t rotationDegrees = 0)
85 : mTarget(NULL) {
86 init(previewOnly,
87 colorFormat, surface,
88 displayWidth, displayHeight,
89 decodedWidth, decodedHeight,
90 rotationDegrees);
91 }
92
93 virtual void render(MediaBuffer *buffer) {
94 render((const uint8_t *)buffer->data() + buffer->range_offset(),
95 buffer->range_length());
96 }
97
98 void render(const void *data, size_t size) {
99 mTarget->render(data, size, NULL);
100 }
101 void render() {
102 mTarget->renderYV12();
103 }
104 void getBuffer(uint8_t **data, size_t *stride) {
105 mTarget->getBufferYV12(data, stride);
106 }
107
108protected:
109 virtual ~PreviewLocalRenderer() {
110 delete mTarget;
111 mTarget = NULL;
112 }
113
114private:
115 PreviewRenderer *mTarget;
116
117 void init(
118 bool previewOnly,
119 OMX_COLOR_FORMATTYPE colorFormat,
120 const sp<Surface> &surface,
121 size_t displayWidth, size_t displayHeight,
122 size_t decodedWidth, size_t decodedHeight,
123 int32_t rotationDegrees = 0);
124
125 PreviewLocalRenderer(const PreviewLocalRenderer &);
126 PreviewLocalRenderer &operator=(const PreviewLocalRenderer &);
127};
128
129void PreviewLocalRenderer::init(
130 bool previewOnly,
131 OMX_COLOR_FORMATTYPE colorFormat,
132 const sp<Surface> &surface,
133 size_t displayWidth, size_t displayHeight,
134 size_t decodedWidth, size_t decodedHeight,
135 int32_t rotationDegrees) {
136 mTarget = new PreviewRenderer(
137 colorFormat, surface, displayWidth, displayHeight,
138 decodedWidth, decodedHeight, rotationDegrees);
139}
140
141PreviewPlayer::PreviewPlayer()
142 : AwesomePlayer(),
143 mFrameRGBBuffer(NULL),
144 mFrameYUVBuffer(NULL),
145 mReportedWidth(0),
146 mReportedHeight(0),
147 mCurrFramingEffectIndex(0) {
148
149 mVideoRenderer = NULL;
150 mLastVideoBuffer = NULL;
151 mSuspensionState = NULL;
152 mEffectsSettings = NULL;
153 mAudioMixStoryBoardTS = 0;
154 mCurrentMediaBeginCutTime = 0;
155 mCurrentMediaVolumeValue = 0;
156 mNumberEffects = 0;
157 mDecodedVideoTs = 0;
158 mDecVideoTsStoryBoard = 0;
159 mCurrentVideoEffect = VIDEO_EFFECT_NONE;
160 mProgressCbInterval = 0;
161 mNumberDecVideoFrames = 0;
162 mOverlayUpdateEventPosted = false;
163
164 mVideoEvent = new PreviewPlayerEvent(this, &PreviewPlayer::onVideoEvent);
165 mVideoEventPending = false;
166 mStreamDoneEvent = new PreviewPlayerEvent(this,
167 &AwesomePlayer::onStreamDone);
168
169 mStreamDoneEventPending = false;
170
171 mCheckAudioStatusEvent = new PreviewPlayerEvent(
172 this, &AwesomePlayer::onCheckAudioStatus);
173
174 mAudioStatusEventPending = false;
175
176 mProgressCbEvent = new PreviewPlayerEvent(this,
177 &PreviewPlayer::onProgressCbEvent);
178
179 mOverlayUpdateEvent = new PreviewPlayerEvent(this,
180 &PreviewPlayer::onUpdateOverlayEvent);
181 mProgressCbEventPending = false;
182
183 mOverlayUpdateEventPending = false;
184 mResizedVideoBuffer = NULL;
185 mVideoResizedOrCropped = false;
186 mRenderingMode = (M4xVSS_MediaRendering)MEDIA_RENDERING_INVALID;
187 mIsFiftiesEffectStarted = false;
188 reset();
189}
190
191PreviewPlayer::~PreviewPlayer() {
192
193 if (mQueueStarted) {
194 mQueue.stop();
195 }
196
197 reset();
198
199 if(mResizedVideoBuffer != NULL) {
200 M4OSA_free((M4OSA_MemAddr32)(mResizedVideoBuffer->data()));
201 mResizedVideoBuffer = NULL;
202 }
203
204 mVideoRenderer.clear();
205 mVideoRenderer = NULL;
206}
207
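// Cancels the queued video, stream-done, audio-status and progress events so
// that stale callbacks cannot fire after a reset.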
208void PreviewPlayer::cancelPlayerEvents(bool keepBufferingGoing) {
209 mQueue.cancelEvent(mVideoEvent->eventID());
210 mVideoEventPending = false;
211 mQueue.cancelEvent(mStreamDoneEvent->eventID());
212 mStreamDoneEventPending = false;
213 mQueue.cancelEvent(mCheckAudioStatusEvent->eventID());
214 mAudioStatusEventPending = false;
215
216 mQueue.cancelEvent(mProgressCbEvent->eventID());
217 mProgressCbEventPending = false;
218}
219
220status_t PreviewPlayer::setDataSource(
221 const char *uri, const KeyedVector<String8, String8> *headers) {
222 Mutex::Autolock autoLock(mLock);
223 return setDataSource_l(uri, headers);
224}
225
226status_t PreviewPlayer::setDataSource_l(
227 const char *uri, const KeyedVector<String8, String8> *headers) {
228 reset_l();
229
230 mUri = uri;
231
232 if (headers) {
233 mUriHeaders = *headers;
234 }
235
236 // The actual work will be done during preparation in the call to
237 // ::finishSetDataSource_l to avoid blocking the calling thread in
238 // setDataSource for any significant time.
239 return OK;
240}
241
242status_t PreviewPlayer::setDataSource_l(const sp<MediaExtractor> &extractor) {
243 bool haveAudio = false;
244 bool haveVideo = false;
245 for (size_t i = 0; i < extractor->countTracks(); ++i) {
246 sp<MetaData> meta = extractor->getTrackMetaData(i);
247
248 const char *mime;
249 CHECK(meta->findCString(kKeyMIMEType, &mime));
250
251 if (!haveVideo && !strncasecmp(mime, "video/", 6)) {
252 setVideoSource(extractor->getTrack(i));
253 haveVideo = true;
254 } else if (!haveAudio && !strncasecmp(mime, "audio/", 6)) {
255 setAudioSource(extractor->getTrack(i));
256 haveAudio = true;
257
258 if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_VORBIS)) {
259 // Only do this for vorbis audio, none of the other audio
260 // formats even support this ringtone specific hack and
261 // retrieving the metadata on some extractors may turn out
262 // to be very expensive.
263 sp<MetaData> fileMeta = extractor->getMetaData();
264 int32_t loop;
265 if (fileMeta != NULL
266 && fileMeta->findInt32(kKeyAutoLoop, &loop)
267 && loop != 0) {
268 mFlags |= AUTO_LOOPING;
269 }
270 }
271 }
272
273 if (haveAudio && haveVideo) {
274 break;
275 }
276 }
277
278 /* Add the support for Dummy audio*/
279 if( !haveAudio ){
280 LOGV("PreviewPlayer: setDataSource_l Dummyaudiocreation started");
281
282 mAudioTrack = DummyAudioSource::Create(32000, 2, 20000,
283 ((mPlayEndTimeMsec)*1000));
284 LOGV("PreviewPlayer: setDataSource_l Dummyaudiosource created");
285 if(mAudioTrack != NULL) {
286 haveAudio = true;
287 }
288 }
289
290 if (!haveAudio && !haveVideo) {
291 return UNKNOWN_ERROR;
292 }
293
294 mExtractorFlags = extractor->flags();
295 return OK;
296}
297
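// Still-image (JPEG/RGB) path: pairs a DummyVideoSource with a silent
// DummyAudioSource so the regular A/V pipeline can drive the preview.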
298status_t PreviewPlayer::setDataSource_l_jpg() {
299 M4OSA_ERR err = M4NO_ERROR;
300 LOGV("PreviewPlayer: setDataSource_l_jpg started");
301
302 mAudioSource = DummyAudioSource::Create(32000, 2, 20000,
303 ((mPlayEndTimeMsec)*1000));
304 LOGV("PreviewPlayer: setDataSource_l_jpg Dummyaudiosource created");
305 if(mAudioSource != NULL) {
306 setAudioSource(mAudioSource);
307 }
308 status_t error = mAudioSource->start();
309 if (error != OK) {
310 LOGV("Error starting dummy audio source");
311 mAudioSource.clear();
312 return error;
313 }
314
315 mDurationUs = (mPlayEndTimeMsec - mPlayBeginTimeMsec)*1000;
316
317 mVideoSource = DummyVideoSource::Create(mVideoWidth, mVideoHeight,
318 mDurationUs, mUri);
319 mReportedWidth = mVideoWidth;
320 mReportedHeight = mVideoHeight;
321
322 setVideoSource(mVideoSource);
323 status_t err1 = mVideoSource->start();
324 if (err1 != OK) {
325 mVideoSource.clear();
326 return err1;
327 }
328
329 mIsVideoSourceJpg = true;
330 return OK;
331}
332
333void PreviewPlayer::reset() {
334 Mutex::Autolock autoLock(mLock);
335 reset_l();
336}
337
338void PreviewPlayer::reset_l() {
339
340 if (mFlags & PREPARING) {
341 mFlags |= PREPARE_CANCELLED;
342 }
343
344 while (mFlags & PREPARING) {
345 mPreparedCondition.wait(mLock);
346 }
347
348 cancelPlayerEvents();
349 mAudioTrack.clear();
350 mVideoTrack.clear();
351
352 // Shut down audio first, so that the response to the reset request
353 // appears to happen instantaneously as far as the user is concerned.
354 // If we did this later, audio would continue playing while we
355 // shut down the video-related resources and the player would appear
356 // less responsive to the reset request.
357 if (mAudioPlayer == NULL && mAudioSource != NULL) {
358 // If we had an audio player, it would have effectively
359 // taken possession of the audio source and stopped it when
360 // _it_ is stopped. Otherwise this is still our responsibility.
361 mAudioSource->stop();
362 }
363 mAudioSource.clear();
364
365 mTimeSource = NULL;
366
367 delete mAudioPlayer;
368 mAudioPlayer = NULL;
369
370 if (mLastVideoBuffer) {
371 mLastVideoBuffer->release();
372 mLastVideoBuffer = NULL;
373 }
374
375 if (mVideoBuffer) {
376 mVideoBuffer->release();
377 mVideoBuffer = NULL;
378 }
379
380 if (mVideoSource != NULL) {
381 mVideoSource->stop();
382
383 // The following hack is necessary to ensure that the OMX
384 // component is completely released by the time we may try
385 // to instantiate it again.
386 wp<MediaSource> tmp = mVideoSource;
387 mVideoSource.clear();
388 while (tmp.promote() != NULL) {
389 usleep(1000);
390 }
391 IPCThreadState::self()->flushCommands();
392 }
393
394 mDurationUs = -1;
395 mFlags = 0;
396 mExtractorFlags = 0;
397 mVideoWidth = mVideoHeight = -1;
398 mTimeSourceDeltaUs = 0;
399 mVideoTimeUs = 0;
400
401 mSeeking = false;
402 mSeekNotificationSent = false;
403 mSeekTimeUs = 0;
404
405 mUri.setTo("");
406 mUriHeaders.clear();
407
408 mFileSource.clear();
409
410 delete mSuspensionState;
411 mSuspensionState = NULL;
412
413 mCurrentVideoEffect = VIDEO_EFFECT_NONE;
414 mIsVideoSourceJpg = false;
415 mFrameRGBBuffer = NULL;
416 if(mFrameYUVBuffer != NULL) {
417 M4OSA_free((M4OSA_MemAddr32)mFrameYUVBuffer);
418 mFrameYUVBuffer = NULL;
419 }
420}
421
422void PreviewPlayer::partial_reset_l() {
423
424 if (mLastVideoBuffer) {
425 mLastVideoBuffer->release();
426 mLastVideoBuffer = NULL;
427 }
428
429 /* call base struct */
430 AwesomePlayer::partial_reset_l();
431
432}
433
434status_t PreviewPlayer::play() {
435 Mutex::Autolock autoLock(mLock);
436
437 mFlags &= ~CACHE_UNDERRUN;
438
439 return play_l();
440}
441
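// play_l() sets up the VideoEditorAudioPlayer (audio mix settings, PCM file
// handle, storyboard skim time) as the time source, primes still-image
// sources with an initial read, and kicks off the video event loop.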
442status_t PreviewPlayer::play_l() {
443VideoEditorAudioPlayer *mVePlayer;
444 if (mFlags & PLAYING) {
445 return OK;
446 }
447 mStartNextPlayer = false;
448
449 if (!(mFlags & PREPARED)) {
450 status_t err = prepare_l();
451
452 if (err != OK) {
453 return err;
454 }
455 }
456
457 mFlags |= PLAYING;
458 mFlags |= FIRST_FRAME;
459
460 bool deferredAudioSeek = false;
461
462 if (mAudioSource != NULL) {
463 if (mAudioPlayer == NULL) {
464 if (mAudioSink != NULL) {
465
466 mAudioPlayer = new VideoEditorAudioPlayer(mAudioSink, this);
467 mVePlayer =
468 (VideoEditorAudioPlayer*)mAudioPlayer;
469
470 mAudioPlayer->setSource(mAudioSource);
471
472 mVePlayer->setAudioMixSettings(
473 mPreviewPlayerAudioMixSettings);
474
475 mVePlayer->setAudioMixPCMFileHandle(
476 mAudioMixPCMFileHandle);
477
478 mVePlayer->setAudioMixStoryBoardSkimTimeStamp(
479 mAudioMixStoryBoardTS, mCurrentMediaBeginCutTime,
480 mCurrentMediaVolumeValue);
481
482 // We've already started the MediaSource in order to enable
483 // the prefetcher to read its data.
484 status_t err = mVePlayer->start(
485 true /* sourceAlreadyStarted */);
486
487 if (err != OK) {
488 delete mAudioPlayer;
489 mAudioPlayer = NULL;
490
491 mFlags &= ~(PLAYING | FIRST_FRAME);
492 return err;
493 }
494
495 mTimeSource = mVePlayer; //mAudioPlayer;
496
497 deferredAudioSeek = true;
498 mWatchForAudioSeekComplete = false;
499 mWatchForAudioEOS = true;
500 }
501 } else {
502 ((VideoEditorAudioPlayer*)mAudioPlayer)->resume(); // mVePlayer is unset on this path
503 }
504
505 }
506
507 if (mTimeSource == NULL && mAudioPlayer == NULL) {
508 mTimeSource = &mSystemTimeSource;
509 }
510
511 // Set the seek option for Image source files and read.
512 // This resets the timestamping for image play
513 if (mIsVideoSourceJpg) {
514 MediaSource::ReadOptions options;
515 MediaBuffer *aLocalBuffer;
516 options.setSeekTo(mSeekTimeUs);
517 mVideoSource->read(&aLocalBuffer, &options);
518 }
519
520 if (mVideoSource != NULL) {
521 // Kick off video playback
522 postVideoEvent_l();
523 }
524
525 if (deferredAudioSeek) {
526 // If there was a seek request while we were paused
527 // and we're just starting up again, honor the request now.
528 seekAudioIfNecessary_l();
529 }
530
531 if (mFlags & AT_EOS) {
532 // Legacy behaviour, if a stream finishes playing and then
533 // is started again, we play from the start...
534 seekTo_l(0);
535 }
536
537 return OK;
538}
539
540
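// (Re)creates the local preview renderer sized to the storyboard output
// dimensions; a local renderer is required because decoded buffers are
// modified by the post-processing module before display.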
541void PreviewPlayer::initRenderer_l() {
542 if (mSurface != NULL || mISurface != NULL) {
543 sp<MetaData> meta = mVideoSource->getFormat();
544
545 int32_t format;
546 const char *component;
547 int32_t decodedWidth, decodedHeight;
548 CHECK(meta->findInt32(kKeyColorFormat, &format));
549 CHECK(meta->findCString(kKeyDecoderComponent, &component));
550 CHECK(meta->findInt32(kKeyWidth, &decodedWidth));
551 CHECK(meta->findInt32(kKeyHeight, &decodedHeight));
552
553 // Must ensure that mVideoRenderer's destructor is actually executed
554 // before creating a new one.
555 IPCThreadState::self()->flushCommands();
556
557 // Always use the local renderer since decoded buffers are modified
558 // by the postprocessing module. The decoders are instantiated locally
559 // and as a consequence allocate their buffers in local address space.
561 if(mVideoRenderer == NULL) {
562
563 mVideoRenderer = new PreviewLocalRenderer(
564 false, // previewOnly
565 (OMX_COLOR_FORMATTYPE)format,
566 mSurface,
567 mOutputVideoWidth, mOutputVideoHeight,
568 mOutputVideoWidth, mOutputVideoHeight);
569 }
570 }
571}
572
573
574void PreviewPlayer::setISurface(const sp<ISurface> &isurface) {
575 Mutex::Autolock autoLock(mLock);
576 mISurface = isurface;
577}
578
579
580status_t PreviewPlayer::seekTo(int64_t timeUs) {
581
582 if ((mExtractorFlags & MediaExtractor::CAN_SEEK) || (mIsVideoSourceJpg)) {
583 Mutex::Autolock autoLock(mLock);
584 return seekTo_l(timeUs);
585 }
586
587 return OK;
588}
589
590
591status_t PreviewPlayer::getVideoDimensions(
592 int32_t *width, int32_t *height) const {
593 Mutex::Autolock autoLock(mLock);
594
595 if (mVideoWidth < 0 || mVideoHeight < 0) {
596 return UNKNOWN_ERROR;
597 }
598
599 *width = mVideoWidth;
600 *height = mVideoHeight;
601
602 return OK;
603}
604
605
606status_t PreviewPlayer::initAudioDecoder() {
607 sp<MetaData> meta = mAudioTrack->getFormat();
608 const char *mime;
609 CHECK(meta->findCString(kKeyMIMEType, &mime));
610
611 if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_RAW)) {
612 mAudioSource = mAudioTrack;
613 } else {
614 sp<MediaSource> aRawSource;
615 aRawSource = OMXCodec::Create(
616 mClient.interface(), mAudioTrack->getFormat(),
617 false, // createEncoder
618 mAudioTrack);
619
620 if(aRawSource != NULL) {
621 LOGV("initAudioDecoder: new VideoEditorSRC");
622 mAudioSource = new VideoEditorSRC(aRawSource);
623 }
624 }
625
626 if (mAudioSource != NULL) {
627 int64_t durationUs;
628 if (mAudioTrack->getFormat()->findInt64(kKeyDuration, &durationUs)) {
629 Mutex::Autolock autoLock(mMiscStateLock);
630 if (mDurationUs < 0 || durationUs > mDurationUs) {
631 mDurationUs = durationUs;
632 }
633 }
634 status_t err = mAudioSource->start();
635
636 if (err != OK) {
637 mAudioSource.clear();
638 return err;
639 }
640 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_QCELP)) {
641 // For legacy reasons we're simply going to ignore the absence
642 // of an audio decoder for QCELP instead of aborting playback
643 // altogether.
644 return OK;
645 }
646
647 return mAudioSource != NULL ? OK : UNKNOWN_ERROR;
648}
649
650
651status_t PreviewPlayer::initVideoDecoder(uint32_t flags) {
652
653 mVideoSource = OMXCodec::Create(
654 mClient.interface(), mVideoTrack->getFormat(),
655 false,
656 mVideoTrack,
657 NULL, flags);
658
659 if (mVideoSource != NULL) {
660 int64_t durationUs;
661 if (mVideoTrack->getFormat()->findInt64(kKeyDuration, &durationUs)) {
662 Mutex::Autolock autoLock(mMiscStateLock);
663 if (mDurationUs < 0 || durationUs > mDurationUs) {
664 mDurationUs = durationUs;
665 }
666 }
667
668 CHECK(mVideoTrack->getFormat()->findInt32(kKeyWidth, &mVideoWidth));
669 CHECK(mVideoTrack->getFormat()->findInt32(kKeyHeight, &mVideoHeight));
670
671 mReportedWidth = mVideoWidth;
672 mReportedHeight = mVideoHeight;
673
674 status_t err = mVideoSource->start();
675
676 if (err != OK) {
677 mVideoSource.clear();
678 return err;
679 }
680 }
681
682 return mVideoSource != NULL ? OK : UNKNOWN_ERROR;
683}
684
685
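// Per-frame work loop: read a decoded frame, handle seek/EOS, enforce A/V
// sync, apply effects and overlay (framing) handling, render the frame, and
// schedule the next video event.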
686void PreviewPlayer::onVideoEvent() {
687 uint32_t i=0;
688 bool bAppliedVideoEffect = false;
689 M4OSA_ERR err1 = M4NO_ERROR;
690 int64_t imageFrameTimeUs = 0;
691
692 Mutex::Autolock autoLock(mLock);
693 if (!mVideoEventPending) {
694 // The event has been cancelled in reset_l() but had already
695 // been scheduled for execution at that time.
696 return;
697 }
698 mVideoEventPending = false;
699
700 TimeSource *ts_st = &mSystemTimeSource;
701 int64_t timeStartUs = ts_st->getRealTimeUs();
702
703 if (mSeeking) {
704 if (mLastVideoBuffer) {
705 mLastVideoBuffer->release();
706 mLastVideoBuffer = NULL;
707 }
708
709
710 if(mAudioSource != NULL) {
711
712 // We're going to seek the video source first, followed by
713 // the audio source.
714 // In order to avoid jumps in the DataSource offset caused by
715 // the audio codec prefetching data from the old locations
716 // while the video codec is already reading data from the new
717 // locations, we'll "pause" the audio source, causing it to
718 // stop reading input data until a subsequent seek.
719
720 if (mAudioPlayer != NULL) {
721 mAudioPlayer->pause();
722 }
723 mAudioSource->pause();
724 }
725 }
726
727 if (!mVideoBuffer) {
728 MediaSource::ReadOptions options;
729 if (mSeeking) {
730 LOGV("LV PLAYER seeking to %lld us (%.2f secs)", mSeekTimeUs,
731 mSeekTimeUs / 1E6);
732
733 options.setSeekTo(
734 mSeekTimeUs, MediaSource::ReadOptions::SEEK_CLOSEST);
735 }
736 for (;;) {
737 status_t err = mVideoSource->read(&mVideoBuffer, &options);
738 options.clearSeekTo();
739
740 if (err != OK) {
741 CHECK_EQ(mVideoBuffer, NULL);
742
743 if (err == INFO_FORMAT_CHANGED) {
744 LOGV("LV PLAYER VideoSource signalled format change");
745 notifyVideoSize_l();
746 sp<MetaData> meta = mVideoSource->getFormat();
747
748 CHECK(meta->findInt32(kKeyWidth, &mReportedWidth));
749 CHECK(meta->findInt32(kKeyHeight, &mReportedHeight));
750 if (mVideoRenderer != NULL) {
751 mVideoRendererIsPreview = false;
752 initRenderer_l();
753 }
754 continue;
755 }
756 // So video playback is complete, but we may still have
757 // a seek request pending that needs to be applied to the audio track
758 if (mSeeking) {
759 LOGV("video stream ended while seeking!");
760 }
761 finishSeekIfNecessary(-1);
762 LOGV("PreviewPlayer: onVideoEvent EOS reached.");
763 mFlags |= VIDEO_AT_EOS;
764 if (mOverlayUpdateEventPosted) {
765 mOverlayUpdateEventPosted = false;
766 postOverlayUpdateEvent_l();
767 }
768 postStreamDoneEvent_l(err);
769 return;
770 }
771
772 if (mVideoBuffer->range_length() == 0) {
773 // Some decoders, notably the PV AVC software decoder
774 // return spurious empty buffers that we just want to ignore.
775
776 mVideoBuffer->release();
777 mVideoBuffer = NULL;
778 continue;
779 }
780
781 int64_t videoTimeUs;
782 CHECK(mVideoBuffer->meta_data()->findInt64(kKeyTime, &videoTimeUs));
783
784 if((videoTimeUs/1000) < mPlayBeginTimeMsec) {
785 // Frames are before begin cut time
786 // Do not render
787 mVideoBuffer->release();
788 mVideoBuffer = NULL;
789 continue;
790 }
791
792 break;
793 }
794 }
795
796 mNumberDecVideoFrames++;
797
798 int64_t timeUs;
799 CHECK(mVideoBuffer->meta_data()->findInt64(kKeyTime, &timeUs));
800
801 {
802 Mutex::Autolock autoLock(mMiscStateLock);
803 mVideoTimeUs = timeUs;
804 }
805
806 mDecodedVideoTs = timeUs;
807
808 if(!mStartNextPlayer) {
809 int64_t playbackTimeRemaining = (mPlayEndTimeMsec*1000) - timeUs;
810 if(playbackTimeRemaining <= 1500000) {
811 //When less than 1.5 sec of playback left
812 // send notification to start next player
813
814 mStartNextPlayer = true;
815 notifyListener_l(0xAAAAAAAA);
816 }
817 }
818
819 bool wasSeeking = mSeeking;
820 finishSeekIfNecessary(timeUs);
821
822 TimeSource *ts = (mFlags & AUDIO_AT_EOS) ? &mSystemTimeSource : mTimeSource;
823
824 if(ts == NULL) {
825 mVideoBuffer->release();
826 mVideoBuffer = NULL;
827 return;
828 }
829
830 if(!mIsVideoSourceJpg) {
831 if (mFlags & FIRST_FRAME) {
832 mFlags &= ~FIRST_FRAME;
833
834 mTimeSourceDeltaUs = ts->getRealTimeUs() - timeUs;
835 }
836
837 int64_t realTimeUs, mediaTimeUs;
838 if (!(mFlags & AUDIO_AT_EOS) && mAudioPlayer != NULL
839 && mAudioPlayer->getMediaTimeMapping(&realTimeUs, &mediaTimeUs)) {
840 mTimeSourceDeltaUs = realTimeUs - mediaTimeUs;
841 }
842
843 int64_t nowUs = ts->getRealTimeUs() - mTimeSourceDeltaUs;
844
845 int64_t latenessUs = nowUs - timeUs;
846
847 if (wasSeeking) {
848 // Let's display the first frame after seeking right away.
849 latenessUs = 0;
850 }
851 LOGV("Audio time stamp = %lld and video time stamp = %lld",
852 ts->getRealTimeUs(),timeUs);
853 if (latenessUs > 40000) {
854 // We're more than 40ms late.
855
856 LOGV("LV PLAYER we're late by %lld us (%.2f secs)",
857 latenessUs, latenessUs / 1E6);
858
859 mVideoBuffer->release();
860 mVideoBuffer = NULL;
861
862 postVideoEvent_l();
863 return;
864 }
865
866 if (latenessUs < -10000) {
867 // We're more than 10ms early.
868 LOGV("We're more than 10ms early, lateness %lld", latenessUs);
869
870 postVideoEvent_l(10000);
871 return;
872 }
873 }
874
875 if (mVideoRendererIsPreview || mVideoRenderer == NULL) {
876 mVideoRendererIsPreview = false;
877
878 initRenderer_l();
879 }
880
881 // If the timestamp exceeds the clip's end cut time, do not render
882 if((timeUs/1000) > mPlayEndTimeMsec) {
883 if (mLastVideoBuffer) {
884 mLastVideoBuffer->release();
885 mLastVideoBuffer = NULL;
886 }
887 mLastVideoBuffer = mVideoBuffer;
888 mVideoBuffer = NULL;
889 mFlags |= VIDEO_AT_EOS;
890 mFlags |= AUDIO_AT_EOS;
891 LOGI("PreviewPlayer: onVideoEvent timeUs > mPlayEndTime; send EOS..");
892 if (mOverlayUpdateEventPosted) {
893 mOverlayUpdateEventPosted = false;
894 postOverlayUpdateEvent_l();
895 }
896 postStreamDoneEvent_l(ERROR_END_OF_STREAM);
897 return;
898 }
899
900 // Post processing to apply video effects
901 for(i=0;i<mNumberEffects;i++) {
902 // First check if effect starttime matches the clip being previewed
903 if((mEffectsSettings[i].uiStartTime < (mDecVideoTsStoryBoard/1000)) ||
904 (mEffectsSettings[i].uiStartTime >=
905 ((mDecVideoTsStoryBoard/1000) + mPlayEndTimeMsec - mPlayBeginTimeMsec)))
906 {
907 // This effect doesn't belong to this clip, check next one
908 continue;
909 }
910 // Check if effect applies to this particular frame timestamp
911 if((mEffectsSettings[i].uiStartTime <=
912 (((timeUs+mDecVideoTsStoryBoard)/1000)-mPlayBeginTimeMsec)) &&
913 ((mEffectsSettings[i].uiStartTime+mEffectsSettings[i].uiDuration) >=
914 (((timeUs+mDecVideoTsStoryBoard)/1000)-mPlayBeginTimeMsec))
915 && (mEffectsSettings[i].uiDuration != 0)) {
916
917 setVideoPostProcessingNode(
918 mEffectsSettings[i].VideoEffectType, TRUE);
919 }
920 else {
921 setVideoPostProcessingNode(
922 mEffectsSettings[i].VideoEffectType, FALSE);
923 }
924 }
925
926 // Provide the overlay update indication when there is an overlay effect
927 if (mCurrentVideoEffect & VIDEO_EFFECT_FRAMING) {
928 mCurrentVideoEffect &= ~VIDEO_EFFECT_FRAMING; //never apply framing here.
929 if (!mOverlayUpdateEventPosted) {
930
931 // Find the effect in effectSettings array
932 int index;
933 for (index = 0; index < mNumberEffects; index++) {
934 M4OSA_UInt32 timeMs = mDecodedVideoTs/1000;
935 M4OSA_UInt32 timeOffset = mDecVideoTsStoryBoard/1000;
936 if(mEffectsSettings[index].VideoEffectType ==
937 M4xVSS_kVideoEffectType_Framing) {
938 if (((mEffectsSettings[index].uiStartTime + 1) <=
939 timeMs + timeOffset - mPlayBeginTimeMsec) &&
940 ((mEffectsSettings[index].uiStartTime - 1 +
941 mEffectsSettings[index].uiDuration) >=
942 timeMs + timeOffset - mPlayBeginTimeMsec))
943 {
944 break;
945 }
946 }
947 }
948 if (index < mNumberEffects) {
949 mCurrFramingEffectIndex = index;
950 mOverlayUpdateEventPosted = true;
951 postOverlayUpdateEvent_l();
952 LOGV("Framing index = %d", mCurrFramingEffectIndex);
953 } else {
954 LOGV("No framing effects found");
955 }
956 }
957
958 } else if (mOverlayUpdateEventPosted) {
959 //Post the event when the overlay is no more valid
960 LOGV("Overlay is Done");
961 mOverlayUpdateEventPosted = false;
962 postOverlayUpdateEvent_l();
963 }
964
965
966 if (mCurrentVideoEffect != VIDEO_EFFECT_NONE) {
967 err1 = doVideoPostProcessing();
968 if(err1 != M4NO_ERROR) {
969 LOGE("doVideoPostProcessing returned err");
970 bAppliedVideoEffect = false;
971 }
972 else {
973 bAppliedVideoEffect = true;
974 }
975 }
976 else {
977 bAppliedVideoEffect = false;
978 if(mRenderingMode != MEDIA_RENDERING_INVALID) {
979 // No effects to be applied, but media rendering to be done
980 err1 = doMediaRendering();
981 if(err1 != M4NO_ERROR) {
982 LOGE("doMediaRendering returned err");
983 //Use original mVideoBuffer for rendering
984 mVideoResizedOrCropped = false;
985 }
986 }
987 }
988
989 if (mVideoRenderer != NULL) {
990 LOGV("mVideoRenderer CALL render()");
991 mVideoRenderer->render();
992 }
993
994 if (mLastVideoBuffer) {
995 mLastVideoBuffer->release();
996 mLastVideoBuffer = NULL;
997 }
998
999 mLastVideoBuffer = mVideoBuffer;
1000 mVideoBuffer = NULL;
1001
1002 // Post progress callback based on callback interval set
1003 if(mNumberDecVideoFrames >= mProgressCbInterval) {
1004 postProgressCallbackEvent_l();
1005 mNumberDecVideoFrames = 0; // reset counter
1006 }
1007
1008 // if reached EndCutTime of clip, post EOS event
1009 if((timeUs/1000) >= mPlayEndTimeMsec) {
1010 LOGV("PreviewPlayer: onVideoEvent EOS.");
1011 mFlags |= VIDEO_AT_EOS;
1012 mFlags |= AUDIO_AT_EOS;
1013 if (mOverlayUpdateEventPosted) {
1014 mOverlayUpdateEventPosted = false;
1015 postOverlayUpdateEvent_l();
1016 }
1017 postStreamDoneEvent_l(ERROR_END_OF_STREAM);
1018 }
1019 else {
1020 if(!mIsVideoSourceJpg) {
1021 postVideoEvent_l();
1022 }
1023 else {
1024 postVideoEvent_l(33000);
1025 }
1026 }
1027}
1028
1029status_t PreviewPlayer::prepare() {
1030 Mutex::Autolock autoLock(mLock);
1031 return prepare_l();
1032}
1033
1034status_t PreviewPlayer::prepare_l() {
1035 if (mFlags & PREPARED) {
1036 return OK;
1037 }
1038
1039 if (mFlags & PREPARING) {
1040 return UNKNOWN_ERROR;
1041 }
1042
1043 mIsAsyncPrepare = false;
1044 status_t err = prepareAsync_l();
1045
1046 if (err != OK) {
1047 return err;
1048 }
1049
1050 while (mFlags & PREPARING) {
1051 mPreparedCondition.wait(mLock);
1052 }
1053
1054 return mPrepareResult;
1055}
1056
1057status_t PreviewPlayer::prepareAsync_l() {
1058 if (mFlags & PREPARING) {
1059 return UNKNOWN_ERROR; // async prepare already pending
1060 }
1061
1062 if (!mQueueStarted) {
1063 mQueue.start();
1064 mQueueStarted = true;
1065 }
1066
1067 mFlags |= PREPARING;
1068 mAsyncPrepareEvent = new PreviewPlayerEvent(
1069 this, &PreviewPlayer::onPrepareAsyncEvent);
1070
1071 mQueue.postEvent(mAsyncPrepareEvent);
1072
1073 return OK;
1074}
1075
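// Resolves the URI: .rgb files and anything the MPEG-4 extractor rejects are
// routed to the still-image path, otherwise the extractor's tracks are used.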
1076status_t PreviewPlayer::finishSetDataSource_l() {
1077 sp<DataSource> dataSource;
1078 sp<MediaExtractor> extractor;
1079
1080 dataSource = DataSource::CreateFromURI(mUri.string(), &mUriHeaders);
1081
1082 if (dataSource == NULL) {
1083 return UNKNOWN_ERROR;
1084 }
1085
1086 //If file type is .rgb, then no need to check for Extractor
1087 int uriLen = strlen(mUri);
1088 int startOffset = uriLen - 4;
1089 if(!strncasecmp(mUri+startOffset, ".rgb", 4)) {
1090 extractor = NULL;
1091 }
1092 else {
1093 extractor = MediaExtractor::Create(dataSource,
1094 MEDIA_MIMETYPE_CONTAINER_MPEG4);
1095 }
1096
1097 if (extractor == NULL) {
1098 LOGV("PreviewPlayer::finishSetDataSource_l extractor == NULL");
1099 return setDataSource_l_jpg();
1100 }
1101
1102 return setDataSource_l(extractor);
1103}
1104
1105
1106// static
1107bool PreviewPlayer::ContinuePreparation(void *cookie) {
1108 PreviewPlayer *me = static_cast<PreviewPlayer *>(cookie);
1109
1110 return (me->mFlags & PREPARE_CANCELLED) == 0;
1111}
1112
1113void PreviewPlayer::onPrepareAsyncEvent() {
1114 Mutex::Autolock autoLock(mLock);
1115 LOGV("onPrepareAsyncEvent");
1116
1117 if (mFlags & PREPARE_CANCELLED) {
1118 LOGI("LV PLAYER prepare was cancelled before doing anything");
1119 abortPrepare(UNKNOWN_ERROR);
1120 return;
1121 }
1122
1123 if (mUri.size() > 0) {
1124 status_t err = finishSetDataSource_l();
1125
1126 if (err != OK) {
1127 abortPrepare(err);
1128 return;
1129 }
1130 }
1131
1132 if (mVideoTrack != NULL && mVideoSource == NULL) {
1133 status_t err = initVideoDecoder(OMXCodec::kHardwareCodecsOnly);
1134
1135 if (err != OK) {
1136 abortPrepare(err);
1137 return;
1138 }
1139 }
1140
1141 if (mAudioTrack != NULL && mAudioSource == NULL) {
1142 status_t err = initAudioDecoder();
1143
1144 if (err != OK) {
1145 abortPrepare(err);
1146 return;
1147 }
1148 }
1149 finishAsyncPrepare_l();
1150
1151}
1152
1153void PreviewPlayer::finishAsyncPrepare_l() {
1154 if (mIsAsyncPrepare) {
1155 if (mVideoSource == NULL) {
1156 LOGV("finishAsyncPrepare_l: MEDIA_SET_VIDEO_SIZE 0 0 ");
1157 notifyListener_l(MEDIA_SET_VIDEO_SIZE, 0, 0);
1158 } else {
1159 LOGV("finishAsyncPrepare_l: MEDIA_SET_VIDEO_SIZE");
1160 notifyVideoSize_l();
1161 }
1162 LOGV("finishAsyncPrepare_l: MEDIA_PREPARED");
1163 notifyListener_l(MEDIA_PREPARED);
1164 }
1165
1166 mPrepareResult = OK;
1167 mFlags &= ~(PREPARING|PREPARE_CANCELLED);
1168 mFlags |= PREPARED;
1169 mAsyncPrepareEvent = NULL;
1170 mPreparedCondition.broadcast();
1171}
1172
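// suspend() snapshots the URI, flags, position and (when readable) the last
// decoded frame so that resume() can restore the preview state.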
1173status_t PreviewPlayer::suspend() {
1174 LOGV("suspend");
1175 Mutex::Autolock autoLock(mLock);
1176
1177 if (mSuspensionState != NULL) {
1178 if (mLastVideoBuffer == NULL) {
1179 //go into here if video is suspended again
1180 //after resuming without being played between
1181 //them
1182 SuspensionState *state = mSuspensionState;
1183 mSuspensionState = NULL;
1184 reset_l();
1185 mSuspensionState = state;
1186 return OK;
1187 }
1188
1189 delete mSuspensionState;
1190 mSuspensionState = NULL;
1191 }
1192
1193 if (mFlags & PREPARING) {
1194 mFlags |= PREPARE_CANCELLED;
1195 }
1196
1197 while (mFlags & PREPARING) {
1198 mPreparedCondition.wait(mLock);
1199 }
1200
1201 SuspensionState *state = new SuspensionState;
1202 state->mUri = mUri;
1203 state->mUriHeaders = mUriHeaders;
1204 state->mFileSource = mFileSource;
1205
1206 state->mFlags = mFlags & (PLAYING | AUTO_LOOPING | LOOPING | AT_EOS);
1207 getPosition(&state->mPositionUs);
1208
1209 if (mLastVideoBuffer) {
1210 size_t size = mLastVideoBuffer->range_length();
1211 if (size) {
1212 int32_t unreadable;
1213 if (!mLastVideoBuffer->meta_data()->findInt32(
1214 kKeyIsUnreadable, &unreadable)
1215 || unreadable == 0) {
1216 state->mLastVideoFrameSize = size;
1217 state->mLastVideoFrame = malloc(size);
1218 memcpy(state->mLastVideoFrame,
1219 (const uint8_t *)mLastVideoBuffer->data()
1220 + mLastVideoBuffer->range_offset(),
1221 size);
1222
1223 state->mVideoWidth = mVideoWidth;
1224 state->mVideoHeight = mVideoHeight;
1225
1226 sp<MetaData> meta = mVideoSource->getFormat();
1227 CHECK(meta->findInt32(kKeyColorFormat, &state->mColorFormat));
1228 CHECK(meta->findInt32(kKeyWidth, &state->mDecodedWidth));
1229 CHECK(meta->findInt32(kKeyHeight, &state->mDecodedHeight));
1230 } else {
1231 LOGV("Unable to save last video frame, we have no access to "
1232 "the decoded video data.");
1233 }
1234 }
1235 }
1236
1237 reset_l();
1238
1239 mSuspensionState = state;
1240
1241 return OK;
1242}
1243
1244status_t PreviewPlayer::resume() {
1245 LOGV("resume");
1246 Mutex::Autolock autoLock(mLock);
1247
1248 if (mSuspensionState == NULL) {
1249 return INVALID_OPERATION;
1250 }
1251
1252 SuspensionState *state = mSuspensionState;
1253 mSuspensionState = NULL;
1254
1255 status_t err;
1256 if (state->mFileSource != NULL) {
1257 err = AwesomePlayer::setDataSource_l(state->mFileSource);
1258
1259 if (err == OK) {
1260 mFileSource = state->mFileSource;
1261 }
1262 } else {
1263 err = AwesomePlayer::setDataSource_l(state->mUri, &state->mUriHeaders);
1264 }
1265
1266 if (err != OK) {
1267 delete state;
1268 state = NULL;
1269
1270 return err;
1271 }
1272
1273 seekTo_l(state->mPositionUs);
1274
1275 mFlags = state->mFlags & (AUTO_LOOPING | LOOPING | AT_EOS);
1276
1277 if (state->mLastVideoFrame && (mSurface != NULL || mISurface != NULL)) {
1278 mVideoRenderer =
1279 new PreviewLocalRenderer(
1280 true, // previewOnly
1281 (OMX_COLOR_FORMATTYPE)state->mColorFormat,
1282 mSurface,
1283 state->mVideoWidth,
1284 state->mVideoHeight,
1285 state->mDecodedWidth,
1286 state->mDecodedHeight);
1287
1288 mVideoRendererIsPreview = true;
1289
1290 ((PreviewLocalRenderer *)mVideoRenderer.get())->render(
1291 state->mLastVideoFrame, state->mLastVideoFrameSize);
1292 }
1293
1294 if (state->mFlags & PLAYING) {
1295 play_l();
1296 }
1297
1298 mSuspensionState = state;
1299 state = NULL;
1300
1301 return OK;
1302}
1303
1304
1305status_t PreviewPlayer::loadEffectsSettings(
1306 M4VSS3GPP_EffectSettings* pEffectSettings, int nEffects) {
1307 M4OSA_UInt32 i = 0, rgbSize = 0;
1308 M4VIFI_UInt8 *tmp = M4OSA_NULL;
1309
1310 mNumberEffects = nEffects;
1311 mEffectsSettings = pEffectSettings;
1312 return OK;
1313}
1314
1315status_t PreviewPlayer::loadAudioMixSettings(
1316 M4xVSS_AudioMixingSettings* pAudioMixSettings) {
1317
1318 LOGV("PreviewPlayer: loadAudioMixSettings: ");
1319 mPreviewPlayerAudioMixSettings = pAudioMixSettings;
1320 return OK;
1321}
1322
1323status_t PreviewPlayer::setAudioMixPCMFileHandle(
1324 M4OSA_Context pAudioMixPCMFileHandle) {
1325
1326 LOGV("PreviewPlayer: setAudioMixPCMFileHandle: ");
1327 mAudioMixPCMFileHandle = pAudioMixPCMFileHandle;
1328 return OK;
1329}
1330
1331status_t PreviewPlayer::setAudioMixStoryBoardParam(
1332 M4OSA_UInt32 audioMixStoryBoardTS,
1333 M4OSA_UInt32 currentMediaBeginCutTime,
1334 M4OSA_UInt32 primaryTrackVolValue ) {
1335
1336 mAudioMixStoryBoardTS = audioMixStoryBoardTS;
1337 mCurrentMediaBeginCutTime = currentMediaBeginCutTime;
1338 mCurrentMediaVolumeValue = primaryTrackVolValue;
1339 return OK;
1340}
1341
1342status_t PreviewPlayer::setPlaybackBeginTime(uint32_t msec) {
1343
1344 mPlayBeginTimeMsec = msec;
1345 return OK;
1346}
1347
1348status_t PreviewPlayer::setPlaybackEndTime(uint32_t msec) {
1349
1350 mPlayEndTimeMsec = msec;
1351 return OK;
1352}
1353
1354status_t PreviewPlayer::setStoryboardStartTime(uint32_t msec) {
1355
1356 mStoryboardStartTimeMsec = msec;
1357 mDecVideoTsStoryBoard = mStoryboardStartTimeMsec*1000;
1358 return OK;
1359}
1360
1361status_t PreviewPlayer::setProgressCallbackInterval(uint32_t cbInterval) {
1362
1363 mProgressCbInterval = cbInterval;
1364 return OK;
1365}
1366
1367
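// Maps the requested output frame size to the concrete output width/height
// used by the renderer and the post-processing stage.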
1368status_t PreviewPlayer::setMediaRenderingMode(
1369 M4xVSS_MediaRendering mode,
1370 M4VIDEOEDITING_VideoFrameSize outputVideoSize) {
1371
1372 mRenderingMode = mode;
1373
1374 /* reset boolean for each clip*/
1375 mVideoResizedOrCropped = false;
1376
1377 switch(outputVideoSize) {
1378 case M4VIDEOEDITING_kSQCIF:
1379 mOutputVideoWidth = 128;
1380 mOutputVideoHeight = 96;
1381 break;
1382
1383 case M4VIDEOEDITING_kQQVGA:
1384 mOutputVideoWidth = 160;
1385 mOutputVideoHeight = 120;
1386 break;
1387
1388 case M4VIDEOEDITING_kQCIF:
1389 mOutputVideoWidth = 176;
1390 mOutputVideoHeight = 144;
1391 break;
1392
1393 case M4VIDEOEDITING_kQVGA:
1394 mOutputVideoWidth = 320;
1395 mOutputVideoHeight = 240;
1396 break;
1397
1398 case M4VIDEOEDITING_kCIF:
1399 mOutputVideoWidth = 352;
1400 mOutputVideoHeight = 288;
1401 break;
1402
1403 case M4VIDEOEDITING_kVGA:
1404 mOutputVideoWidth = 640;
1405 mOutputVideoHeight = 480;
1406 break;
1407
1408 case M4VIDEOEDITING_kWVGA:
1409 mOutputVideoWidth = 800;
1410 mOutputVideoHeight = 480;
1411 break;
1412
1413 case M4VIDEOEDITING_kNTSC:
1414 mOutputVideoWidth = 720;
1415 mOutputVideoHeight = 480;
1416 break;
1417
1418 case M4VIDEOEDITING_k640_360:
1419 mOutputVideoWidth = 640;
1420 mOutputVideoHeight = 360;
1421 break;
1422
1423 case M4VIDEOEDITING_k854_480:
1424 mOutputVideoWidth = 854;
1425 mOutputVideoHeight = 480;
1426 break;
1427
1428 case M4VIDEOEDITING_kHD1280:
1429 mOutputVideoWidth = 1280;
1430 mOutputVideoHeight = 720;
1431 break;
1432
1433 case M4VIDEOEDITING_kHD1080:
1434 mOutputVideoWidth = 1080;
1435 mOutputVideoHeight = 720;
1436 break;
1437
1438 case M4VIDEOEDITING_kHD960:
1439 mOutputVideoWidth = 960;
1440 mOutputVideoHeight = 720;
1441 break;
1442
1443 default:
1444 LOGE("unsupported output video size set");
1445 return BAD_VALUE;
1446 }
1447
1448 return OK;
1449}
1450
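// Copies the decoded YUV420 planar frame into the renderer's YV12 output
// buffer, applying the configured rendering mode (resizing, cropping or
// black borders).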
1451M4OSA_ERR PreviewPlayer::doMediaRendering() {
1452 M4OSA_ERR err = M4NO_ERROR;
1453 M4VIFI_ImagePlane planeIn[3], planeOut[3];
1454 M4VIFI_UInt8 *inBuffer = M4OSA_NULL, *finalOutputBuffer = M4OSA_NULL;
1455 M4VIFI_UInt8 *tempOutputBuffer= M4OSA_NULL;
1456 size_t videoBufferSize = 0;
1457 M4OSA_UInt32 frameSize = 0, i=0, index =0, nFrameCount =0, bufferOffset =0;
1458 int32_t colorFormat = 0;
1459
1460 if(!mIsVideoSourceJpg) {
1461 sp<MetaData> meta = mVideoSource->getFormat();
1462 CHECK(meta->findInt32(kKeyColorFormat, &colorFormat));
1463 }
1464 else {
1465 colorFormat = OMX_COLOR_FormatYUV420Planar;
1466 }
1467
1468 videoBufferSize = mVideoBuffer->size();
1469 frameSize = (mVideoWidth*mVideoHeight*3) >> 1;
1470
1471 uint8_t* outBuffer;
1472 size_t outBufferStride = 0;
1473
1474 mVideoRenderer->getBuffer(&outBuffer, &outBufferStride);
1475
1476 bufferOffset = index*frameSize;
1477 inBuffer = (M4OSA_UInt8 *)mVideoBuffer->data()+
1478 mVideoBuffer->range_offset()+bufferOffset;
1479
1480
1481 /* In plane*/
1482 prepareYUV420ImagePlane(planeIn, mVideoWidth,
1483 mVideoHeight, (M4VIFI_UInt8 *)inBuffer, mReportedWidth, mReportedHeight);
1484
1485 // Set the output YUV420 plane to be compatible with YV12 format
1486 // W & H even
1487 // YVU instead of YUV
1488 // align buffers on 32 bits
1489
1490 //In YV12 format, sizes must be even
1491 M4OSA_UInt32 yv12PlaneWidth = ((mOutputVideoWidth +1)>>1)<<1;
1492 M4OSA_UInt32 yv12PlaneHeight = ((mOutputVideoHeight+1)>>1)<<1;
1493
1494 prepareYV12ImagePlane(planeOut, yv12PlaneWidth, yv12PlaneHeight,
1495 (M4OSA_UInt32)outBufferStride, (M4VIFI_UInt8 *)outBuffer);
1496
1497
1498 err = applyRenderingMode(planeIn, planeOut, mRenderingMode);
1499
1500 if(err != M4NO_ERROR)
1501 {
1502 LOGE("doMediaRendering: applyRenderingMode returned err=0x%x", err);
1503 return err;
1504 }
1505 mVideoResizedOrCropped = true;
1506
1507 return err;
1508}
1509
1510status_t PreviewPlayer::resetJniCallbackTimeStamp() {
1511
1512 mDecVideoTsStoryBoard = mStoryboardStartTimeMsec*1000;
1513 return OK;
1514}
1515
1516void PreviewPlayer::postProgressCallbackEvent_l() {
1517 if (mProgressCbEventPending) {
1518 return;
1519 }
1520 mProgressCbEventPending = true;
1521
1522 mQueue.postEvent(mProgressCbEvent);
1523}
1524
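// Reports playback progress to the listener (MEDIA_INFO) in storyboard time;
// frames decoded before the begin cut time report the storyboard start instead.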
1525
1526void PreviewPlayer::onProgressCbEvent() {
1527 Mutex::Autolock autoLock(mLock);
1528 if (!mProgressCbEventPending) {
1529 return;
1530 }
1531 mProgressCbEventPending = false;
1532 // If playback starts from previous I-frame,
1533 // then send frame storyboard duration
1534 if((mDecodedVideoTs/1000) < mPlayBeginTimeMsec) {
1535 notifyListener_l(MEDIA_INFO, 0, mDecVideoTsStoryBoard/1000);
1536 }
1537 else {
1538 notifyListener_l(MEDIA_INFO, 0,
1539 (((mDecodedVideoTs+mDecVideoTsStoryBoard)/1000)-mPlayBeginTimeMsec));
1540 }
1541}
1542
1543void PreviewPlayer::postOverlayUpdateEvent_l() {
1544 if (mOverlayUpdateEventPending) {
1545 return;
1546 }
1547 mOverlayUpdateEventPending = true;
1548 mQueue.postEvent(mOverlayUpdateEvent);
1549}
1550
1551void PreviewPlayer::onUpdateOverlayEvent() {
1552 Mutex::Autolock autoLock(mLock);
1553
1554 if (!mOverlayUpdateEventPending) {
1555 return;
1556 }
1557 mOverlayUpdateEventPending = false;
1558
1559 int updateState;
1560 if (mOverlayUpdateEventPosted) {
1561 updateState = 1;
1562 } else {
1563 updateState = 0;
1564 }
1565 notifyListener_l(0xBBBBBBBB, updateState, mCurrFramingEffectIndex);
1566}
1567
1568
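// Translates an M4VSS3GPP/M4xVSS effect type into the player's internal
// VIDEO_EFFECT_* bitmask and enables or disables it in mCurrentVideoEffect.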
1569void PreviewPlayer::setVideoPostProcessingNode(
1570 M4VSS3GPP_VideoEffectType type, M4OSA_Bool enable) {
1571
1572 uint32_t effect = VIDEO_EFFECT_NONE;
1573
1574 //Map M4VSS3GPP_VideoEffectType to local enum
1575 switch(type) {
1576 case M4VSS3GPP_kVideoEffectType_FadeFromBlack:
1577 effect = VIDEO_EFFECT_FADEFROMBLACK;
1578 break;
1579
1580 case M4VSS3GPP_kVideoEffectType_FadeToBlack:
1581 effect = VIDEO_EFFECT_FADETOBLACK;
1582 break;
1583
1584 case M4VSS3GPP_kVideoEffectType_CurtainOpening:
1585 effect = VIDEO_EFFECT_CURTAINOPEN;
1586 break;
1587
1588 case M4VSS3GPP_kVideoEffectType_CurtainClosing:
1589 effect = VIDEO_EFFECT_CURTAINCLOSE;
1590 break;
1591
1592 case M4xVSS_kVideoEffectType_BlackAndWhite:
1593 effect = VIDEO_EFFECT_BLACKANDWHITE;
1594 break;
1595
1596 case M4xVSS_kVideoEffectType_Pink:
1597 effect = VIDEO_EFFECT_PINK;
1598 break;
1599
1600 case M4xVSS_kVideoEffectType_Green:
1601 effect = VIDEO_EFFECT_GREEN;
1602 break;
1603
1604 case M4xVSS_kVideoEffectType_Sepia:
1605 effect = VIDEO_EFFECT_SEPIA;
1606 break;
1607
1608 case M4xVSS_kVideoEffectType_Negative:
1609 effect = VIDEO_EFFECT_NEGATIVE;
1610 break;
1611
1612 case M4xVSS_kVideoEffectType_Framing:
1613 effect = VIDEO_EFFECT_FRAMING;
1614 break;
1615
1616 case M4xVSS_kVideoEffectType_Fifties:
1617 effect = VIDEO_EFFECT_FIFTIES;
1618 break;
1619
1620 case M4xVSS_kVideoEffectType_ColorRGB16:
1621 effect = VIDEO_EFFECT_COLOR_RGB16;
1622 break;
1623
1624 case M4xVSS_kVideoEffectType_Gradient:
1625 effect = VIDEO_EFFECT_GRADIENT;
1626 break;
1627
1628 default:
1629 effect = VIDEO_EFFECT_NONE;
1630 break;
1631 }
1632
1633 if(enable == M4OSA_TRUE) {
1634 //If already set, then no need to set again
1635 if(!(mCurrentVideoEffect & effect)) {
1636 mCurrentVideoEffect |= effect;
1637 if(effect == VIDEO_EFFECT_FIFTIES) {
1638 mIsFiftiesEffectStarted = true;
1639 }
1640 }
1641 }
1642 else {
1643 //Reset only if already set
1644 if(mCurrentVideoEffect & effect) {
1645 mCurrentVideoEffect &= ~effect;
1646 }
1647 }
1648}
1649
1650status_t PreviewPlayer::setImageClipProperties(uint32_t width,uint32_t height) {
1651 mVideoWidth = width;
1652 mVideoHeight = height;
1653 return OK;
1654}
1655
1656
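// Applies the currently active effects to the decoded frame and writes the
// result into the renderer's output buffer; semi-planar YUV420 input is
// rejected as unsupported.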
1657M4OSA_ERR PreviewPlayer::doVideoPostProcessing() {
1658 M4OSA_ERR err = M4NO_ERROR;
1659 vePostProcessParams postProcessParams;
1660 int32_t colorFormat = 0;
1661
1662
1663 if(!mIsVideoSourceJpg) {
1664 sp<MetaData> meta = mVideoSource->getFormat();
1665 CHECK(meta->findInt32(kKeyColorFormat, &colorFormat));
1666 }
1667 else {
1668 colorFormat = OMX_COLOR_FormatYUV420Planar;
1669 }
1670
1671 if((colorFormat == OMX_COLOR_FormatYUV420SemiPlanar) ||
1672 (colorFormat == 0x7FA30C00)) {
1673 LOGE("doVideoPostProcessing: colorFormat YUV420Sp not supported");
1674 return M4ERR_UNSUPPORTED_MEDIA_TYPE;
1675 }
1676
1677 postProcessParams.vidBuffer = (M4VIFI_UInt8*)mVideoBuffer->data()
1678 + mVideoBuffer->range_offset();
1679
1680 postProcessParams.videoWidth = mVideoWidth;
1681 postProcessParams.videoHeight = mVideoHeight;
1682 postProcessParams.timeMs = mDecodedVideoTs/1000;
1683 postProcessParams.timeOffset = mDecVideoTsStoryBoard/1000;
1684 postProcessParams.effectsSettings = mEffectsSettings;
1685 postProcessParams.numberEffects = mNumberEffects;
1686 postProcessParams.outVideoWidth = mOutputVideoWidth;
1687 postProcessParams.outVideoHeight = mOutputVideoHeight;
1688 postProcessParams.currentVideoEffect = mCurrentVideoEffect;
1689 postProcessParams.renderingMode = mRenderingMode;
1690 if(mIsFiftiesEffectStarted == M4OSA_TRUE) {
1691 postProcessParams.isFiftiesEffectStarted = M4OSA_TRUE;
1692 mIsFiftiesEffectStarted = M4OSA_FALSE;
1693 }
1694 else {
1695 postProcessParams.isFiftiesEffectStarted = M4OSA_FALSE;
1696 }
1697
1698 postProcessParams.overlayFrameRGBBuffer = mFrameRGBBuffer;
1699 postProcessParams.overlayFrameYUVBuffer = mFrameYUVBuffer;
1700 mVideoRenderer->getBuffer(&(postProcessParams.pOutBuffer), &(postProcessParams.outBufferStride));
1701 err = applyEffectsAndRenderingMode(&postProcessParams, mReportedWidth, mReportedHeight);
1702
1703 return err;
1704}
1705
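// Pre-rolls the decoder: reads and discards frames until the first
// displayable frame at or after the begin cut time is available.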
1706status_t PreviewPlayer::readFirstVideoFrame() {
1707 LOGV("PreviewPlayer::readFirstVideoFrame");
1708
1709 if (!mVideoBuffer) {
1710 MediaSource::ReadOptions options;
1711 if (mSeeking) {
1712 LOGV("LV PLAYER seeking to %lld us (%.2f secs)", mSeekTimeUs,
1713 mSeekTimeUs / 1E6);
1714
1715 options.setSeekTo(
1716 mSeekTimeUs, MediaSource::ReadOptions::SEEK_CLOSEST);
1717 }
1718 for (;;) {
1719 status_t err = mVideoSource->read(&mVideoBuffer, &options);
1720 options.clearSeekTo();
1721
1722 if (err != OK) {
1723 CHECK_EQ(mVideoBuffer, NULL);
1724
1725 if (err == INFO_FORMAT_CHANGED) {
1726 LOGV("LV PLAYER VideoSource signalled format change");
1727 notifyVideoSize_l();
1728 sp<MetaData> meta = mVideoSource->getFormat();
1729
1730 CHECK(meta->findInt32(kKeyWidth, &mReportedWidth));
1731 CHECK(meta->findInt32(kKeyHeight, &mReportedHeight));
1732
1733 if (mVideoRenderer != NULL) {
1734 mVideoRendererIsPreview = false;
1735 initRenderer_l();
1736 }
1737 continue;
1738 }
1739 LOGV("PreviewPlayer: onVideoEvent EOS reached.");
1740 mFlags |= VIDEO_AT_EOS;
1741 postStreamDoneEvent_l(err);
1742 return OK;
1743 }
1744
1745 if (mVideoBuffer->range_length() == 0) {
1746 // Some decoders, notably the PV AVC software decoder
1747 // return spurious empty buffers that we just want to ignore.
1748
1749 mVideoBuffer->release();
1750 mVideoBuffer = NULL;
1751 continue;
1752 }
1753
1754 int64_t videoTimeUs;
1755 CHECK(mVideoBuffer->meta_data()->findInt64(kKeyTime, &videoTimeUs));
1756
1757 if((videoTimeUs/1000) < mPlayBeginTimeMsec) {
1758 // buffers are before begin cut time
1759 // ignore them
1760 //LOGI("PreviewPlayer: Ignoring buffers before begin cut time");
1761 mVideoBuffer->release();
1762 mVideoBuffer = NULL;
1763 continue;
1764 }
1765
1766 break;
1767 }
1768 }
1769
1770 int64_t timeUs;
1771 CHECK(mVideoBuffer->meta_data()->findInt64(kKeyTime, &timeUs));
1772
1773 {
1774 Mutex::Autolock autoLock(mMiscStateLock);
1775 mVideoTimeUs = timeUs;
1776 }
1777
1778 mDecodedVideoTs = timeUs;
1779
1780 return OK;
1781
1782}
1783
1784} // namespace android