/*
 * Copyright (C) 2011 NXP Software
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#define LOG_NDEBUG 1
#define LOG_TAG "PreviewPlayer"
#include <utils/Log.h>

#include <dlfcn.h>

#include "include/ARTSPController.h"
#include "PreviewPlayer.h"
#include "DummyAudioSource.h"
#include "DummyVideoSource.h"
#include "VideoEditorSRC.h"
#include "include/LiveSession.h"
#include "include/NuCachedSource2.h"
#include "include/ThrottledSource.h"


#include "PreviewRenderer.h"

#include <binder/IPCThreadState.h>
#include <media/stagefright/DataSource.h>
#include <media/stagefright/FileSource.h>
#include <media/stagefright/MediaBuffer.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MediaExtractor.h>
#include <media/stagefright/MediaDebug.h>
#include <media/stagefright/MediaSource.h>
#include <media/stagefright/MetaData.h>
#include <media/stagefright/OMXCodec.h>

#include <surfaceflinger/Surface.h>
#include <media/stagefright/foundation/ALooper.h>

namespace android {


struct PreviewPlayerEvent : public TimedEventQueue::Event {
    PreviewPlayerEvent(
            PreviewPlayer *player,
            void (PreviewPlayer::*method)())
        : mPlayer(player),
          mMethod(method) {
    }

protected:
    virtual ~PreviewPlayerEvent() {}

    virtual void fire(TimedEventQueue *queue, int64_t /* now_us */) {
        (mPlayer->*mMethod)();
    }

private:
    PreviewPlayer *mPlayer;
    void (PreviewPlayer::*mMethod)();

    PreviewPlayerEvent(const PreviewPlayerEvent &);
    PreviewPlayerEvent &operator=(const PreviewPlayerEvent &);
};


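// PreviewPlayerEvent simply forwards TimedEventQueue callbacks to a member
// function of PreviewPlayer. A minimal usage sketch (illustrative only; the
// real call sites are the constructor and cancelPlayerEvents() below):
//
//     sp<TimedEventQueue::Event> e =
//             new PreviewPlayerEvent(this, &PreviewPlayer::onVideoEvent);
//     mQueue.postEventWithDelay(e, 10000);   // fire roughly 10ms from now
//     mQueue.cancelEvent(e->eventID());      // or cancel it again
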
struct PreviewLocalRenderer : public PreviewPlayerRenderer {
    PreviewLocalRenderer(
            bool previewOnly,
            OMX_COLOR_FORMATTYPE colorFormat,
            const sp<Surface> &surface,
            size_t displayWidth, size_t displayHeight,
            size_t decodedWidth, size_t decodedHeight,
            int32_t rotationDegrees = 0)
        : mTarget(NULL) {
        init(previewOnly,
             colorFormat, surface,
             displayWidth, displayHeight,
             decodedWidth, decodedHeight,
             rotationDegrees);
    }

    virtual void render(MediaBuffer *buffer) {
        render((const uint8_t *)buffer->data() + buffer->range_offset(),
               buffer->range_length());
    }

    void render(const void *data, size_t size) {
        mTarget->render(data, size, NULL);
    }
    void render() {
        mTarget->renderYV12();
    }
    void getBuffer(uint8_t **data, size_t *stride) {
        mTarget->getBufferYV12(data, stride);
    }

protected:
    virtual ~PreviewLocalRenderer() {
        delete mTarget;
        mTarget = NULL;
    }

private:
    PreviewRenderer *mTarget;

    void init(
            bool previewOnly,
            OMX_COLOR_FORMATTYPE colorFormat,
            const sp<Surface> &surface,
            size_t displayWidth, size_t displayHeight,
            size_t decodedWidth, size_t decodedHeight,
            int32_t rotationDegrees = 0);

    PreviewLocalRenderer(const PreviewLocalRenderer &);
    PreviewLocalRenderer &operator=(const PreviewLocalRenderer &);
};

void PreviewLocalRenderer::init(
        bool previewOnly,
        OMX_COLOR_FORMATTYPE colorFormat,
        const sp<Surface> &surface,
        size_t displayWidth, size_t displayHeight,
        size_t decodedWidth, size_t decodedHeight,
        int32_t rotationDegrees) {
    mTarget = new PreviewRenderer(
            colorFormat, surface, displayWidth, displayHeight,
            decodedWidth, decodedHeight, rotationDegrees);
}

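// The local renderer wraps a PreviewRenderer that draws into the supplied
// Surface. A minimal usage sketch (illustrative only; the actual call sites
// are initRenderer_l() and resume() below, and the dimensions shown here are
// assumed, not taken from this file):
//
//     sp<PreviewPlayerRenderer> renderer = new PreviewLocalRenderer(
//             false /* previewOnly */,
//             OMX_COLOR_FormatYUV420Planar,
//             surface,
//             640, 480,    // display width/height
//             640, 480);   // decoded width/height
//     renderer->render(videoBuffer);
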
PreviewPlayer::PreviewPlayer()
    : AwesomePlayer(),
      mFrameRGBBuffer(NULL),
      mFrameYUVBuffer(NULL),
      mReportedWidth(0),
      mReportedHeight(0) {

    mVideoRenderer = NULL;
    mLastVideoBuffer = NULL;
    mSuspensionState = NULL;
    mEffectsSettings = NULL;
    mAudioMixStoryBoardTS = 0;
    mCurrentMediaBeginCutTime = 0;
    mCurrentMediaVolumeValue = 0;
    mNumberEffects = 0;
    mDecodedVideoTs = 0;
    mDecVideoTsStoryBoard = 0;
    mCurrentVideoEffect = VIDEO_EFFECT_NONE;
    mProgressCbInterval = 0;
    mNumberDecVideoFrames = 0;

    mVideoEvent = new PreviewPlayerEvent(this, &PreviewPlayer::onVideoEvent);
    mVideoEventPending = false;
    mStreamDoneEvent = new PreviewPlayerEvent(this,
            &AwesomePlayer::onStreamDone);

    mStreamDoneEventPending = false;

    mCheckAudioStatusEvent = new PreviewPlayerEvent(
            this, &AwesomePlayer::onCheckAudioStatus);

    mAudioStatusEventPending = false;

    mProgressCbEvent = new PreviewPlayerEvent(this,
            &PreviewPlayer::onProgressCbEvent);

    mProgressCbEventPending = false;
    mResizedVideoBuffer = NULL;
    mVideoResizedOrCropped = false;
    mRenderingMode = (M4xVSS_MediaRendering)MEDIA_RENDERING_INVALID;
    mIsFiftiesEffectStarted = false;
    reset();
}

PreviewPlayer::~PreviewPlayer() {

    if (mQueueStarted) {
        mQueue.stop();
    }

    reset();

    if (mResizedVideoBuffer != NULL) {
        M4OSA_free((M4OSA_MemAddr32)(mResizedVideoBuffer->data()));
        mResizedVideoBuffer = NULL;
    }

    mVideoRenderer.clear();
    mVideoRenderer = NULL;
}

void PreviewPlayer::cancelPlayerEvents(bool keepBufferingGoing) {
    mQueue.cancelEvent(mVideoEvent->eventID());
    mVideoEventPending = false;
    mQueue.cancelEvent(mStreamDoneEvent->eventID());
    mStreamDoneEventPending = false;
    mQueue.cancelEvent(mCheckAudioStatusEvent->eventID());
    mAudioStatusEventPending = false;

    mQueue.cancelEvent(mProgressCbEvent->eventID());
    mProgressCbEventPending = false;
}

status_t PreviewPlayer::setDataSource(
        const char *uri, const KeyedVector<String8, String8> *headers) {
    Mutex::Autolock autoLock(mLock);
    return setDataSource_l(uri, headers);
}

status_t PreviewPlayer::setDataSource_l(
        const char *uri, const KeyedVector<String8, String8> *headers) {
    reset_l();

    mUri = uri;

    if (headers) {
        mUriHeaders = *headers;
    }

    // The actual work will be done during preparation in the call to
    // ::finishSetDataSource_l to avoid blocking the calling thread in
    // setDataSource for any significant time.
    return OK;
}

status_t PreviewPlayer::setDataSource_l(const sp<MediaExtractor> &extractor) {
    bool haveAudio = false;
    bool haveVideo = false;
    for (size_t i = 0; i < extractor->countTracks(); ++i) {
        sp<MetaData> meta = extractor->getTrackMetaData(i);

        const char *mime;
        CHECK(meta->findCString(kKeyMIMEType, &mime));

        if (!haveVideo && !strncasecmp(mime, "video/", 6)) {
            setVideoSource(extractor->getTrack(i));
            haveVideo = true;
        } else if (!haveAudio && !strncasecmp(mime, "audio/", 6)) {
            setAudioSource(extractor->getTrack(i));
            haveAudio = true;

            if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_VORBIS)) {
                // Only do this for vorbis audio, none of the other audio
                // formats even support this ringtone specific hack and
                // retrieving the metadata on some extractors may turn out
                // to be very expensive.
                sp<MetaData> fileMeta = extractor->getMetaData();
                int32_t loop;
                if (fileMeta != NULL
                        && fileMeta->findInt32(kKeyAutoLoop, &loop)
                        && loop != 0) {
                    mFlags |= AUTO_LOOPING;
                }
            }
        }

        if (haveAudio && haveVideo) {
            break;
        }
    }

    /* Add the support for Dummy audio */
    if (!haveAudio) {
        LOGV("PreviewPlayer: setDataSource_l Dummyaudiocreation started");

        mAudioTrack = DummyAudioSource::Create(32000, 2, 20000,
                ((mPlayEndTimeMsec)*1000));
        LOGV("PreviewPlayer: setDataSource_l Dummyaudiosource created");
        if (mAudioTrack != NULL) {
            haveAudio = true;
        }
    }

    if (!haveAudio && !haveVideo) {
        return UNKNOWN_ERROR;
    }

    mExtractorFlags = extractor->flags();
    return OK;
}

status_t PreviewPlayer::setDataSource_l_jpg() {
    LOGV("PreviewPlayer: setDataSource_l_jpg started");

    mAudioSource = DummyAudioSource::Create(32000, 2, 20000,
            ((mPlayEndTimeMsec)*1000));
    LOGV("PreviewPlayer: setDataSource_l_jpg Dummyaudiosource created");
    if (mAudioSource != NULL) {
        setAudioSource(mAudioSource);
    }
    status_t error = mAudioSource->start();
    if (error != OK) {
        LOGV("Error starting dummy audio source");
        mAudioSource.clear();
        return error;
    }

    mDurationUs = (mPlayEndTimeMsec - mPlayBeginTimeMsec)*1000;

    mVideoSource = DummyVideoSource::Create(mVideoWidth, mVideoHeight,
            mDurationUs, mUri);
    mReportedWidth = mVideoWidth;
    mReportedHeight = mVideoHeight;

    setVideoSource(mVideoSource);
    status_t err1 = mVideoSource->start();
    if (err1 != OK) {
        mVideoSource.clear();
        return err1;
    }

    mIsVideoSourceJpg = true;
    return OK;
}

void PreviewPlayer::reset() {
    Mutex::Autolock autoLock(mLock);
    reset_l();
}

void PreviewPlayer::reset_l() {

    if (mFlags & PREPARING) {
        mFlags |= PREPARE_CANCELLED;
    }

    while (mFlags & PREPARING) {
        mPreparedCondition.wait(mLock);
    }

    cancelPlayerEvents();
    mAudioTrack.clear();
    mVideoTrack.clear();

    // Shut down audio first, so that the response to the reset request
    // appears to happen instantaneously as far as the user is concerned.
    // If we did this later, audio would continue playing while we
    // shut down the video-related resources and the player would appear
    // to be less responsive to a reset request.
    if (mAudioPlayer == NULL && mAudioSource != NULL) {
        // If we had an audio player, it would have effectively
        // taken possession of the audio source and stopped it when
        // _it_ is stopped. Otherwise this is still our responsibility.
        mAudioSource->stop();
    }
    mAudioSource.clear();

    mTimeSource = NULL;

    delete mAudioPlayer;
    mAudioPlayer = NULL;

    if (mLastVideoBuffer) {
        mLastVideoBuffer->release();
        mLastVideoBuffer = NULL;
    }

    if (mVideoBuffer) {
        mVideoBuffer->release();
        mVideoBuffer = NULL;
    }

    if (mVideoSource != NULL) {
        mVideoSource->stop();

        // The following hack is necessary to ensure that the OMX
        // component is completely released by the time we may try
        // to instantiate it again.
        wp<MediaSource> tmp = mVideoSource;
        mVideoSource.clear();
        while (tmp.promote() != NULL) {
            usleep(1000);
        }
        IPCThreadState::self()->flushCommands();
    }

    mDurationUs = -1;
    mFlags = 0;
    mExtractorFlags = 0;
    mVideoWidth = mVideoHeight = -1;
    mTimeSourceDeltaUs = 0;
    mVideoTimeUs = 0;

    mSeeking = false;
    mSeekNotificationSent = false;
    mSeekTimeUs = 0;

    mUri.setTo("");
    mUriHeaders.clear();

    mFileSource.clear();

    delete mSuspensionState;
    mSuspensionState = NULL;

    mCurrentVideoEffect = VIDEO_EFFECT_NONE;
    mIsVideoSourceJpg = false;
    mFrameRGBBuffer = NULL;
    if (mFrameYUVBuffer != NULL) {
        M4OSA_free((M4OSA_MemAddr32)mFrameYUVBuffer);
        mFrameYUVBuffer = NULL;
    }
}

void PreviewPlayer::partial_reset_l() {

    if (mLastVideoBuffer) {
        mLastVideoBuffer->release();
        mLastVideoBuffer = NULL;
    }

    /* call base class implementation */
    AwesomePlayer::partial_reset_l();

}

status_t PreviewPlayer::play() {
    Mutex::Autolock autoLock(mLock);

    mFlags &= ~CACHE_UNDERRUN;

    return play_l();
}

status_t PreviewPlayer::play_l() {
    VideoEditorAudioPlayer *mVePlayer;
    if (mFlags & PLAYING) {
        return OK;
    }
    mStartNextPlayer = false;

    if (!(mFlags & PREPARED)) {
        status_t err = prepare_l();

        if (err != OK) {
            return err;
        }
    }

    mFlags |= PLAYING;
    mFlags |= FIRST_FRAME;

    bool deferredAudioSeek = false;

    if (mAudioSource != NULL) {
        if (mAudioPlayer == NULL) {
            if (mAudioSink != NULL) {

                mAudioPlayer = new VideoEditorAudioPlayer(mAudioSink, this);
                mVePlayer = (VideoEditorAudioPlayer*)mAudioPlayer;

                mAudioPlayer->setSource(mAudioSource);

                mVePlayer->setAudioMixSettings(
                        mPreviewPlayerAudioMixSettings);

                mVePlayer->setAudioMixPCMFileHandle(
                        mAudioMixPCMFileHandle);

                mVePlayer->setAudioMixStoryBoardSkimTimeStamp(
                        mAudioMixStoryBoardTS, mCurrentMediaBeginCutTime,
                        mCurrentMediaVolumeValue);

                // We've already started the MediaSource in order to enable
                // the prefetcher to read its data.
                status_t err = mVePlayer->start(
                        true /* sourceAlreadyStarted */);

                if (err != OK) {
                    delete mAudioPlayer;
                    mAudioPlayer = NULL;

                    mFlags &= ~(PLAYING | FIRST_FRAME);
                    return err;
                }

                mTimeSource = mVePlayer; //mAudioPlayer;

                deferredAudioSeek = true;
                mWatchForAudioSeekComplete = false;
                mWatchForAudioEOS = true;
            }
        } else {
            // mVePlayer is only assigned in the branch above, so when
            // resuming an existing audio player we cast mAudioPlayer here
            // rather than reading the uninitialized local.
            ((VideoEditorAudioPlayer*)mAudioPlayer)->resume();
        }

    }

    if (mTimeSource == NULL && mAudioPlayer == NULL) {
        mTimeSource = &mSystemTimeSource;
    }

    if (mVideoSource != NULL) {
        // Kick off video playback
        postVideoEvent_l();
    }

    if (deferredAudioSeek) {
        // If there was a seek request while we were paused
        // and we're just starting up again, honor the request now.
        seekAudioIfNecessary_l();
    }

    if (mFlags & AT_EOS) {
        // Legacy behaviour, if a stream finishes playing and then
        // is started again, we play from the start...
        seekTo_l(0);
    }

    return OK;
}


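// Clock selection in play_l(): when an audio player exists it doubles as the
// TimeSource, so video is scheduled against the audio clock; otherwise the
// system clock is used. onVideoEvent() below compares decoded frame timestamps
// against this clock to decide whether to drop, delay or render a frame.
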
void PreviewPlayer::initRenderer_l() {
    if (mSurface != NULL || mISurface != NULL) {
        sp<MetaData> meta = mVideoSource->getFormat();

        int32_t format;
        const char *component;
        int32_t decodedWidth, decodedHeight;
        CHECK(meta->findInt32(kKeyColorFormat, &format));
        CHECK(meta->findCString(kKeyDecoderComponent, &component));
        CHECK(meta->findInt32(kKeyWidth, &decodedWidth));
        CHECK(meta->findInt32(kKeyHeight, &decodedHeight));

        // Must ensure that mVideoRenderer's destructor is actually executed
        // before creating a new one.
        IPCThreadState::self()->flushCommands();

        // Always use the local renderer, since decoded buffers are modified
        // by the postprocessing module. The decoders are instantiated
        // locally and as a consequence allocate their buffers in local
        // address space.
        if (mVideoRenderer == NULL) {

            mVideoRenderer = new PreviewLocalRenderer(
                    false,  // previewOnly
                    (OMX_COLOR_FORMATTYPE)format,
                    mSurface,
                    mOutputVideoWidth, mOutputVideoHeight,
                    mOutputVideoWidth, mOutputVideoHeight);
        }
    }
}


void PreviewPlayer::setISurface(const sp<ISurface> &isurface) {
    Mutex::Autolock autoLock(mLock);
    mISurface = isurface;
}


status_t PreviewPlayer::seekTo(int64_t timeUs) {

    if ((mExtractorFlags & MediaExtractor::CAN_SEEK) || (mIsVideoSourceJpg)) {
        Mutex::Autolock autoLock(mLock);
        return seekTo_l(timeUs);
    }

    return OK;
}


status_t PreviewPlayer::getVideoDimensions(
        int32_t *width, int32_t *height) const {
    Mutex::Autolock autoLock(mLock);

    if (mVideoWidth < 0 || mVideoHeight < 0) {
        return UNKNOWN_ERROR;
    }

    *width = mVideoWidth;
    *height = mVideoHeight;

    return OK;
}


status_t PreviewPlayer::initAudioDecoder() {
    sp<MetaData> meta = mAudioTrack->getFormat();
    const char *mime;
    CHECK(meta->findCString(kKeyMIMEType, &mime));

    if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_RAW)) {
        mAudioSource = mAudioTrack;
    } else {
        sp<MediaSource> aRawSource;
        aRawSource = OMXCodec::Create(
                mClient.interface(), mAudioTrack->getFormat(),
                false, // createEncoder
                mAudioTrack);

        if (aRawSource != NULL) {
            LOGV("initAudioDecoder: new VideoEditorSRC");
            mAudioSource = new VideoEditorSRC(aRawSource);
        }
    }

    if (mAudioSource != NULL) {
        int64_t durationUs;
        if (mAudioTrack->getFormat()->findInt64(kKeyDuration, &durationUs)) {
            Mutex::Autolock autoLock(mMiscStateLock);
            if (mDurationUs < 0 || durationUs > mDurationUs) {
                mDurationUs = durationUs;
            }
        }
        status_t err = mAudioSource->start();

        if (err != OK) {
            mAudioSource.clear();
            return err;
        }
    } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_QCELP)) {
        // For legacy reasons we're simply going to ignore the absence
        // of an audio decoder for QCELP instead of aborting playback
        // altogether.
        return OK;
    }

    return mAudioSource != NULL ? OK : UNKNOWN_ERROR;
}


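// The audio chain built above is: track -> OMXCodec (decoded PCM) ->
// VideoEditorSRC, while raw PCM tracks are used directly without a decoder.
// VideoEditorSRC's role as a resampling wrapper is inferred from its name and
// usage here; see VideoEditorSRC.h for the actual interface.
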
status_t PreviewPlayer::initVideoDecoder(uint32_t flags) {

    mVideoSource = OMXCodec::Create(
            mClient.interface(), mVideoTrack->getFormat(),
            false,
            mVideoTrack,
            NULL, flags);

    if (mVideoSource != NULL) {
        int64_t durationUs;
        if (mVideoTrack->getFormat()->findInt64(kKeyDuration, &durationUs)) {
            Mutex::Autolock autoLock(mMiscStateLock);
            if (mDurationUs < 0 || durationUs > mDurationUs) {
                mDurationUs = durationUs;
            }
        }

        CHECK(mVideoTrack->getFormat()->findInt32(kKeyWidth, &mVideoWidth));
        CHECK(mVideoTrack->getFormat()->findInt32(kKeyHeight, &mVideoHeight));

        mReportedWidth = mVideoWidth;
        mReportedHeight = mVideoHeight;

        status_t err = mVideoSource->start();

        if (err != OK) {
            mVideoSource.clear();
            return err;
        }
    }

    return mVideoSource != NULL ? OK : UNKNOWN_ERROR;
}


void PreviewPlayer::onVideoEvent() {
    uint32_t i = 0;
    bool bAppliedVideoEffect = false;
    M4OSA_ERR err1 = M4NO_ERROR;
    int64_t imageFrameTimeUs = 0;

    Mutex::Autolock autoLock(mLock);
    if (!mVideoEventPending) {
        // The event has been cancelled in reset_l() but had already
        // been scheduled for execution at that time.
        return;
    }
    mVideoEventPending = false;

    TimeSource *ts_st = &mSystemTimeSource;
    int64_t timeStartUs = ts_st->getRealTimeUs();

    if (mSeeking) {
        if (mLastVideoBuffer) {
            mLastVideoBuffer->release();
            mLastVideoBuffer = NULL;
        }


        if (mAudioSource != NULL) {

            // We're going to seek the video source first, followed by
            // the audio source.
            // In order to avoid jumps in the DataSource offset caused by
            // the audio codec prefetching data from the old locations
            // while the video codec is already reading data from the new
            // locations, we'll "pause" the audio source, causing it to
            // stop reading input data until a subsequent seek.

            if (mAudioPlayer != NULL) {
                mAudioPlayer->pause();
            }
            mAudioSource->pause();
        }
    }

    if (!mVideoBuffer) {
        MediaSource::ReadOptions options;
        if (mSeeking) {
            LOGV("LV PLAYER seeking to %lld us (%.2f secs)", mSeekTimeUs,
                    mSeekTimeUs / 1E6);

            options.setSeekTo(
                    mSeekTimeUs, MediaSource::ReadOptions::SEEK_CLOSEST);
        }
        for (;;) {
            status_t err = mVideoSource->read(&mVideoBuffer, &options);
            options.clearSeekTo();

            if (err != OK) {
                CHECK_EQ(mVideoBuffer, NULL);

                if (err == INFO_FORMAT_CHANGED) {
                    LOGV("LV PLAYER VideoSource signalled format change");
                    notifyVideoSize_l();
                    sp<MetaData> meta = mVideoSource->getFormat();

                    CHECK(meta->findInt32(kKeyWidth, &mReportedWidth));
                    CHECK(meta->findInt32(kKeyHeight, &mReportedHeight));
                    if (mVideoRenderer != NULL) {
                        mVideoRendererIsPreview = false;
                        initRenderer_l();
                    }
                    continue;
                }
                // So video playback is complete, but we may still have
                // a seek request pending that needs to be applied
                // to the audio track.
                if (mSeeking) {
                    LOGV("video stream ended while seeking!");
                }
                finishSeekIfNecessary(-1);
                LOGV("PreviewPlayer: onVideoEvent EOS reached.");
                mFlags |= VIDEO_AT_EOS;
                postStreamDoneEvent_l(err);
                return;
            }

            if (mVideoBuffer->range_length() == 0) {
                // Some decoders, notably the PV AVC software decoder
                // return spurious empty buffers that we just want to ignore.

                mVideoBuffer->release();
                mVideoBuffer = NULL;
                continue;
            }

            int64_t videoTimeUs;
            CHECK(mVideoBuffer->meta_data()->findInt64(kKeyTime, &videoTimeUs));

            if ((videoTimeUs/1000) < mPlayBeginTimeMsec) {
                // Frames before the begin cut time are decoded
                // but not rendered.
                mVideoBuffer->release();
                mVideoBuffer = NULL;
                continue;
            }

            break;
        }
    }

    mNumberDecVideoFrames++;

    int64_t timeUs;
    CHECK(mVideoBuffer->meta_data()->findInt64(kKeyTime, &timeUs));

    {
        Mutex::Autolock autoLock(mMiscStateLock);
        mVideoTimeUs = timeUs;
    }

    mDecodedVideoTs = timeUs;

    if (!mStartNextPlayer) {
        int64_t playbackTimeRemaining = (mPlayEndTimeMsec*1000) - timeUs;
        if (playbackTimeRemaining <= 1500000) {
            // When less than 1.5 sec of playback left
            // send notification to start next player

            mStartNextPlayer = true;
            notifyListener_l(0xAAAAAAAA);
        }
    }

    bool wasSeeking = mSeeking;
    finishSeekIfNecessary(timeUs);

    TimeSource *ts = (mFlags & AUDIO_AT_EOS) ? &mSystemTimeSource : mTimeSource;

    if (ts == NULL) {
        mVideoBuffer->release();
        mVideoBuffer = NULL;
        return;
    }

    if (!mIsVideoSourceJpg) {
        if (mFlags & FIRST_FRAME) {
            mFlags &= ~FIRST_FRAME;

            mTimeSourceDeltaUs = ts->getRealTimeUs() - timeUs;
        }

        int64_t realTimeUs, mediaTimeUs;
        if (!(mFlags & AUDIO_AT_EOS) && mAudioPlayer != NULL
            && mAudioPlayer->getMediaTimeMapping(&realTimeUs, &mediaTimeUs)) {
            mTimeSourceDeltaUs = realTimeUs - mediaTimeUs;
        }

        int64_t nowUs = ts->getRealTimeUs() - mTimeSourceDeltaUs;

        int64_t latenessUs = nowUs - timeUs;

        if (wasSeeking) {
            // Let's display the first frame after seeking right away.
            latenessUs = 0;
        }
        LOGV("Audio time stamp = %lld and video time stamp = %lld",
                ts->getRealTimeUs(), timeUs);
        if (latenessUs > 40000) {
            // We're more than 40ms late.

            LOGV("LV PLAYER we're late by %lld us (%.2f secs)",
                    latenessUs, latenessUs / 1E6);

            mVideoBuffer->release();
            mVideoBuffer = NULL;

            postVideoEvent_l();
            return;
        }

        if (latenessUs < -10000) {
            // We're more than 10ms early.
            LOGV("We're more than 10ms early, lateness %lld", latenessUs);

            postVideoEvent_l(10000);
            return;
        }
    }

    if (mVideoRendererIsPreview || mVideoRenderer == NULL) {
        mVideoRendererIsPreview = false;

        initRenderer_l();
    }

    // If the timestamp exceeds the endCutTime of the clip, do not render.
    if ((timeUs/1000) > mPlayEndTimeMsec) {
        if (mLastVideoBuffer) {
            mLastVideoBuffer->release();
            mLastVideoBuffer = NULL;
        }
        mLastVideoBuffer = mVideoBuffer;
        mVideoBuffer = NULL;
        mFlags |= VIDEO_AT_EOS;
        mFlags |= AUDIO_AT_EOS;
        LOGI("PreviewPlayer: onVideoEvent timeUs > mPlayEndTime; send EOS..");
        postStreamDoneEvent_l(ERROR_END_OF_STREAM);
        return;
    }

    // Post processing to apply video effects
    for (i = 0; i < mNumberEffects; i++) {
        // First check if effect starttime matches the clip being previewed
        if ((mEffectsSettings[i].uiStartTime < (mDecVideoTsStoryBoard/1000)) ||
            (mEffectsSettings[i].uiStartTime >=
            ((mDecVideoTsStoryBoard/1000) + mPlayEndTimeMsec - mPlayBeginTimeMsec)))
        {
            // This effect doesn't belong to this clip, check next one
            continue;
        }
        // Check if effect applies to this particular frame timestamp
        if ((mEffectsSettings[i].uiStartTime <=
            (((timeUs+mDecVideoTsStoryBoard)/1000)-mPlayBeginTimeMsec)) &&
            ((mEffectsSettings[i].uiStartTime+mEffectsSettings[i].uiDuration) >=
            (((timeUs+mDecVideoTsStoryBoard)/1000)-mPlayBeginTimeMsec))
            && (mEffectsSettings[i].uiDuration != 0)) {

            setVideoPostProcessingNode(
                    mEffectsSettings[i].VideoEffectType, TRUE);
        }
        else {
            setVideoPostProcessingNode(
                    mEffectsSettings[i].VideoEffectType, FALSE);
        }

    }

    if (mCurrentVideoEffect != VIDEO_EFFECT_NONE) {
        err1 = doVideoPostProcessing();
        if (err1 != M4NO_ERROR) {
            LOGE("doVideoPostProcessing returned err");
            bAppliedVideoEffect = false;
        }
        else {
            bAppliedVideoEffect = true;
        }
    }
    else {
        bAppliedVideoEffect = false;
        if (mRenderingMode != MEDIA_RENDERING_INVALID) {
            // No effects to be applied, but media rendering to be done
            err1 = doMediaRendering();
            if (err1 != M4NO_ERROR) {
                LOGE("doMediaRendering returned err");
                // Use original mVideoBuffer for rendering
                mVideoResizedOrCropped = false;
            }
        }
    }

    if (mVideoRenderer != NULL) {
        LOGV("mVideoRenderer CALL render()");
        mVideoRenderer->render();
    }

    if (mLastVideoBuffer) {
        mLastVideoBuffer->release();
        mLastVideoBuffer = NULL;
    }

    mLastVideoBuffer = mVideoBuffer;
    mVideoBuffer = NULL;

    // Post progress callback based on callback interval set
    if (mNumberDecVideoFrames >= mProgressCbInterval) {
        postProgressCallbackEvent_l();
        mNumberDecVideoFrames = 0;  // reset counter
    }

    // if reached EndCutTime of clip, post EOS event
    if ((timeUs/1000) >= mPlayEndTimeMsec) {
        LOGV("PreviewPlayer: onVideoEvent EOS.");
        mFlags |= VIDEO_AT_EOS;
        mFlags |= AUDIO_AT_EOS;
        postStreamDoneEvent_l(ERROR_END_OF_STREAM);
    }
    else {
        if (!mIsVideoSourceJpg) {
            postVideoEvent_l();
        }
        else {
            postVideoEvent_l(33000);
        }
    }
}

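// A note on the scheduling above (a summary of the code, no new behaviour):
// latenessUs = (clock time - mTimeSourceDeltaUs) - frame timestamp. Frames
// more than 40ms late are dropped and the next event is posted immediately;
// frames more than 10ms early are re-posted with a 10ms delay. JPEG (image)
// clips bypass this check and are simply re-posted every 33ms, i.e. roughly
// 30 frames per second.
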
status_t PreviewPlayer::prepare() {
    Mutex::Autolock autoLock(mLock);
    return prepare_l();
}

status_t PreviewPlayer::prepare_l() {
    if (mFlags & PREPARED) {
        return OK;
    }

    if (mFlags & PREPARING) {
        return UNKNOWN_ERROR;
    }

    mIsAsyncPrepare = false;
    status_t err = prepareAsync_l();

    if (err != OK) {
        return err;
    }

    while (mFlags & PREPARING) {
        mPreparedCondition.wait(mLock);
    }

    return mPrepareResult;
}

status_t PreviewPlayer::prepareAsync_l() {
    if (mFlags & PREPARING) {
        return UNKNOWN_ERROR;  // async prepare already pending
    }

    if (!mQueueStarted) {
        mQueue.start();
        mQueueStarted = true;
    }

    mFlags |= PREPARING;
    mAsyncPrepareEvent = new PreviewPlayerEvent(
            this, &PreviewPlayer::onPrepareAsyncEvent);

    mQueue.postEvent(mAsyncPrepareEvent);

    return OK;
}

status_t PreviewPlayer::finishSetDataSource_l() {
    sp<DataSource> dataSource;
    sp<MediaExtractor> extractor;

    dataSource = DataSource::CreateFromURI(mUri.string(), &mUriHeaders);

    if (dataSource == NULL) {
        return UNKNOWN_ERROR;
    }

    // If the file type is .rgb, then there is no need to check for an extractor
    int uriLen = strlen(mUri);
    int startOffset = uriLen - 4;
    if (!strncasecmp(mUri+startOffset, ".rgb", 4)) {
        extractor = NULL;
    }
    else {
        extractor = MediaExtractor::Create(dataSource,
                MEDIA_MIMETYPE_CONTAINER_MPEG4);
    }

    if (extractor == NULL) {
        LOGV("PreviewPlayer::finishSetDataSource_l  extractor == NULL");
        return setDataSource_l_jpg();
    }

    return setDataSource_l(extractor);
}


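// Data source selection, as implemented above: URIs ending in ".rgb" and
// anything the MPEG4 extractor cannot parse are treated as still images and
// routed to setDataSource_l_jpg(), which pairs a DummyVideoSource with a
// DummyAudioSource; everything else goes through the regular extractor path.
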
// static
bool PreviewPlayer::ContinuePreparation(void *cookie) {
    PreviewPlayer *me = static_cast<PreviewPlayer *>(cookie);

    return (me->mFlags & PREPARE_CANCELLED) == 0;
}

void PreviewPlayer::onPrepareAsyncEvent() {
    Mutex::Autolock autoLock(mLock);
    LOGV("onPrepareAsyncEvent");

    if (mFlags & PREPARE_CANCELLED) {
        LOGI("LV PLAYER prepare was cancelled before doing anything");
        abortPrepare(UNKNOWN_ERROR);
        return;
    }

    if (mUri.size() > 0) {
        status_t err = finishSetDataSource_l();

        if (err != OK) {
            abortPrepare(err);
            return;
        }
    }

    if (mVideoTrack != NULL && mVideoSource == NULL) {
        status_t err = initVideoDecoder(OMXCodec::kHardwareCodecsOnly);

        if (err != OK) {
            abortPrepare(err);
            return;
        }
    }

    if (mAudioTrack != NULL && mAudioSource == NULL) {
        status_t err = initAudioDecoder();

        if (err != OK) {
            abortPrepare(err);
            return;
        }
    }
    finishAsyncPrepare_l();

}

void PreviewPlayer::finishAsyncPrepare_l() {
    if (mIsAsyncPrepare) {
        if (mVideoSource == NULL) {
            LOGV("finishAsyncPrepare_l: MEDIA_SET_VIDEO_SIZE 0 0 ");
            notifyListener_l(MEDIA_SET_VIDEO_SIZE, 0, 0);
        } else {
            LOGV("finishAsyncPrepare_l: MEDIA_SET_VIDEO_SIZE");
            notifyVideoSize_l();
        }
        LOGV("finishAsyncPrepare_l: MEDIA_PREPARED");
        notifyListener_l(MEDIA_PREPARED);
    }

    mPrepareResult = OK;
    mFlags &= ~(PREPARING|PREPARE_CANCELLED);
    mFlags |= PREPARED;
    mAsyncPrepareEvent = NULL;
    mPreparedCondition.broadcast();
}

status_t PreviewPlayer::suspend() {
    LOGV("suspend");
    Mutex::Autolock autoLock(mLock);

    if (mSuspensionState != NULL) {
        if (mLastVideoBuffer == NULL) {
            // We get here if video is suspended again after resuming
            // without having been played in between.
            SuspensionState *state = mSuspensionState;
            mSuspensionState = NULL;
            reset_l();
            mSuspensionState = state;
            return OK;
        }

        delete mSuspensionState;
        mSuspensionState = NULL;
    }

    if (mFlags & PREPARING) {
        mFlags |= PREPARE_CANCELLED;
    }

    while (mFlags & PREPARING) {
        mPreparedCondition.wait(mLock);
    }

    SuspensionState *state = new SuspensionState;
    state->mUri = mUri;
    state->mUriHeaders = mUriHeaders;
    state->mFileSource = mFileSource;

    state->mFlags = mFlags & (PLAYING | AUTO_LOOPING | LOOPING | AT_EOS);
    getPosition(&state->mPositionUs);

    if (mLastVideoBuffer) {
        size_t size = mLastVideoBuffer->range_length();
        if (size) {
            int32_t unreadable;
            if (!mLastVideoBuffer->meta_data()->findInt32(
                        kKeyIsUnreadable, &unreadable)
                    || unreadable == 0) {
                state->mLastVideoFrameSize = size;
                state->mLastVideoFrame = malloc(size);
                memcpy(state->mLastVideoFrame,
                       (const uint8_t *)mLastVideoBuffer->data()
                            + mLastVideoBuffer->range_offset(),
                       size);

                state->mVideoWidth = mVideoWidth;
                state->mVideoHeight = mVideoHeight;

                sp<MetaData> meta = mVideoSource->getFormat();
                CHECK(meta->findInt32(kKeyColorFormat, &state->mColorFormat));
                CHECK(meta->findInt32(kKeyWidth, &state->mDecodedWidth));
                CHECK(meta->findInt32(kKeyHeight, &state->mDecodedHeight));
            } else {
                LOGV("Unable to save last video frame, we have no access to "
                     "the decoded video data.");
            }
        }
    }

    reset_l();

    mSuspensionState = state;

    return OK;
}

status_t PreviewPlayer::resume() {
    LOGV("resume");
    Mutex::Autolock autoLock(mLock);

    if (mSuspensionState == NULL) {
        return INVALID_OPERATION;
    }

    SuspensionState *state = mSuspensionState;
    mSuspensionState = NULL;

    status_t err;
    if (state->mFileSource != NULL) {
        err = AwesomePlayer::setDataSource_l(state->mFileSource);

        if (err == OK) {
            mFileSource = state->mFileSource;
        }
    } else {
        err = AwesomePlayer::setDataSource_l(state->mUri, &state->mUriHeaders);
    }

    if (err != OK) {
        delete state;
        state = NULL;

        return err;
    }

    seekTo_l(state->mPositionUs);

    mFlags = state->mFlags & (AUTO_LOOPING | LOOPING | AT_EOS);

    if (state->mLastVideoFrame && (mSurface != NULL || mISurface != NULL)) {
        mVideoRenderer =
            new PreviewLocalRenderer(
                    true,  // previewOnly
                    (OMX_COLOR_FORMATTYPE)state->mColorFormat,
                    mSurface,
                    state->mVideoWidth,
                    state->mVideoHeight,
                    state->mDecodedWidth,
                    state->mDecodedHeight);

        mVideoRendererIsPreview = true;

        ((PreviewLocalRenderer *)mVideoRenderer.get())->render(
                state->mLastVideoFrame, state->mLastVideoFrameSize);
    }

    if (state->mFlags & PLAYING) {
        play_l();
    }

    mSuspensionState = state;
    state = NULL;

    return OK;
}


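// suspend()/resume() snapshot the URI, headers, flags, playback position and
// (when readable) the last decoded frame, so that resume() can show that
// frame through a preview-only PreviewLocalRenderer while the playback
// pipeline is rebuilt. This mirrors the code above; no additional state is
// saved.
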
status_t PreviewPlayer::loadEffectsSettings(
        M4VSS3GPP_EffectSettings* pEffectSettings, int nEffects) {
    M4OSA_UInt32 i = 0, rgbSize = 0;
    M4VIFI_UInt8 *tmp = M4OSA_NULL;

    mNumberEffects = nEffects;
    mEffectsSettings = pEffectSettings;
    return OK;
}

status_t PreviewPlayer::loadAudioMixSettings(
        M4xVSS_AudioMixingSettings* pAudioMixSettings) {

    LOGV("PreviewPlayer: loadAudioMixSettings: ");
    mPreviewPlayerAudioMixSettings = pAudioMixSettings;
    return OK;
}

status_t PreviewPlayer::setAudioMixPCMFileHandle(
        M4OSA_Context pAudioMixPCMFileHandle) {

    LOGV("PreviewPlayer: setAudioMixPCMFileHandle: ");
    mAudioMixPCMFileHandle = pAudioMixPCMFileHandle;
    return OK;
}

status_t PreviewPlayer::setAudioMixStoryBoardParam(
        M4OSA_UInt32 audioMixStoryBoardTS,
        M4OSA_UInt32 currentMediaBeginCutTime,
        M4OSA_UInt32 primaryTrackVolValue) {

    mAudioMixStoryBoardTS = audioMixStoryBoardTS;
    mCurrentMediaBeginCutTime = currentMediaBeginCutTime;
    mCurrentMediaVolumeValue = primaryTrackVolValue;
    return OK;
}

status_t PreviewPlayer::setPlaybackBeginTime(uint32_t msec) {

    mPlayBeginTimeMsec = msec;
    return OK;
}

status_t PreviewPlayer::setPlaybackEndTime(uint32_t msec) {

    mPlayEndTimeMsec = msec;
    return OK;
}

status_t PreviewPlayer::setStoryboardStartTime(uint32_t msec) {

    mStoryboardStartTimeMsec = msec;
    mDecVideoTsStoryBoard = mStoryboardStartTimeMsec*1000;
    return OK;
}

status_t PreviewPlayer::setProgressCallbackInterval(uint32_t cbInterval) {

    mProgressCbInterval = cbInterval;
    return OK;
}


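// Time bookkeeping used throughout this file (derived from the setters above):
//   mPlayBeginTimeMsec / mPlayEndTimeMsec - begin/end cut of the current clip
//   mStoryboardStartTimeMsec              - where this clip starts on the
//                                           overall storyboard timeline
//   mDecVideoTsStoryBoard (us)            - storyboard offset added to each
//                                           decoded timestamp before progress
//                                           is reported to the JNI layer
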
status_t PreviewPlayer::setMediaRenderingMode(
        M4xVSS_MediaRendering mode,
        M4VIDEOEDITING_VideoFrameSize outputVideoSize) {

    mRenderingMode = mode;

    /* reset boolean for each clip */
    mVideoResizedOrCropped = false;

    switch (outputVideoSize) {
        case M4VIDEOEDITING_kSQCIF:
            mOutputVideoWidth = 128;
            mOutputVideoHeight = 96;
            break;

        case M4VIDEOEDITING_kQQVGA:
            mOutputVideoWidth = 160;
            mOutputVideoHeight = 120;
            break;

        case M4VIDEOEDITING_kQCIF:
            mOutputVideoWidth = 176;
            mOutputVideoHeight = 144;
            break;

        case M4VIDEOEDITING_kQVGA:
            mOutputVideoWidth = 320;
            mOutputVideoHeight = 240;
            break;

        case M4VIDEOEDITING_kCIF:
            mOutputVideoWidth = 352;
            mOutputVideoHeight = 288;
            break;

        case M4VIDEOEDITING_kVGA:
            mOutputVideoWidth = 640;
            mOutputVideoHeight = 480;
            break;

        case M4VIDEOEDITING_kWVGA:
            mOutputVideoWidth = 800;
            mOutputVideoHeight = 480;
            break;

        case M4VIDEOEDITING_kNTSC:
            mOutputVideoWidth = 720;
            mOutputVideoHeight = 480;
            break;

        case M4VIDEOEDITING_k640_360:
            mOutputVideoWidth = 640;
            mOutputVideoHeight = 360;
            break;

        case M4VIDEOEDITING_k854_480:
            mOutputVideoWidth = 854;
            mOutputVideoHeight = 480;
            break;

        case M4VIDEOEDITING_kHD1280:
            mOutputVideoWidth = 1280;
            mOutputVideoHeight = 720;
            break;

        case M4VIDEOEDITING_kHD1080:
            mOutputVideoWidth = 1080;
            mOutputVideoHeight = 720;
            break;

        case M4VIDEOEDITING_kHD960:
            mOutputVideoWidth = 960;
            mOutputVideoHeight = 720;
            break;

        default:
            LOGE("unsupported output video size set");
            return BAD_VALUE;
    }

    return OK;
}

M4OSA_ERR PreviewPlayer::doMediaRendering() {
    M4OSA_ERR err = M4NO_ERROR;
    M4VIFI_ImagePlane planeIn[3], planeOut[3];
    M4VIFI_UInt8 *inBuffer = M4OSA_NULL, *finalOutputBuffer = M4OSA_NULL;
    M4VIFI_UInt8 *tempOutputBuffer = M4OSA_NULL;
    size_t videoBufferSize = 0;
    M4OSA_UInt32 frameSize = 0, i = 0, index = 0, nFrameCount = 0, bufferOffset = 0;
    int32_t colorFormat = 0;

    if (!mIsVideoSourceJpg) {
        sp<MetaData> meta = mVideoSource->getFormat();
        CHECK(meta->findInt32(kKeyColorFormat, &colorFormat));
    }
    else {
        colorFormat = OMX_COLOR_FormatYUV420Planar;
    }

    videoBufferSize = mVideoBuffer->size();
    frameSize = (mVideoWidth*mVideoHeight*3) >> 1;

    uint8_t* outBuffer;
    size_t outBufferStride = 0;

    mVideoRenderer->getBuffer(&outBuffer, &outBufferStride);

    bufferOffset = index*frameSize;
    inBuffer = (M4OSA_UInt8 *)mVideoBuffer->data() +
        mVideoBuffer->range_offset() + bufferOffset;


    /* In plane */
    prepareYUV420ImagePlane(planeIn, mVideoWidth,
        mVideoHeight, (M4VIFI_UInt8 *)inBuffer, mReportedWidth, mReportedHeight);

    // Set the output YUV420 plane to be compatible with YV12 format
    // W & H even
    // YVU instead of YUV
    // align buffers on 32 bits

    // In YV12 format, sizes must be even
    M4OSA_UInt32 yv12PlaneWidth = ((mOutputVideoWidth +1)>>1)<<1;
    M4OSA_UInt32 yv12PlaneHeight = ((mOutputVideoHeight+1)>>1)<<1;

    prepareYV12ImagePlane(planeOut, yv12PlaneWidth, yv12PlaneHeight,
        (M4OSA_UInt32)outBufferStride, (M4VIFI_UInt8 *)outBuffer);


    err = applyRenderingMode(planeIn, planeOut, mRenderingMode);

    if (err != M4NO_ERROR) {
        LOGE("doMediaRendering: applyRenderingMode returned err=0x%x", err);
        return err;
    }
    mVideoResizedOrCropped = true;

    return err;
}

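// doMediaRendering() copies the decoded YUV420 planar frame into the
// renderer's YV12 buffer, applying the resize/crop policy selected with
// setMediaRenderingMode(). prepareYUV420ImagePlane, prepareYV12ImagePlane and
// applyRenderingMode are helpers provided elsewhere in the video editor
// module (not defined in this file); their signatures here are simply those
// implied by the calls above.
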
status_t PreviewPlayer::resetJniCallbackTimeStamp() {

    mDecVideoTsStoryBoard = mStoryboardStartTimeMsec*1000;
    return OK;
}

void PreviewPlayer::postProgressCallbackEvent_l() {
    if (mProgressCbEventPending) {
        return;
    }
    mProgressCbEventPending = true;

    mQueue.postEvent(mProgressCbEvent);
}

void PreviewPlayer::onProgressCbEvent() {
    Mutex::Autolock autoLock(mLock);
    if (!mProgressCbEventPending) {
        return;
    }
    mProgressCbEventPending = false;
    // If playback starts from previous I-frame,
    // then send frame storyboard duration
    if ((mDecodedVideoTs/1000) < mPlayBeginTimeMsec) {
        notifyListener_l(MEDIA_INFO, 0, mDecVideoTsStoryBoard/1000);
    }
    else {
        notifyListener_l(MEDIA_INFO, 0,
            (((mDecodedVideoTs+mDecVideoTsStoryBoard)/1000)-mPlayBeginTimeMsec));
    }
}

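// Progress reported to the listener above, in storyboard milliseconds:
//   progress = (mDecodedVideoTs + mDecVideoTsStoryBoard)/1000 - mPlayBeginTimeMsec
// Worked example with illustrative numbers: a clip placed at 10s on the
// storyboard (mDecVideoTsStoryBoard = 10,000,000us) with a 2,000ms begin cut
// and a frame decoded at 3,500,000us reports 13,500 - 2,000 = 11,500ms.
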
void PreviewPlayer::setVideoPostProcessingNode(
        M4VSS3GPP_VideoEffectType type, M4OSA_Bool enable) {

    uint32_t effect = VIDEO_EFFECT_NONE;

    // Map M4VSS3GPP_VideoEffectType to local enum
    switch (type) {
        case M4VSS3GPP_kVideoEffectType_FadeFromBlack:
            effect = VIDEO_EFFECT_FADEFROMBLACK;
            break;

        case M4VSS3GPP_kVideoEffectType_FadeToBlack:
            effect = VIDEO_EFFECT_FADETOBLACK;
            break;

        case M4VSS3GPP_kVideoEffectType_CurtainOpening:
            effect = VIDEO_EFFECT_CURTAINOPEN;
            break;

        case M4VSS3GPP_kVideoEffectType_CurtainClosing:
            effect = VIDEO_EFFECT_CURTAINCLOSE;
            break;

        case M4xVSS_kVideoEffectType_BlackAndWhite:
            effect = VIDEO_EFFECT_BLACKANDWHITE;
            break;

        case M4xVSS_kVideoEffectType_Pink:
            effect = VIDEO_EFFECT_PINK;
            break;

        case M4xVSS_kVideoEffectType_Green:
            effect = VIDEO_EFFECT_GREEN;
            break;

        case M4xVSS_kVideoEffectType_Sepia:
            effect = VIDEO_EFFECT_SEPIA;
            break;

        case M4xVSS_kVideoEffectType_Negative:
            effect = VIDEO_EFFECT_NEGATIVE;
            break;

        case M4xVSS_kVideoEffectType_Framing:
            effect = VIDEO_EFFECT_FRAMING;
            break;

        case M4xVSS_kVideoEffectType_Fifties:
            effect = VIDEO_EFFECT_FIFTIES;
            break;

        case M4xVSS_kVideoEffectType_ColorRGB16:
            effect = VIDEO_EFFECT_COLOR_RGB16;
            break;

        case M4xVSS_kVideoEffectType_Gradient:
            effect = VIDEO_EFFECT_GRADIENT;
            break;

        default:
            effect = VIDEO_EFFECT_NONE;
            break;
    }

    if (enable == M4OSA_TRUE) {
        // If already set, then no need to set again
        if (!(mCurrentVideoEffect & effect)) {
            mCurrentVideoEffect |= effect;
            if (effect == VIDEO_EFFECT_FIFTIES) {
                mIsFiftiesEffectStarted = true;
            }
        }
    } else {
        // Reset only if already set
        if (mCurrentVideoEffect & effect) {
            mCurrentVideoEffect &= ~effect;
        }
    }
}

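// Records the source dimensions of an image clip so that post-processing and
// rendering use the correct width and height.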
status_t PreviewPlayer::setImageClipProperties(uint32_t width, uint32_t height) {
    mVideoWidth = width;
    mVideoHeight = height;
    return OK;
}

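// Applies the currently enabled effects and the rendering mode to the decoded
// video (or still image) buffer before it is handed to the renderer. Only
// planar YUV420 input is handled; semi-planar formats are rejected.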
M4OSA_ERR PreviewPlayer::doVideoPostProcessing() {
    M4OSA_ERR err = M4NO_ERROR;
    vePostProcessParams postProcessParams;
    int32_t colorFormat = 0;

    if (!mIsVideoSourceJpg) {
        sp<MetaData> meta = mVideoSource->getFormat();
        CHECK(meta->findInt32(kKeyColorFormat, &colorFormat));
    } else {
        colorFormat = OMX_COLOR_FormatYUV420Planar;
    }

    // 0x7FA30C00 is a vendor-specific YUV420 semi-planar variant
    if ((colorFormat == OMX_COLOR_FormatYUV420SemiPlanar) ||
            (colorFormat == 0x7FA30C00)) {
        LOGE("doVideoPostProcessing: colorFormat YUV420Sp not supported");
        return M4ERR_UNSUPPORTED_MEDIA_TYPE;
    }

    postProcessParams.vidBuffer = (M4VIFI_UInt8 *)mVideoBuffer->data()
            + mVideoBuffer->range_offset();

    postProcessParams.videoWidth = mVideoWidth;
    postProcessParams.videoHeight = mVideoHeight;
    postProcessParams.timeMs = mDecodedVideoTs / 1000;
    postProcessParams.timeOffset = mDecVideoTsStoryBoard / 1000;
    postProcessParams.effectsSettings = mEffectsSettings;
    postProcessParams.numberEffects = mNumberEffects;
    postProcessParams.outVideoWidth = mOutputVideoWidth;
    postProcessParams.outVideoHeight = mOutputVideoHeight;
    postProcessParams.currentVideoEffect = mCurrentVideoEffect;
    postProcessParams.renderingMode = mRenderingMode;
    if (mIsFiftiesEffectStarted == M4OSA_TRUE) {
        postProcessParams.isFiftiesEffectStarted = M4OSA_TRUE;
        mIsFiftiesEffectStarted = M4OSA_FALSE;
    } else {
        postProcessParams.isFiftiesEffectStarted = M4OSA_FALSE;
    }

    postProcessParams.overlayFrameRGBBuffer = mFrameRGBBuffer;
    postProcessParams.overlayFrameYUVBuffer = mFrameYUVBuffer;
    mVideoRenderer->getBuffer(&(postProcessParams.pOutBuffer),
            &(postProcessParams.outBufferStride));
    err = applyEffectsAndRenderingMode(&postProcessParams,
            mReportedWidth, mReportedHeight);

    return err;
}

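// Reads the first usable video frame after a seek or at the start of playback:
// handles INFO_FORMAT_CHANGED by re-querying the source dimensions and
// re-initializing the renderer, skips empty buffers and buffers that fall
// before the begin cut time, and records the decoded timestamp.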
status_t PreviewPlayer::readFirstVideoFrame() {
    LOGV("PreviewPlayer::readFirstVideoFrame");

    if (!mVideoBuffer) {
        MediaSource::ReadOptions options;
        if (mSeeking) {
            LOGV("LV PLAYER seeking to %lld us (%.2f secs)", mSeekTimeUs,
                    mSeekTimeUs / 1E6);

            options.setSeekTo(
                    mSeekTimeUs, MediaSource::ReadOptions::SEEK_CLOSEST);
        }
        for (;;) {
            status_t err = mVideoSource->read(&mVideoBuffer, &options);
            options.clearSeekTo();

            if (err != OK) {
                CHECK_EQ(mVideoBuffer, NULL);

                if (err == INFO_FORMAT_CHANGED) {
                    LOGV("LV PLAYER VideoSource signalled format change");
                    notifyVideoSize_l();
                    sp<MetaData> meta = mVideoSource->getFormat();

                    CHECK(meta->findInt32(kKeyWidth, &mReportedWidth));
                    CHECK(meta->findInt32(kKeyHeight, &mReportedHeight));

                    if (mVideoRenderer != NULL) {
                        mVideoRendererIsPreview = false;
                        initRenderer_l();
                    }
                    continue;
                }
                LOGV("PreviewPlayer: onVideoEvent EOS reached.");
                mFlags |= VIDEO_AT_EOS;
                postStreamDoneEvent_l(err);
                return OK;
            }

            if (mVideoBuffer->range_length() == 0) {
                // Some decoders, notably the PV AVC software decoder,
                // return spurious empty buffers that we just want to ignore.
                mVideoBuffer->release();
                mVideoBuffer = NULL;
                continue;
            }

            int64_t videoTimeUs;
            CHECK(mVideoBuffer->meta_data()->findInt64(kKeyTime, &videoTimeUs));

            if ((videoTimeUs / 1000) < mPlayBeginTimeMsec) {
                // Buffers before the begin cut time are ignored.
                mVideoBuffer->release();
                mVideoBuffer = NULL;
                continue;
            }

            break;
        }
    }

    int64_t timeUs;
    CHECK(mVideoBuffer->meta_data()->findInt64(kKeyTime, &timeUs));

    {
        Mutex::Autolock autoLock(mMiscStateLock);
        mVideoTimeUs = timeUs;
    }

    mDecodedVideoTs = timeUs;

    return OK;
}

}  // namespace android