Video editor preview code on Honeycomb

Change-Id: I9c3c9cb921ea697ab16732973d26ef9035cda2ee
diff --git a/libvideoeditor/lvpp/Android.mk b/libvideoeditor/lvpp/Android.mk
new file mode 100755
index 0000000..51022bd
--- /dev/null
+++ b/libvideoeditor/lvpp/Android.mk
@@ -0,0 +1,110 @@
+#
+# Copyright (C) 2011 NXP Software
+# Copyright (C) 2011 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+LOCAL_PATH:= $(call my-dir)
+
+#
+# libvideoeditorplayer
+#
+
+include $(CLEAR_VARS)
+
+LOCAL_MODULE:= libvideoeditorplayer
+
+LOCAL_SRC_FILES:=          \
+    VideoEditorTools.cpp \
+    VideoEditorPlayer.cpp \
+    PreviewPlayer.cpp \
+    VideoEditorAudioPlayer.cpp \
+    VideoEditorPreviewController.cpp \
+    VideoEditorSRC.cpp \
+    DummyAudioSource.cpp \
+    DummyVideoSource.cpp \
+    VideoEditorBGAudioProcessing.cpp \
+    PreviewRenderer.cpp
+
+LOCAL_MODULE_TAGS := development
+
+LOCAL_STATIC_LIBRARIES := \
+    libvideoeditor_osal \
+    libstagefright_color_conversion
+
+
+
+LOCAL_SHARED_LIBRARIES := \
+    libbinder          \
+    libutils           \
+    libcutils          \
+    libmedia           \
+    libstagefright  \
+    libstagefright_omx  \
+    libstagefright_foundation \
+    libsurfaceflinger_client \
+    libaudioflinger \
+    libui
+
+
+LOCAL_C_INCLUDES += \
+    $(TOP)/frameworks/base/core/jni \
+    $(TOP)/frameworks/base/include \
+    $(TOP)/frameworks/base/include/media \
+    $(TOP)/frameworks/base/media/libmediaplayerservice \
+    $(TOP)/frameworks/base/media/libstagefright \
+    $(TOP)/frameworks/base/media/libstagefright/include \
+    $(TOP)/frameworks/base/media/libstagefright/rtsp \
+    $(JNI_H_INCLUDE) \
+    $(call include-path-for, corecg graphics) \
+    $(TOP)/external/opencore/extern_libs_v2/khronos/openmax/include \
+    $(TOP)/external/opencore/android \
+    $(TOP)/vendor/qcom/proprietary/qdsp6/mm-core/omxcore/inc \
+    $(TOP)/frameworks/media/libvideoeditor/osal/inc \
+    $(TOP)/frameworks/media/libvideoeditor/vss/common/inc \
+    $(TOP)/frameworks/media/libvideoeditor/vss/mcs/inc \
+    $(TOP)/frameworks/media/libvideoeditor/vss/inc \
+    $(TOP)/frameworks/media/libvideoeditor/vss/stagefrightshells/inc \
+    $(TOP)/frameworks/media/libvideoeditor/lvpp \
+    $(TOP)/frameworks/base/media/jni/mediaeditor \
+    $(TOP)/frameworks/base/services/audioflinger
+
+
+ifeq ($(TARGET_SIMULATOR),true)
+else
+    LOCAL_SHARED_LIBRARIES += libdl
+endif
+
+# All of the shared libraries we link against.
+LOCAL_LDLIBS := \
+    -lpthread -ldl
+
+LOCAL_CFLAGS += -Wno-multichar \
+    -DM4_ENABLE_RENDERINGMODE \
+    -DUSE_STAGEFRIGHT_CODECS \
+    -DUSE_STAGEFRIGHT_AUDIODEC \
+    -DUSE_STAGEFRIGHT_VIDEODEC \
+    -DUSE_STAGEFRIGHT_AUDIOENC \
+    -DUSE_STAGEFRIGHT_VIDEOENC \
+    -DUSE_STAGEFRIGHT_READERS \
+    -DUSE_STAGEFRIGHT_3GPP_READER
+
+
+# Don't prelink this library.  For more efficient code, you may want
+# to add this library to the prelink map and set this to true.
+LOCAL_PRELINK_MODULE := false
+
+include $(BUILD_SHARED_LIBRARY)
+
+#include $(call all-makefiles-under,$(LOCAL_PATH))
diff --git a/libvideoeditor/lvpp/DummyAudioSource.cpp b/libvideoeditor/lvpp/DummyAudioSource.cpp
new file mode 100755
index 0000000..04f8dc9
--- /dev/null
+++ b/libvideoeditor/lvpp/DummyAudioSource.cpp
@@ -0,0 +1,193 @@
+/*
+ * Copyright (C) 2011 NXP Software
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_NDEBUG 1
+#define LOG_TAG "DummyAudioSource"
+#include "utils/Log.h"
+
+#include <media/stagefright/MediaErrors.h>
+#include <media/stagefright/MediaDebug.h>
+#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/MediaBuffer.h>
+#include <media/stagefright/MediaBufferGroup.h>
+#include <media/stagefright/MetaData.h>
+
+
+#include "DummyAudioSource.h"
+
+
+
+/* Android includes*/
+#include <utils/Log.h>
+#include <memory.h>
+
+
+/*---------------------*/
+/*  DEBUG LEVEL SETUP  */
+/*---------------------*/
+#define LOG1 LOGE    /*ERRORS Logging*/
+#define LOG2 LOGV    /*WARNING Logging*/
+#define LOG3 //LOGV    /*COMMENTS Logging*/
+
+/*---------------------*/
+/*  CONSTANTS          */
+/*---------------------*/
+
+
+namespace android {
+
+
+/*---------------------*/
+/*  SEEK SOURCE        */
+/*---------------------*/
+
+//static
+sp<DummyAudioSource> DummyAudioSource::Create(int32_t samplingRate,
+                                              int32_t channelCount,
+                                              int64_t frameDurationUs,
+                                              int64_t audioDurationUs) {
+    LOG2("DummyAudioSource::Create ");
+    sp<DummyAudioSource> aSource = new DummyAudioSource (samplingRate,
+                                                         channelCount,
+                                                         frameDurationUs,
+                                                         audioDurationUs);
+    return aSource;
+}
+
+
+DummyAudioSource::DummyAudioSource (int32_t samplingRate,
+                                    int32_t channelCount,
+                                    int64_t frameDurationUs,
+                                    int64_t audioDurationUs):
+    mSamplingRate(samplingRate),
+    mChannelCount(channelCount),
+    mFrameDurationUs(frameDurationUs),
+    mAudioDurationUs(audioDurationUs),
+    mTimeStampUs(0) ,
+    mNumberOfSamplePerFrame(0),
+    mBufferGroup(NULL){
+    LOG2("DummyAudioSource::DummyAudioSource constructor START");
+    /* Do nothing here? */
+    LOG2("DummyAudioSource::DummyAudioSource");
+    LOG2("DummyAudioSource:: mSamplingRate = %d",samplingRate);
+    LOG2("DummyAudioSource:: mChannelCount = %d",channelCount);
+    LOG2("DummyAudioSource:: frameDurationUs = %lld",frameDurationUs);
+    LOG2("DummyAudioSource:: mAudioDurationUs = %lld",mAudioDurationUs);
+
+    LOG2("DummyAudioSource::DummyAudioSource constructor END");
+}
+
+
+DummyAudioSource::~DummyAudioSource () {
+    /* Do nothing here? */
+    LOG2("DummyAudioSource::~DummyAudioSource");
+}
+
+
+
+status_t DummyAudioSource::start(MetaData *params) {
+    status_t err = OK;
+    LOG2("DummyAudioSource::start START");
+
+    mTimeStampUs = 0;
+    mNumberOfSamplePerFrame = (int32_t) ((1L * mSamplingRate * mFrameDurationUs)/1000000);
+    mNumberOfSamplePerFrame = mNumberOfSamplePerFrame  * mChannelCount;
+
+    mBufferGroup = new MediaBufferGroup;
+    mBufferGroup->add_buffer(
+            new MediaBuffer(mNumberOfSamplePerFrame * sizeof(int16_t)));
+
+    LOG2("DummyAudioSource:: mNumberOfSamplePerFrame = %d",mNumberOfSamplePerFrame);
+    LOG2("DummyAudioSource::start END");
+
+    return err;
+}
+
+
+status_t DummyAudioSource::stop() {
+    status_t err = OK;
+
+    LOG2("DummyAudioSource::stop START");
+
+    delete mBufferGroup;
+    mBufferGroup = NULL;
+
+
+    LOG2("DummyAudioSource::stop END");
+
+    return err;
+}
+
+
+sp<MetaData> DummyAudioSource::getFormat() {
+    LOG2("DummyAudioSource::getFormat");
+
+    sp<MetaData> meta = new MetaData;
+
+    meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_AUDIO_RAW);
+    meta->setInt32(kKeyChannelCount, mChannelCount);
+    meta->setInt32(kKeySampleRate, mSamplingRate);
+    meta->setInt64(kKeyDuration, mFrameDurationUs);
+
+    meta->setCString(kKeyDecoderComponent, "DummyAudioSource");
+
+    return meta;
+}
+
+status_t DummyAudioSource::read( MediaBuffer **out, const MediaSource::ReadOptions *options) {
+    status_t err            = OK;
+    //LOG2("DummyAudioSource::read START");
+    MediaBuffer *buffer;
+    int32_t byteCount;
+    int64_t seekTimeUs;
+    ReadOptions::SeekMode mode;
+
+    if (options && options->getSeekTo(&seekTimeUs, &mode)) {
+        CHECK(seekTimeUs >= 0);
+        mTimeStampUs = seekTimeUs;
+    }
+
+    if (mTimeStampUs >= mAudioDurationUs) {
+        *out = NULL;
+        return ERROR_END_OF_STREAM;
+    }
+
+    err = mBufferGroup->acquire_buffer(&buffer);
+    if (err != OK) {
+        return err;
+    }
+
+    uint8_t *inputPtr =
+        (uint8_t *)buffer->data() + buffer->range_offset();
+
+    //TODO: replace with memset
+    for (byteCount = 0; byteCount < (mNumberOfSamplePerFrame << 1); byteCount++) {
+        inputPtr[byteCount] = 0;
+    }
+
+    buffer->set_range(buffer->range_offset(), (mNumberOfSamplePerFrame << 1));
+
+    buffer->meta_data()->setInt64(kKeyTime, mTimeStampUs);
+    LOG2("DummyAudioSource::read  Buffer_offset  = %d,"
+            "Buffer_Size = %d, mTimeStampUs = %lld",
+             buffer->range_offset(),buffer->size(),mTimeStampUs);
+    mTimeStampUs = mTimeStampUs + mFrameDurationUs;
+    *out = buffer;
+    return err;
+}
+
+}// namespace android
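For reference, the read() path above returns fixed-size frames of silence. With the 32 kHz / stereo / 20 ms parameters that PreviewPlayer passes to Create(), the arithmetic in start() yields 1280 samples (2560 bytes) per frame; a minimal standalone sketch of that math (hypothetical, not part of the patch):

    // Sketch of DummyAudioSource's frame-size math (hypothetical, not part
    // of the patch). Uses the 32000 Hz / 2 ch / 20 ms values that
    // PreviewPlayer passes to DummyAudioSource::Create().
    #include <cstdint>
    #include <cstdio>

    int main() {
        int32_t samplingRate    = 32000;   // Hz
        int32_t channelCount    = 2;
        int64_t frameDurationUs = 20000;   // 20 ms per frame

        // Mirrors start(): samples per frame, summed over all channels.
        int32_t samplesPerFrame =
                (int32_t)((1L * samplingRate * frameDurationUs) / 1000000);
        samplesPerFrame *= channelCount;                   // 640 * 2 = 1280

        // read() fills one such frame with zeros (16-bit PCM).
        size_t bytesPerFrame = samplesPerFrame * sizeof(int16_t);  // 2560
        printf("samples=%d bytes=%zu\n", samplesPerFrame, bytesPerFrame);
        return 0;
    }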
diff --git a/libvideoeditor/lvpp/DummyAudioSource.h b/libvideoeditor/lvpp/DummyAudioSource.h
new file mode 100755
index 0000000..a562bc1
--- /dev/null
+++ b/libvideoeditor/lvpp/DummyAudioSource.h
@@ -0,0 +1,77 @@
+/*
+ * Copyright (C) 2011 NXP Software
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef DUMMY_AUDIOSOURCE_H_
+
+#define DUMMY_AUDIOSOURCE_H_
+
+#include <utils/RefBase.h>
+#include <utils/threads.h>
+#include <media/stagefright/MediaBufferGroup.h>
+#include <media/stagefright/MediaSource.h>
+#include <media/stagefright/DataSource.h>
+
+
+namespace android {
+
+
+class MediaBuffer;
+class MetaData;
+struct MediaBufferGroup;
+
+
+
+struct DummyAudioSource : public MediaSource {
+
+public:
+    static sp<DummyAudioSource> Create(int32_t samplingRate,
+                                       int32_t channelCount,
+                                       int64_t frameDurationUs,
+                                       int64_t audioDurationUs);
+    virtual status_t start(MetaData *params = NULL);
+    virtual status_t stop();
+    virtual sp<MetaData> getFormat();
+    virtual status_t read(MediaBuffer **buffer,
+                          const MediaSource::ReadOptions *options = NULL);
+
+protected:
+    DummyAudioSource (int32_t samplingRate,
+                      int32_t channelCount,
+                      int64_t frameDurationUs,
+                      int64_t audioDurationUs);
+    virtual ~DummyAudioSource();
+
+private:
+    int32_t mSamplingRate;
+    int32_t mChannelCount;
+    int64_t mFrameDurationUs;
+    int32_t mNumberOfSamplePerFrame;
+    int64_t mAudioDurationUs;
+    int64_t mTimeStampUs;
+    int32_t mNbBuffer;
+    MediaBufferGroup *mBufferGroup;
+
+    DummyAudioSource(const DummyAudioSource &);
+    DummyAudioSource &operator=(const DummyAudioSource &);
+
+};
+
+}//namespace android
+
+
+#endif //DUMMY_AUDIOSOURCE_H_
+
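The interface above is exercised as Create() -> start() -> repeated read() until ERROR_END_OF_STREAM -> stop(); a hedged usage sketch (the drainSilence helper is made up and the code builds only inside this source tree; not part of the patch):

    // Usage sketch for DummyAudioSource (hypothetical; drainSilence is not
    // part of the patch). Drains 100 ms of silence, i.e. five 20 ms frames.
    #include <media/stagefright/MediaBuffer.h>
    #include <media/stagefright/MediaErrors.h>
    #include "DummyAudioSource.h"

    namespace android {

    void drainSilence() {
        sp<DummyAudioSource> src =
                DummyAudioSource::Create(32000, 2, 20000, 100000);
        src->start();

        MediaBuffer *buffer = NULL;
        // read() returns OK once per frame, then ERROR_END_OF_STREAM.
        while (src->read(&buffer, NULL) == OK) {
            buffer->release();   // every frame is zero-filled 16-bit PCM
            buffer = NULL;
        }
        src->stop();
    }

    }  // namespace android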
diff --git a/libvideoeditor/lvpp/DummyVideoSource.cpp b/libvideoeditor/lvpp/DummyVideoSource.cpp
new file mode 100755
index 0000000..4dbdc11
--- /dev/null
+++ b/libvideoeditor/lvpp/DummyVideoSource.cpp
@@ -0,0 +1,165 @@
+/*
+ * Copyright (C) 2011 NXP Software
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_NDEBUG 1
+#define LOG_TAG "DummyVideoSource"
+#include "utils/Log.h"
+
+#include <media/stagefright/MediaErrors.h>
+#include <media/stagefright/MediaDebug.h>
+#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/MediaBuffer.h>
+#include <media/stagefright/MediaBufferGroup.h>
+#include <media/stagefright/MetaData.h>
+
+#include "DummyVideoSource.h"
+
+/* Android includes*/
+#include <utils/Log.h>
+#include <memory.h>
+
+
+#define LOG1 LOGE    /*ERRORS Logging*/
+#define LOG2 LOGV    /*WARNING Logging*/
+#define LOG3 //LOGV    /*COMMENTS Logging*/
+
+
+namespace android {
+
+
+sp<DummyVideoSource> DummyVideoSource::Create (
+            uint32_t width, uint32_t height,
+            uint64_t clipDuration, const char *imageUri) {
+    LOG2("DummyVideoSource::Create ");
+    sp<DummyVideoSource> vSource = new DummyVideoSource (
+                         width, height, clipDuration, imageUri);
+    return vSource;
+}
+
+
+DummyVideoSource::DummyVideoSource (
+            uint32_t width, uint32_t height,
+            uint64_t clipDuration, const char *imageUri) {
+
+    LOG2("DummyVideoSource::DummyVideoSource constructor START");
+    mFrameWidth = width;
+    mFrameHeight = height;
+    mImageClipDuration = clipDuration;
+    mUri = imageUri;
+    mImageBuffer = NULL;
+
+    LOG2("DummyVideoSource::DummyVideoSource constructor END");
+}
+
+
+DummyVideoSource::~DummyVideoSource () {
+    /* Do nothing here? */
+    LOG2("DummyVideoSource::~DummyVideoSource");
+}
+
+
+
+status_t DummyVideoSource::start(MetaData *params) {
+    status_t err = OK;
+    LOG2("DummyVideoSource::start START, %s", mUri);
+    //get the frame buffer from the rgb file and store into a MediaBuffer
+    err = LvGetImageThumbNail((const char *)mUri,
+                          mFrameHeight , mFrameWidth ,
+                          (M4OSA_Void **)&mImageBuffer);
+
+    mIsFirstImageFrame = true;
+    mImageSeekTime = 0;
+    mImagePlayStartTime = 0;
+    mFrameTimeUs = 0;
+    LOG2("DummyVideoSource::start END");
+
+    return err;
+}
+
+
+status_t DummyVideoSource::stop() {
+    status_t err = OK;
+
+    LOG2("DummyVideoSource::stop START");
+    if (mImageBuffer != NULL) {
+        M4OSA_free((M4OSA_MemAddr32)mImageBuffer);
+        mImageBuffer = NULL;
+    }
+    LOG2("DummyVideoSource::stop END");
+
+    return err;
+}
+
+
+sp<MetaData> DummyVideoSource::getFormat() {
+    LOG2("DummyVideoSource::getFormat");
+
+    sp<MetaData> meta = new MetaData;
+
+    meta->setInt32(kKeyColorFormat, OMX_COLOR_FormatYUV420Planar);
+    meta->setInt32(kKeyWidth, mFrameWidth);
+    meta->setInt32(kKeyHeight, mFrameHeight);
+    meta->setInt64(kKeyDuration, mImageClipDuration);
+    meta->setCString(kKeyDecoderComponent, "DummyVideoSource");
+
+    return meta;
+}
+
+status_t DummyVideoSource::read(
+                        MediaBuffer **out,
+                        const MediaSource::ReadOptions *options) {
+    status_t err = OK;
+    MediaBuffer *buffer;
+    LOG2("DummyVideoSource::read START");
+
+    bool seeking = false;
+    int64_t seekTimeUs;
+    ReadOptions::SeekMode seekMode;
+
+    if (options && options->getSeekTo(&seekTimeUs, &seekMode)) {
+        seeking = true;
+        mImageSeekTime = seekTimeUs;
+    }
+
+    if ((mImageSeekTime == mImageClipDuration) || (mFrameTimeUs == mImageClipDuration)) {
+        LOG2("DummyVideoSource::read() End of stream reached; return NULL buffer");
+        *out = NULL;
+        return ERROR_END_OF_STREAM;
+    }
+
+    buffer = new MediaBuffer(mImageBuffer, (mFrameWidth*mFrameHeight*1.5));
+
+    //set timestamp of buffer
+    if (mIsFirstImageFrame) {
+        M4OSA_clockGetTime(&mImagePlayStartTime, 1000); //1000 time scale for time in ms
+        mFrameTimeUs =  (mImageSeekTime + 1);
+        LOG2("DummyVideoSource::read() jpg 1st frame timeUs = %lld, begin cut time = %ld", mFrameTimeUs, mImageSeekTime);
+        mIsFirstImageFrame = false;
+    } else {
+        M4OSA_Time  currentTimeMs;
+        M4OSA_clockGetTime(&currentTimeMs, 1000);
+
+        mFrameTimeUs = mImageSeekTime + (currentTimeMs - mImagePlayStartTime)*1000;
+        LOG2("DummyVideoSource::read() jpg frame timeUs = %lld", mFrameTimeUs);
+    }
+    buffer->meta_data()->setInt64(kKeyTime, mFrameTimeUs);
+    buffer->set_range(buffer->range_offset(), mFrameWidth*mFrameHeight*1.5);
+    *out = buffer;
+    return err;
+}
+
+}// namespace android
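The timestamp logic in read() above pins the first frame just past the seek point and derives every later timestamp from the wall clock, so a still image plays out in real time; a minimal sketch of the same rule (the StillImageClock type is hypothetical, not part of the patch):

    // Sketch of DummyVideoSource's still-image timestamping (hypothetical,
    // not part of the patch). nowMs stands in for M4OSA_clockGetTime(.., 1000).
    #include <cstdint>

    struct StillImageClock {
        int64_t seekTimeUs  = 0;     // mImageSeekTime
        int64_t startTimeMs = 0;     // mImagePlayStartTime (wall clock, ms)
        bool    firstFrame  = true;  // mIsFirstImageFrame

        int64_t frameTimeUs(int64_t nowMs) {
            if (firstFrame) {
                firstFrame  = false;
                startTimeMs = nowMs;
                return seekTimeUs + 1;   // first frame: just past the seek
            }
            // Later frames: seek time plus elapsed wall-clock time (ms -> us).
            return seekTimeUs + (nowMs - startTimeMs) * 1000;
        }
    };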
diff --git a/libvideoeditor/lvpp/DummyVideoSource.h b/libvideoeditor/lvpp/DummyVideoSource.h
new file mode 100755
index 0000000..6fcc0a9
--- /dev/null
+++ b/libvideoeditor/lvpp/DummyVideoSource.h
@@ -0,0 +1,82 @@
+/*
+ * Copyright (C) 2011 NXP Software
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef DUMMY_VIDEOSOURCE_H_
+
+#define DUMMY_VIDEOSOURCE_H_
+
+#include <utils/RefBase.h>
+#include <utils/threads.h>
+#include <media/stagefright/MediaBufferGroup.h>
+#include <media/stagefright/MediaSource.h>
+#include <media/stagefright/DataSource.h>
+#include "OMX_IVCommon.h"
+#include "VideoEditorTools.h"
+#include "M4OSA_Clock.h"
+#include "M4OSA_Time.h"
+#include "M4OSA_Types.h"
+
+
+namespace android {
+
+
+class  MediaBuffer;
+class  MetaData;
+struct MediaBufferGroup;
+
+struct DummyVideoSource : public MediaSource {
+
+public:
+    static sp<DummyVideoSource> Create(uint32_t width,
+                                       uint32_t height,
+                                       uint64_t clipDuration,
+                                       const char *imageUri);
+
+    virtual status_t start(MetaData *params = NULL);
+    virtual status_t stop();
+    virtual sp<MetaData> getFormat();
+    virtual status_t read(MediaBuffer **buffer,
+                          const MediaSource::ReadOptions *options = NULL);
+
+protected:
+    DummyVideoSource (uint32_t width, uint32_t height,
+                      uint64_t clipDuration, const char *imageUri);
+    virtual ~DummyVideoSource();
+
+private:
+    uint32_t mFrameWidth;
+    uint32_t mFrameHeight;
+    uint64_t mImageClipDuration;
+    const char *mUri;
+    int64_t mFrameTimeUs;
+    MediaBufferGroup *mBufferGroup;
+    bool mIsFirstImageFrame;
+    void *mImageBuffer;
+    M4OSA_Time mImagePlayStartTime;
+    uint32_t mImageSeekTime;
+
+    DummyVideoSource(const DummyVideoSource &);
+    DummyVideoSource &operator=(const DummyVideoSource &);
+
+};
+
+
+}//namespace android
+
+
+#endif //DUMMY_VIDEOSOURCE_H_
+
diff --git a/libvideoeditor/lvpp/PreviewPlayer.cpp b/libvideoeditor/lvpp/PreviewPlayer.cpp
new file mode 100755
index 0000000..2f46e4c
--- /dev/null
+++ b/libvideoeditor/lvpp/PreviewPlayer.cpp
@@ -0,0 +1,1670 @@
+/*
+ * Copyright (C) 2011 NXP Software
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_NDEBUG 1
+#define LOG_TAG "PreviewPlayer"
+#include <utils/Log.h>
+
+#include <dlfcn.h>
+
+#include "include/ARTSPController.h"
+#include "PreviewPlayer.h"
+#include "DummyAudioSource.h"
+#include "DummyVideoSource.h"
+#include "VideoEditorSRC.h"
+#include "include/LiveSession.h"
+#include "include/NuCachedSource2.h"
+#include "include/ThrottledSource.h"
+
+
+#include "PreviewRenderer.h"
+
+#include <binder/IPCThreadState.h>
+#include <media/stagefright/DataSource.h>
+#include <media/stagefright/FileSource.h>
+#include <media/stagefright/MediaBuffer.h>
+#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/MediaExtractor.h>
+#include <media/stagefright/MediaDebug.h>
+#include <media/stagefright/MediaSource.h>
+#include <media/stagefright/MetaData.h>
+#include <media/stagefright/OMXCodec.h>
+
+#include <surfaceflinger/Surface.h>
+#include <media/stagefright/foundation/ALooper.h>
+
+namespace android {
+
+
+struct PreviewPlayerEvent : public TimedEventQueue::Event {
+    PreviewPlayerEvent(
+            PreviewPlayer *player,
+            void (PreviewPlayer::*method)())
+        : mPlayer(player),
+          mMethod(method) {
+    }
+
+protected:
+    virtual ~PreviewPlayerEvent() {}
+
+    virtual void fire(TimedEventQueue *queue, int64_t /* now_us */) {
+        (mPlayer->*mMethod)();
+    }
+
+private:
+    PreviewPlayer *mPlayer;
+    void (PreviewPlayer::*mMethod)();
+
+    PreviewPlayerEvent(const PreviewPlayerEvent &);
+    PreviewPlayerEvent &operator=(const PreviewPlayerEvent &);
+};
+
+
+struct PreviewLocalRenderer : public PreviewPlayerRenderer {
+    PreviewLocalRenderer(
+            bool previewOnly,
+            OMX_COLOR_FORMATTYPE colorFormat,
+            const sp<Surface> &surface,
+            size_t displayWidth, size_t displayHeight,
+            size_t decodedWidth, size_t decodedHeight,
+            int32_t rotationDegrees = 0)
+        : mTarget(NULL) {
+            init(previewOnly,
+                 colorFormat, surface,
+                 displayWidth, displayHeight,
+                 decodedWidth, decodedHeight,
+                 rotationDegrees);
+    }
+
+    virtual void render(MediaBuffer *buffer) {
+        render((const uint8_t *)buffer->data() + buffer->range_offset(),
+               buffer->range_length());
+    }
+
+    void render(const void *data, size_t size) {
+        mTarget->render(data, size, NULL);
+    }
+    void render() {
+        mTarget->renderYV12();
+    }
+    void getBuffer(uint8_t **data, size_t *stride) {
+        mTarget->getBufferYV12(data, stride);
+    }
+
+protected:
+    virtual ~PreviewLocalRenderer() {
+        delete mTarget;
+        mTarget = NULL;
+    }
+
+private:
+    PreviewRenderer *mTarget;
+
+    void init(
+            bool previewOnly,
+            OMX_COLOR_FORMATTYPE colorFormat,
+            const sp<Surface> &surface,
+            size_t displayWidth, size_t displayHeight,
+            size_t decodedWidth, size_t decodedHeight,
+            int32_t rotationDegrees = 0);
+
+    PreviewLocalRenderer(const PreviewLocalRenderer &);
+    PreviewLocalRenderer &operator=(const PreviewLocalRenderer &);
+};
+
+void PreviewLocalRenderer::init(
+        bool previewOnly,
+        OMX_COLOR_FORMATTYPE colorFormat,
+        const sp<Surface> &surface,
+        size_t displayWidth, size_t displayHeight,
+        size_t decodedWidth, size_t decodedHeight,
+        int32_t rotationDegrees) {
+    mTarget = new PreviewRenderer(
+            colorFormat, surface, displayWidth, displayHeight,
+            decodedWidth, decodedHeight, rotationDegrees);
+}
+
+PreviewPlayer::PreviewPlayer()
+    : AwesomePlayer(),
+      mFrameRGBBuffer(NULL),
+      mFrameYUVBuffer(NULL) {
+
+    mVideoRenderer = NULL;
+    mLastVideoBuffer = NULL;
+    mSuspensionState = NULL;
+    mEffectsSettings = NULL;
+    mAudioMixStoryBoardTS = 0;
+    mCurrentMediaBeginCutTime = 0;
+    mCurrentMediaVolumeValue = 0;
+    mNumberEffects = 0;
+    mDecodedVideoTs = 0;
+    mDecVideoTsStoryBoard = 0;
+    mCurrentVideoEffect = VIDEO_EFFECT_NONE;
+    mProgressCbInterval = 0;
+    mNumberDecVideoFrames = 0;
+
+    mVideoEvent = new PreviewPlayerEvent(this, &PreviewPlayer::onVideoEvent);
+    mVideoEventPending = false;
+    mStreamDoneEvent = new PreviewPlayerEvent(this,
+     &AwesomePlayer::onStreamDone);
+
+    mStreamDoneEventPending = false;
+
+    mCheckAudioStatusEvent = new PreviewPlayerEvent(
+            this, &AwesomePlayer::onCheckAudioStatus);
+
+    mAudioStatusEventPending = false;
+
+    mProgressCbEvent = new PreviewPlayerEvent(this,
+     &PreviewPlayer::onProgressCbEvent);
+
+    mProgressCbEventPending = false;
+    mResizedVideoBuffer = NULL;
+    mVideoResizedOrCropped = false;
+    mRenderingMode = (M4xVSS_MediaRendering)MEDIA_RENDERING_INVALID;
+    mIsFiftiesEffectStarted = false;
+    reset();
+}
+
+PreviewPlayer::~PreviewPlayer() {
+
+    if (mQueueStarted) {
+        mQueue.stop();
+    }
+
+    reset();
+
+    if(mResizedVideoBuffer != NULL) {
+        M4OSA_free((M4OSA_MemAddr32)(mResizedVideoBuffer->data()));
+        mResizedVideoBuffer = NULL;
+    }
+
+    mVideoRenderer.clear();
+    mVideoRenderer = NULL;
+}
+
+void PreviewPlayer::cancelPlayerEvents(bool keepBufferingGoing) {
+    mQueue.cancelEvent(mVideoEvent->eventID());
+    mVideoEventPending = false;
+    mQueue.cancelEvent(mStreamDoneEvent->eventID());
+    mStreamDoneEventPending = false;
+    mQueue.cancelEvent(mCheckAudioStatusEvent->eventID());
+    mAudioStatusEventPending = false;
+
+    mQueue.cancelEvent(mProgressCbEvent->eventID());
+    mProgressCbEventPending = false;
+}
+
+status_t PreviewPlayer::setDataSource(
+        const char *uri, const KeyedVector<String8, String8> *headers) {
+    Mutex::Autolock autoLock(mLock);
+    return setDataSource_l(uri, headers);
+}
+
+status_t PreviewPlayer::setDataSource_l(
+        const char *uri, const KeyedVector<String8, String8> *headers) {
+    reset_l();
+
+    mUri = uri;
+
+    if (headers) {
+        mUriHeaders = *headers;
+    }
+
+    // The actual work will be done during preparation in the call to
+    // ::finishSetDataSource_l to avoid blocking the calling thread in
+    // setDataSource for any significant time.
+    return OK;
+}
+
+status_t PreviewPlayer::setDataSource_l(const sp<MediaExtractor> &extractor) {
+    bool haveAudio = false;
+    bool haveVideo = false;
+    for (size_t i = 0; i < extractor->countTracks(); ++i) {
+        sp<MetaData> meta = extractor->getTrackMetaData(i);
+
+        const char *mime;
+        CHECK(meta->findCString(kKeyMIMEType, &mime));
+
+        if (!haveVideo && !strncasecmp(mime, "video/", 6)) {
+            setVideoSource(extractor->getTrack(i));
+            haveVideo = true;
+        } else if (!haveAudio && !strncasecmp(mime, "audio/", 6)) {
+            setAudioSource(extractor->getTrack(i));
+            haveAudio = true;
+
+            if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_VORBIS)) {
+                // Only do this for vorbis audio, none of the other audio
+                // formats even support this ringtone specific hack and
+                // retrieving the metadata on some extractors may turn out
+                // to be very expensive.
+                sp<MetaData> fileMeta = extractor->getMetaData();
+                int32_t loop;
+                if (fileMeta != NULL
+                        && fileMeta->findInt32(kKeyAutoLoop, &loop)
+                         && loop != 0) {
+                    mFlags |= AUTO_LOOPING;
+                }
+            }
+        }
+
+        if (haveAudio && haveVideo) {
+            break;
+        }
+    }
+
+    /* Add the support for Dummy audio*/
+    if (!haveAudio) {
+        LOGV("PreviewPlayer: setDataSource_l dummy audio creation started");
+
+        mAudioTrack = DummyAudioSource::Create(32000, 2, 20000,
+                                              ((mPlayEndTimeMsec)*1000));
+        LOGV("PreviewPlayer: setDataSource_l Dummyauiosource created");
+        if(mAudioTrack != NULL) {
+            haveAudio = true;
+        }
+    }
+
+    if (!haveAudio && !haveVideo) {
+        return UNKNOWN_ERROR;
+    }
+
+    mExtractorFlags = extractor->flags();
+    return OK;
+}
+
+status_t PreviewPlayer::setDataSource_l_jpg() {
+    LOGV("PreviewPlayer: setDataSource_l_jpg started");
+
+    mAudioSource = DummyAudioSource::Create(32000, 2, 20000,
+                                          ((mPlayEndTimeMsec)*1000));
+    LOGV("PreviewPlayer: setDataSource_l_jpg Dummyaudiosource created");
+    if(mAudioSource != NULL) {
+        setAudioSource(mAudioSource);
+    }
+    status_t error = mAudioSource->start();
+    if (error != OK) {
+        LOGV("Error starting dummy audio source");
+        mAudioSource.clear();
+        return error;
+    }
+
+    mDurationUs = (mPlayEndTimeMsec - mPlayBeginTimeMsec)*1000;
+
+    mVideoSource = DummyVideoSource::Create(mVideoWidth, mVideoHeight,
+                                            mDurationUs, mUri);
+    setVideoSource(mVideoSource);
+    status_t err1 = mVideoSource->start();
+    if (err1 != OK) {
+        mVideoSource.clear();
+        return err1;
+    }
+
+    mIsVideoSourceJpg = true;
+    return OK;
+}
+
+void PreviewPlayer::reset() {
+    Mutex::Autolock autoLock(mLock);
+    reset_l();
+}
+
+void PreviewPlayer::reset_l() {
+
+    if (mFlags & PREPARING) {
+        mFlags |= PREPARE_CANCELLED;
+    }
+
+    while (mFlags & PREPARING) {
+        mPreparedCondition.wait(mLock);
+    }
+
+    cancelPlayerEvents();
+    mAudioTrack.clear();
+    mVideoTrack.clear();
+
+    // Shutdown audio first, so that the response to the reset request
+    // appears to happen instantaneously as far as the user is concerned.
+    // If we did this later, audio would continue playing while we
+    // shut down the video-related resources and the player would appear
+    // less responsive to a reset request.
+    if (mAudioPlayer == NULL && mAudioSource != NULL) {
+        // If we had an audio player, it would have effectively
+        // taken possession of the audio source and stopped it when
+        // _it_ is stopped. Otherwise this is still our responsibility.
+        mAudioSource->stop();
+    }
+    mAudioSource.clear();
+
+    mTimeSource = NULL;
+
+    delete mAudioPlayer;
+    mAudioPlayer = NULL;
+
+    if (mLastVideoBuffer) {
+        mLastVideoBuffer->release();
+        mLastVideoBuffer = NULL;
+    }
+
+    if (mVideoBuffer) {
+        mVideoBuffer->release();
+        mVideoBuffer = NULL;
+    }
+
+    if (mVideoSource != NULL) {
+        mVideoSource->stop();
+
+        // The following hack is necessary to ensure that the OMX
+        // component is completely released by the time we may try
+        // to instantiate it again.
+        wp<MediaSource> tmp = mVideoSource;
+        mVideoSource.clear();
+        while (tmp.promote() != NULL) {
+            usleep(1000);
+        }
+        IPCThreadState::self()->flushCommands();
+    }
+
+    mDurationUs = -1;
+    mFlags = 0;
+    mExtractorFlags = 0;
+    mVideoWidth = mVideoHeight = -1;
+    mTimeSourceDeltaUs = 0;
+    mVideoTimeUs = 0;
+
+    mSeeking = false;
+    mSeekNotificationSent = false;
+    mSeekTimeUs = 0;
+
+    mUri.setTo("");
+    mUriHeaders.clear();
+
+    mFileSource.clear();
+
+    delete mSuspensionState;
+    mSuspensionState = NULL;
+
+    mCurrentVideoEffect = VIDEO_EFFECT_NONE;
+    mIsVideoSourceJpg = false;
+    mFrameRGBBuffer = NULL;
+    if(mFrameYUVBuffer != NULL) {
+        M4OSA_free((M4OSA_MemAddr32)mFrameYUVBuffer);
+        mFrameYUVBuffer = NULL;
+    }
+}
+
+void PreviewPlayer::partial_reset_l() {
+
+    if (mLastVideoBuffer) {
+        mLastVideoBuffer->release();
+        mLastVideoBuffer = NULL;
+    }
+
+    /* call base struct */
+    AwesomePlayer::partial_reset_l();
+
+}
+
+status_t PreviewPlayer::play() {
+    Mutex::Autolock autoLock(mLock);
+
+    mFlags &= ~CACHE_UNDERRUN;
+
+    return play_l();
+}
+
+status_t PreviewPlayer::play_l() {
+    VideoEditorAudioPlayer *mVePlayer = (VideoEditorAudioPlayer*)mAudioPlayer;
+    if (mFlags & PLAYING) {
+        return OK;
+    }
+    mStartNextPlayer = false;
+
+    if (!(mFlags & PREPARED)) {
+        status_t err = prepare_l();
+
+        if (err != OK) {
+            return err;
+        }
+    }
+
+    mFlags |= PLAYING;
+    mFlags |= FIRST_FRAME;
+
+    bool deferredAudioSeek = false;
+
+    if (mAudioSource != NULL) {
+        if (mAudioPlayer == NULL) {
+            if (mAudioSink != NULL) {
+
+                mAudioPlayer = new VideoEditorAudioPlayer(mAudioSink, this);
+                mVePlayer =
+                          (VideoEditorAudioPlayer*)mAudioPlayer;
+
+                mAudioPlayer->setSource(mAudioSource);
+
+                mVePlayer->setAudioMixSettings(
+                 mPreviewPlayerAudioMixSettings);
+
+                mVePlayer->setAudioMixPCMFileHandle(
+                 mAudioMixPCMFileHandle);
+
+                mVePlayer->setAudioMixStoryBoardSkimTimeStamp(
+                 mAudioMixStoryBoardTS, mCurrentMediaBeginCutTime,
+                 mCurrentMediaVolumeValue);
+
+                // We've already started the MediaSource in order to enable
+                // the prefetcher to read its data.
+                status_t err = mVePlayer->start(
+                        true /* sourceAlreadyStarted */);
+
+                if (err != OK) {
+                    delete mAudioPlayer;
+                    mAudioPlayer = NULL;
+
+                    mFlags &= ~(PLAYING | FIRST_FRAME);
+                    return err;
+                }
+
+                mTimeSource = mVePlayer; //mAudioPlayer;
+
+                deferredAudioSeek = true;
+                mWatchForAudioSeekComplete = false;
+                mWatchForAudioEOS = true;
+            }
+        } else {
+            mVePlayer->resume();
+        }
+
+    }
+
+    if (mTimeSource == NULL && mAudioPlayer == NULL) {
+        mTimeSource = &mSystemTimeSource;
+    }
+
+    if (mVideoSource != NULL) {
+        // Kick off video playback
+        postVideoEvent_l();
+    }
+
+    if (deferredAudioSeek) {
+        // If there was a seek request while we were paused
+        // and we're just starting up again, honor the request now.
+        seekAudioIfNecessary_l();
+    }
+
+    if (mFlags & AT_EOS) {
+        // Legacy behaviour, if a stream finishes playing and then
+        // is started again, we play from the start...
+        seekTo_l(0);
+    }
+
+    return OK;
+}
+
+
+void PreviewPlayer::initRenderer_l() {
+    if (mSurface != NULL || mISurface != NULL) {
+        sp<MetaData> meta = mVideoSource->getFormat();
+
+        int32_t format;
+        const char *component;
+        int32_t decodedWidth, decodedHeight;
+        CHECK(meta->findInt32(kKeyColorFormat, &format));
+        CHECK(meta->findCString(kKeyDecoderComponent, &component));
+        CHECK(meta->findInt32(kKeyWidth, &decodedWidth));
+        CHECK(meta->findInt32(kKeyHeight, &decodedHeight));
+
+        // Must ensure that mVideoRenderer's destructor is actually executed
+        // before creating a new one.
+        IPCThreadState::self()->flushCommands();
+
+        // always use localrenderer since decoded buffers are modified
+        // by postprocessing module
+        // Other decoders are instantiated locally and as a consequence
+        // allocate their buffers in local address space.
+        if(mVideoRenderer == NULL) {
+
+            mVideoRenderer = new PreviewLocalRenderer(
+                false,  // previewOnly
+                (OMX_COLOR_FORMATTYPE)format,
+                mSurface,
+                mOutputVideoWidth, mOutputVideoHeight,
+                mOutputVideoWidth, mOutputVideoHeight);
+        }
+    }
+}
+
+
+void PreviewPlayer::setISurface(const sp<ISurface> &isurface) {
+    Mutex::Autolock autoLock(mLock);
+    mISurface = isurface;
+}
+
+
+status_t PreviewPlayer::seekTo(int64_t timeUs) {
+
+    if ((mExtractorFlags & MediaExtractor::CAN_SEEK) || (mIsVideoSourceJpg)) {
+        Mutex::Autolock autoLock(mLock);
+        return seekTo_l(timeUs);
+    }
+
+    return OK;
+}
+
+
+status_t PreviewPlayer::getVideoDimensions(
+        int32_t *width, int32_t *height) const {
+    Mutex::Autolock autoLock(mLock);
+
+    if (mVideoWidth < 0 || mVideoHeight < 0) {
+        return UNKNOWN_ERROR;
+    }
+
+    *width = mVideoWidth;
+    *height = mVideoHeight;
+
+    return OK;
+}
+
+
+status_t PreviewPlayer::initAudioDecoder() {
+    sp<MetaData> meta = mAudioTrack->getFormat();
+    const char *mime;
+    CHECK(meta->findCString(kKeyMIMEType, &mime));
+
+    if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_RAW)) {
+        mAudioSource = mAudioTrack;
+    } else {
+        sp<MediaSource> aRawSource;
+        aRawSource = OMXCodec::Create(
+                mClient.interface(), mAudioTrack->getFormat(),
+                false, // createEncoder
+                mAudioTrack);
+
+        if(aRawSource != NULL) {
+            LOGV("initAudioDecoder: new VideoEditorSRC");
+            mAudioSource = new VideoEditorSRC(aRawSource);
+        }
+    }
+
+    if (mAudioSource != NULL) {
+        int64_t durationUs;
+        if (mAudioTrack->getFormat()->findInt64(kKeyDuration, &durationUs)) {
+            Mutex::Autolock autoLock(mMiscStateLock);
+            if (mDurationUs < 0 || durationUs > mDurationUs) {
+                mDurationUs = durationUs;
+            }
+        }
+        status_t err = mAudioSource->start();
+
+        if (err != OK) {
+            mAudioSource.clear();
+            return err;
+        }
+    } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_QCELP)) {
+        // For legacy reasons we're simply going to ignore the absence
+        // of an audio decoder for QCELP instead of aborting playback
+        // altogether.
+        return OK;
+    }
+
+    return mAudioSource != NULL ? OK : UNKNOWN_ERROR;
+}
+
+
+status_t PreviewPlayer::initVideoDecoder(uint32_t flags) {
+
+    mVideoSource = OMXCodec::Create(
+            mClient.interface(), mVideoTrack->getFormat(),
+            false,
+            mVideoTrack,
+            NULL, flags);
+
+    if (mVideoSource != NULL) {
+        int64_t durationUs;
+        if (mVideoTrack->getFormat()->findInt64(kKeyDuration, &durationUs)) {
+            Mutex::Autolock autoLock(mMiscStateLock);
+            if (mDurationUs < 0 || durationUs > mDurationUs) {
+                mDurationUs = durationUs;
+            }
+        }
+
+        CHECK(mVideoTrack->getFormat()->findInt32(kKeyWidth, &mVideoWidth));
+        CHECK(mVideoTrack->getFormat()->findInt32(kKeyHeight, &mVideoHeight));
+
+        status_t err = mVideoSource->start();
+
+        if (err != OK) {
+            mVideoSource.clear();
+            return err;
+        }
+    }
+
+    return mVideoSource != NULL ? OK : UNKNOWN_ERROR;
+}
+
+
+void PreviewPlayer::onVideoEvent() {
+    uint32_t i=0;
+    bool bAppliedVideoEffect = false;
+    M4OSA_ERR err1 = M4NO_ERROR;
+    int64_t imageFrameTimeUs = 0;
+
+    Mutex::Autolock autoLock(mLock);
+    if (!mVideoEventPending) {
+        // The event has been cancelled in reset_l() but had already
+        // been scheduled for execution at that time.
+        return;
+    }
+    mVideoEventPending = false;
+
+    TimeSource *ts_st =  &mSystemTimeSource;
+    int64_t timeStartUs = ts_st->getRealTimeUs();
+
+    if (mSeeking) {
+        if (mLastVideoBuffer) {
+            mLastVideoBuffer->release();
+            mLastVideoBuffer = NULL;
+        }
+
+
+        if(mAudioSource != NULL) {
+
+            // We're going to seek the video source first, followed by
+            // the audio source.
+            // In order to avoid jumps in the DataSource offset caused by
+            // the audio codec prefetching data from the old locations
+            // while the video codec is already reading data from the new
+            // locations, we'll "pause" the audio source, causing it to
+            // stop reading input data until a subsequent seek.
+
+            if (mAudioPlayer != NULL) {
+                mAudioPlayer->pause();
+            }
+            mAudioSource->pause();
+        }
+    }
+
+    if (!mVideoBuffer) {
+        MediaSource::ReadOptions options;
+        if (mSeeking) {
+            LOGV("LV PLAYER seeking to %lld us (%.2f secs)", mSeekTimeUs,
+                                                      mSeekTimeUs / 1E6);
+
+            options.setSeekTo(
+                    mSeekTimeUs, MediaSource::ReadOptions::SEEK_CLOSEST);
+        }
+        for (;;) {
+            status_t err = mVideoSource->read(&mVideoBuffer, &options);
+            options.clearSeekTo();
+
+            if (err != OK) {
+                CHECK_EQ(mVideoBuffer, NULL);
+
+                if (err == INFO_FORMAT_CHANGED) {
+                    LOGV("LV PLAYER VideoSource signalled format change");
+                    notifyVideoSize_l();
+
+                    if (mVideoRenderer != NULL) {
+                        mVideoRendererIsPreview = false;
+                        initRenderer_l();
+                    }
+                    continue;
+                }
+                // So video playback is complete, but we may still have
+                // a seek request pending that needs to be applied
+                // to the audio track.
+                if (mSeeking) {
+                    LOGV("video stream ended while seeking!");
+                }
+                finishSeekIfNecessary(-1);
+
+                LOGV("PreviewPlayer: onVideoEvent EOS reached.");
+                mFlags |= VIDEO_AT_EOS;
+                postStreamDoneEvent_l(err);
+                return;
+            }
+
+            if (mVideoBuffer->range_length() == 0) {
+                // Some decoders, notably the PV AVC software decoder
+                // return spurious empty buffers that we just want to ignore.
+
+                mVideoBuffer->release();
+                mVideoBuffer = NULL;
+                continue;
+            }
+
+            int64_t videoTimeUs;
+            CHECK(mVideoBuffer->meta_data()->findInt64(kKeyTime, &videoTimeUs));
+
+            if((videoTimeUs/1000) < mPlayBeginTimeMsec) {
+                // Frames are before begin cut time
+                // Do not render
+                mVideoBuffer->release();
+                mVideoBuffer = NULL;
+                continue;
+            }
+
+            break;
+        }
+    }
+
+    mNumberDecVideoFrames++;
+
+    int64_t timeUs;
+    CHECK(mVideoBuffer->meta_data()->findInt64(kKeyTime, &timeUs));
+
+    {
+        Mutex::Autolock autoLock(mMiscStateLock);
+        mVideoTimeUs = timeUs;
+    }
+
+    mDecodedVideoTs = timeUs;
+
+    if(!mStartNextPlayer) {
+        int64_t playbackTimeRemaining = (mPlayEndTimeMsec*1000) - timeUs;
+        if(playbackTimeRemaining <= 1500000) {
+            //When less than 1.5 sec of playback left
+            // send notification to start next player
+
+            mStartNextPlayer = true;
+            notifyListener_l(0xAAAAAAAA);
+        }
+    }
+
+    bool wasSeeking = mSeeking;
+    finishSeekIfNecessary(timeUs);
+
+    TimeSource *ts = (mFlags & AUDIO_AT_EOS) ? &mSystemTimeSource : mTimeSource;
+
+    if(ts == NULL) {
+        mVideoBuffer->release();
+        mVideoBuffer = NULL;
+        return;
+    }
+
+    if(!mIsVideoSourceJpg) {
+        if (mFlags & FIRST_FRAME) {
+            mFlags &= ~FIRST_FRAME;
+
+            mTimeSourceDeltaUs = ts->getRealTimeUs() - timeUs;
+        }
+
+        int64_t realTimeUs, mediaTimeUs;
+        if (!(mFlags & AUDIO_AT_EOS) && mAudioPlayer != NULL
+            && mAudioPlayer->getMediaTimeMapping(&realTimeUs, &mediaTimeUs)) {
+            mTimeSourceDeltaUs = realTimeUs - mediaTimeUs;
+        }
+
+        int64_t nowUs = ts->getRealTimeUs() - mTimeSourceDeltaUs;
+
+        int64_t latenessUs = nowUs - timeUs;
+
+        if (wasSeeking) {
+            // Let's display the first frame after seeking right away.
+            latenessUs = 0;
+        }
+
+        LOGV("Audio time stamp = %lld and video time stamp = %lld",
+                                            ts->getRealTimeUs(),timeUs);
+        if (latenessUs > 40000) {
+            // We're more than 40ms late.
+
+            LOGV("LV PLAYER we're late by %lld us (%.2f secs)",
+                                           latenessUs, latenessUs / 1E6);
+
+            mVideoBuffer->release();
+            mVideoBuffer = NULL;
+
+            postVideoEvent_l();
+            return;
+        }
+
+        if (latenessUs < -10000) {
+            // We're more than 10ms early.
+            LOGV("We're more than 10ms early, lateness %lld", latenessUs);
+
+            postVideoEvent_l(10000);
+            return;
+        }
+    }
+
+    if (mVideoRendererIsPreview || mVideoRenderer == NULL) {
+        mVideoRendererIsPreview = false;
+
+        initRenderer_l();
+    }
+
+    // If the timestamp exceeds the clip's endCutTime, do not render
+    if((timeUs/1000) > mPlayEndTimeMsec) {
+        if (mLastVideoBuffer) {
+            mLastVideoBuffer->release();
+            mLastVideoBuffer = NULL;
+        }
+        mLastVideoBuffer = mVideoBuffer;
+        mVideoBuffer = NULL;
+        mFlags |= VIDEO_AT_EOS;
+        mFlags |= AUDIO_AT_EOS;
+        LOGI("PreviewPlayer: onVideoEvent timeUs > mPlayEndTime; send EOS..");
+        postStreamDoneEvent_l(ERROR_END_OF_STREAM);
+        return;
+    }
+
+    // Post processing to apply video effects
+    for(i=0;i<mNumberEffects;i++) {
+        // First check if effect starttime matches the clip being previewed
+        if((mEffectsSettings[i].uiStartTime < (mDecVideoTsStoryBoard/1000)) ||
+        (mEffectsSettings[i].uiStartTime >=
+         ((mDecVideoTsStoryBoard/1000) + mPlayEndTimeMsec - mPlayBeginTimeMsec)))
+        {
+            // This effect doesn't belong to this clip, check next one
+            continue;
+        }
+        // Check if effect applies to this particular frame timestamp
+        if((mEffectsSettings[i].uiStartTime <=
+         (((timeUs+mDecVideoTsStoryBoard)/1000)-mPlayBeginTimeMsec)) &&
+            ((mEffectsSettings[i].uiStartTime+mEffectsSettings[i].uiDuration) >=
+             (((timeUs+mDecVideoTsStoryBoard)/1000)-mPlayBeginTimeMsec))
+              && (mEffectsSettings[i].uiDuration != 0)) {
+
+            setVideoPostProcessingNode(
+             mEffectsSettings[i].VideoEffectType, TRUE);
+        }
+        else {
+            setVideoPostProcessingNode(
+             mEffectsSettings[i].VideoEffectType, FALSE);
+        }
+
+    }
+
+    if(mCurrentVideoEffect != VIDEO_EFFECT_NONE) {
+        err1 = doVideoPostProcessing();
+        if(err1 != M4NO_ERROR) {
+            LOGE("doVideoPostProcessing returned err");
+            bAppliedVideoEffect = false;
+        }
+        else {
+            bAppliedVideoEffect = true;
+        }
+    }
+    else {
+        bAppliedVideoEffect = false;
+        if(mRenderingMode != MEDIA_RENDERING_INVALID) {
+            // No effects to be applied, but media rendering to be done
+            err1 = doMediaRendering();
+            if(err1 != M4NO_ERROR) {
+                LOGE("doMediaRendering returned err");
+                //Use original mVideoBuffer for rendering
+                mVideoResizedOrCropped = false;
+            }
+        }
+    }
+
+    if (mVideoRenderer != NULL) {
+        LOGV("mVideoRenderer CALL render()");
+        mVideoRenderer->render();
+    }
+
+    if (mLastVideoBuffer) {
+        mLastVideoBuffer->release();
+        mLastVideoBuffer = NULL;
+    }
+
+    mLastVideoBuffer = mVideoBuffer;
+    mVideoBuffer = NULL;
+
+    // Post progress callback based on callback interval set
+    if(mNumberDecVideoFrames >= mProgressCbInterval) {
+        postProgressCallbackEvent_l();
+        mNumberDecVideoFrames = 0;  // reset counter
+    }
+
+    // if reached EndCutTime of clip, post EOS event
+    if((timeUs/1000) >= mPlayEndTimeMsec) {
+        LOGV("PreviewPlayer: onVideoEvent EOS.");
+        mFlags |= VIDEO_AT_EOS;
+        mFlags |= AUDIO_AT_EOS;
+        postStreamDoneEvent_l(ERROR_END_OF_STREAM);
+    }
+    else {
+        if(!mIsVideoSourceJpg) {
+            postVideoEvent_l();
+        }
+        else {
+            postVideoEvent_l(33000);
+        }
+    }
+}
+
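The sync window in onVideoEvent() above is fixed: a frame more than 40 ms late is dropped and the next decode is posted immediately, a frame more than 10 ms early is re-posted after a 10 ms delay, and anything in between is rendered. A minimal sketch of that decision rule (the classifyFrame helper and enum names are hypothetical, not part of the patch):

    // Sketch of the lateness window used in onVideoEvent() (hypothetical
    // classifyFrame helper, not part of the patch).
    // latenessUs = (ts->getRealTimeUs() - mTimeSourceDeltaUs) - frame timeUs.
    #include <cstdint>

    enum SyncAction {
        kRenderNow,    // inside the window: render, then post the next event
        kDropFrame,    // > 40 ms late: release the buffer, decode the next one
        kRetryIn10ms   // > 10 ms early: repost the video event with a delay
    };

    static SyncAction classifyFrame(int64_t latenessUs, bool wasSeeking) {
        if (wasSeeking) {
            latenessUs = 0;   // show the first frame after a seek right away
        }
        if (latenessUs > 40000) {
            return kDropFrame;
        }
        if (latenessUs < -10000) {
            return kRetryIn10ms;
        }
        return kRenderNow;
    }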
+status_t PreviewPlayer::prepare() {
+    Mutex::Autolock autoLock(mLock);
+    return prepare_l();
+}
+
+status_t PreviewPlayer::prepare_l() {
+    if (mFlags & PREPARED) {
+        return OK;
+    }
+
+    if (mFlags & PREPARING) {
+        return UNKNOWN_ERROR;
+    }
+
+    mIsAsyncPrepare = false;
+    status_t err = prepareAsync_l();
+
+    if (err != OK) {
+        return err;
+    }
+
+    while (mFlags & PREPARING) {
+        mPreparedCondition.wait(mLock);
+    }
+
+    return mPrepareResult;
+}
+
+status_t PreviewPlayer::prepareAsync_l() {
+    if (mFlags & PREPARING) {
+        return UNKNOWN_ERROR;  // async prepare already pending
+    }
+
+    if (!mQueueStarted) {
+        mQueue.start();
+        mQueueStarted = true;
+    }
+
+    mFlags |= PREPARING;
+    mAsyncPrepareEvent = new PreviewPlayerEvent(
+            this, &PreviewPlayer::onPrepareAsyncEvent);
+
+    mQueue.postEvent(mAsyncPrepareEvent);
+
+    return OK;
+}
+
+status_t PreviewPlayer::finishSetDataSource_l() {
+    sp<DataSource> dataSource;
+    sp<MediaExtractor> extractor;
+
+    dataSource = DataSource::CreateFromURI(mUri.string(), &mUriHeaders);
+
+    if (dataSource == NULL) {
+        return UNKNOWN_ERROR;
+    }
+
+    //If file type is .rgb, then no need to check for Extractor
+    int uriLen = strlen(mUri);
+    int startOffset = uriLen - 4;
+    if(!strncasecmp(mUri+startOffset, ".rgb", 4)) {
+        extractor = NULL;
+    }
+    else {
+        extractor = MediaExtractor::Create(dataSource,
+                                        MEDIA_MIMETYPE_CONTAINER_MPEG4);
+    }
+
+    if (extractor == NULL) {
+        LOGV("PreviewPlayer::finishSetDataSource_l  extractor == NULL");
+        return setDataSource_l_jpg();
+    }
+
+    return setDataSource_l(extractor);
+}
+
+
+// static
+bool PreviewPlayer::ContinuePreparation(void *cookie) {
+    PreviewPlayer *me = static_cast<PreviewPlayer *>(cookie);
+
+    return (me->mFlags & PREPARE_CANCELLED) == 0;
+}
+
+void PreviewPlayer::onPrepareAsyncEvent() {
+    Mutex::Autolock autoLock(mLock);
+    LOGV("onPrepareAsyncEvent");
+
+    if (mFlags & PREPARE_CANCELLED) {
+        LOGI("LV PLAYER prepare was cancelled before doing anything");
+        abortPrepare(UNKNOWN_ERROR);
+        return;
+    }
+
+    if (mUri.size() > 0) {
+        status_t err = finishSetDataSource_l();
+
+        if (err != OK) {
+            abortPrepare(err);
+            return;
+        }
+    }
+
+    if (mVideoTrack != NULL && mVideoSource == NULL) {
+        status_t err = initVideoDecoder(OMXCodec::kHardwareCodecsOnly);
+
+        if (err != OK) {
+            abortPrepare(err);
+            return;
+        }
+    }
+
+    if (mAudioTrack != NULL && mAudioSource == NULL) {
+        status_t err = initAudioDecoder();
+
+        if (err != OK) {
+            abortPrepare(err);
+            return;
+        }
+    }
+    finishAsyncPrepare_l();
+
+}
+
+void PreviewPlayer::finishAsyncPrepare_l() {
+    if (mIsAsyncPrepare) {
+        if (mVideoSource == NULL) {
+            LOGV("finishAsyncPrepare_l: MEDIA_SET_VIDEO_SIZE 0 0 ");
+            notifyListener_l(MEDIA_SET_VIDEO_SIZE, 0, 0);
+        } else {
+            LOGV("finishAsyncPrepare_l: MEDIA_SET_VIDEO_SIZE");
+            notifyVideoSize_l();
+        }
+        LOGV("finishAsyncPrepare_l: MEDIA_PREPARED");
+        notifyListener_l(MEDIA_PREPARED);
+    }
+
+    mPrepareResult = OK;
+    mFlags &= ~(PREPARING|PREPARE_CANCELLED);
+    mFlags |= PREPARED;
+    mAsyncPrepareEvent = NULL;
+    mPreparedCondition.broadcast();
+}
+
+status_t PreviewPlayer::suspend() {
+    LOGV("suspend");
+    Mutex::Autolock autoLock(mLock);
+
+    if (mSuspensionState != NULL) {
+        if (mLastVideoBuffer == NULL) {
+            // We get here if video is suspended again
+            // after resuming, without being played in
+            // between.
+            SuspensionState *state = mSuspensionState;
+            mSuspensionState = NULL;
+            reset_l();
+            mSuspensionState = state;
+            return OK;
+        }
+
+        delete mSuspensionState;
+        mSuspensionState = NULL;
+    }
+
+    if (mFlags & PREPARING) {
+        mFlags |= PREPARE_CANCELLED;
+    }
+
+    while (mFlags & PREPARING) {
+        mPreparedCondition.wait(mLock);
+    }
+
+    SuspensionState *state = new SuspensionState;
+    state->mUri = mUri;
+    state->mUriHeaders = mUriHeaders;
+    state->mFileSource = mFileSource;
+
+    state->mFlags = mFlags & (PLAYING | AUTO_LOOPING | LOOPING | AT_EOS);
+    getPosition(&state->mPositionUs);
+
+    if (mLastVideoBuffer) {
+        size_t size = mLastVideoBuffer->range_length();
+        if (size) {
+            int32_t unreadable;
+            if (!mLastVideoBuffer->meta_data()->findInt32(
+                        kKeyIsUnreadable, &unreadable)
+                    || unreadable == 0) {
+                state->mLastVideoFrameSize = size;
+                state->mLastVideoFrame = malloc(size);
+                memcpy(state->mLastVideoFrame,
+                   (const uint8_t *)mLastVideoBuffer->data()
+                        + mLastVideoBuffer->range_offset(),
+                   size);
+
+                state->mVideoWidth = mVideoWidth;
+                state->mVideoHeight = mVideoHeight;
+
+                sp<MetaData> meta = mVideoSource->getFormat();
+                CHECK(meta->findInt32(kKeyColorFormat, &state->mColorFormat));
+                CHECK(meta->findInt32(kKeyWidth, &state->mDecodedWidth));
+                CHECK(meta->findInt32(kKeyHeight, &state->mDecodedHeight));
+            } else {
+                LOGV("Unable to save last video frame, we have no access to "
+                     "the decoded video data.");
+            }
+        }
+    }
+
+    reset_l();
+
+    mSuspensionState = state;
+
+    return OK;
+}
+
+status_t PreviewPlayer::resume() {
+    LOGV("resume");
+    Mutex::Autolock autoLock(mLock);
+
+    if (mSuspensionState == NULL) {
+        return INVALID_OPERATION;
+    }
+
+    SuspensionState *state = mSuspensionState;
+    mSuspensionState = NULL;
+
+    status_t err;
+    if (state->mFileSource != NULL) {
+        err = AwesomePlayer::setDataSource_l(state->mFileSource);
+
+        if (err == OK) {
+            mFileSource = state->mFileSource;
+        }
+    } else {
+        err = AwesomePlayer::setDataSource_l(state->mUri, &state->mUriHeaders);
+    }
+
+    if (err != OK) {
+        delete state;
+        state = NULL;
+
+        return err;
+    }
+
+    seekTo_l(state->mPositionUs);
+
+    mFlags = state->mFlags & (AUTO_LOOPING | LOOPING | AT_EOS);
+
+    if (state->mLastVideoFrame && (mSurface != NULL || mISurface != NULL)) {
+        mVideoRenderer =
+            new PreviewLocalRenderer(
+                    true,  // previewOnly
+                    (OMX_COLOR_FORMATTYPE)state->mColorFormat,
+                    mSurface,
+                    state->mVideoWidth,
+                    state->mVideoHeight,
+                    state->mDecodedWidth,
+                    state->mDecodedHeight);
+
+        mVideoRendererIsPreview = true;
+
+        ((PreviewLocalRenderer *)mVideoRenderer.get())->render(
+                state->mLastVideoFrame, state->mLastVideoFrameSize);
+    }
+
+    if (state->mFlags & PLAYING) {
+        play_l();
+    }
+
+    mSuspensionState = state;
+    state = NULL;
+
+    return OK;
+}
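+
+// Usage sketch (illustrative only, not part of this change): how a caller
+// might drive the suspend()/resume() pair above across a surface teardown.
+// 'player' is a hypothetical PreviewPlayer instance.
+//
+//     CHECK_EQ((status_t)OK, player->suspend()); // snapshots position, flags
+//                                                // and the last video frame
+//     ... tear down and recreate the output surface ...
+//     CHECK_EQ((status_t)OK, player->resume());  // re-opens the data source,
+//                                                // seeks back and redisplays
+//                                                // the saved frame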
+
+
+status_t PreviewPlayer::loadEffectsSettings(
+                    M4VSS3GPP_EffectSettings* pEffectSettings, int nEffects) {
+
+    mNumberEffects = nEffects;
+    mEffectsSettings = pEffectSettings;
+    return OK;
+}
+
+status_t PreviewPlayer::loadAudioMixSettings(
+                    M4xVSS_AudioMixingSettings* pAudioMixSettings) {
+
+    LOGV("PreviewPlayer: loadAudioMixSettings: ");
+    mPreviewPlayerAudioMixSettings = pAudioMixSettings;
+    return OK;
+}
+
+status_t PreviewPlayer::setAudioMixPCMFileHandle(
+                    M4OSA_Context pAudioMixPCMFileHandle) {
+
+    LOGV("PreviewPlayer: setAudioMixPCMFileHandle: ");
+    mAudioMixPCMFileHandle = pAudioMixPCMFileHandle;
+    return OK;
+}
+
+status_t PreviewPlayer::setAudioMixStoryBoardParam(
+                    M4OSA_UInt32 audioMixStoryBoardTS,
+                    M4OSA_UInt32 currentMediaBeginCutTime,
+                    M4OSA_UInt32 primaryTrackVolValue ) {
+
+    mAudioMixStoryBoardTS = audioMixStoryBoardTS;
+    mCurrentMediaBeginCutTime = currentMediaBeginCutTime;
+    mCurrentMediaVolumeValue = primaryTrackVolValue;
+    return OK;
+}
+
+status_t PreviewPlayer::setPlaybackBeginTime(uint32_t msec) {
+
+    mPlayBeginTimeMsec = msec;
+    return OK;
+}
+
+status_t PreviewPlayer::setPlaybackEndTime(uint32_t msec) {
+
+    mPlayEndTimeMsec = msec;
+    return OK;
+}
+
+status_t PreviewPlayer::setStoryboardStartTime(uint32_t msec) {
+
+    mStoryboardStartTimeMsec = msec;
+    mDecVideoTsStoryBoard = mStoryboardStartTimeMsec*1000;
+    return OK;
+}
+
+status_t PreviewPlayer::setProgressCallbackInterval(uint32_t cbInterval) {
+
+    mProgressCbInterval = cbInterval;
+    return OK;
+}
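+
+// Usage sketch (illustrative only): the order in which the preview
+// controller would typically call the setters above before starting
+// playback. All names and values here are hypothetical.
+//
+//     player->loadEffectsSettings(effects, nEffects);
+//     player->loadAudioMixSettings(&audioMixSettings);
+//     player->setAudioMixPCMFileHandle(pcmFileHandle);
+//     player->setAudioMixStoryBoardParam(storyBoardTs, beginCutMs, ptVolume);
+//     player->setPlaybackBeginTime(beginCutMs);
+//     player->setPlaybackEndTime(endCutMs);
+//     player->setStoryboardStartTime(storyBoardTs);
+//     player->setProgressCallbackInterval(100 /* ms, hypothetical */);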
+
+
+status_t PreviewPlayer::setMediaRenderingMode(
+        M4xVSS_MediaRendering mode,
+        M4VIDEOEDITING_VideoFrameSize outputVideoSize) {
+
+    mRenderingMode = mode;
+
+    /* Reset the resize/crop flag for each clip */
+    mVideoResizedOrCropped = false;
+
+    switch(outputVideoSize) {
+        case M4VIDEOEDITING_kSQCIF:
+            mOutputVideoWidth = 128;
+            mOutputVideoHeight = 96;
+            break;
+
+        case M4VIDEOEDITING_kQQVGA:
+            mOutputVideoWidth = 160;
+            mOutputVideoHeight = 120;
+            break;
+
+        case M4VIDEOEDITING_kQCIF:
+            mOutputVideoWidth = 176;
+            mOutputVideoHeight = 144;
+            break;
+
+        case M4VIDEOEDITING_kQVGA:
+            mOutputVideoWidth = 320;
+            mOutputVideoHeight = 240;
+            break;
+
+        case M4VIDEOEDITING_kCIF:
+            mOutputVideoWidth = 352;
+            mOutputVideoHeight = 288;
+            break;
+
+        case M4VIDEOEDITING_kVGA:
+            mOutputVideoWidth = 640;
+            mOutputVideoHeight = 480;
+            break;
+
+        case M4VIDEOEDITING_kWVGA:
+            mOutputVideoWidth = 800;
+            mOutputVideoHeight = 480;
+            break;
+
+        case M4VIDEOEDITING_kNTSC:
+            mOutputVideoWidth = 720;
+            mOutputVideoHeight = 480;
+            break;
+
+        case M4VIDEOEDITING_k640_360:
+            mOutputVideoWidth = 640;
+            mOutputVideoHeight = 360;
+            break;
+
+        case M4VIDEOEDITING_k854_480:
+            mOutputVideoWidth = 854;
+            mOutputVideoHeight = 480;
+            break;
+
+        case M4VIDEOEDITING_kHD1280:
+            mOutputVideoWidth = 1280;
+            mOutputVideoHeight = 720;
+            break;
+
+        case M4VIDEOEDITING_kHD1080:
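+            // Note: assuming these M4VIDEOEDITING "HD" enums follow the rest
+            // of this framework, they are all 720 lines high, so kHD1080
+            // means 1080x720 rather than 1920x1080.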
+            mOutputVideoWidth = 1080;
+            mOutputVideoHeight = 720;
+            break;
+
+        case M4VIDEOEDITING_kHD960:
+            mOutputVideoWidth = 960;
+            mOutputVideoHeight = 720;
+            break;
+
+        default:
+            LOGE("unsupported output video size set");
+            return BAD_VALUE;
+    }
+
+    return OK;
+}
+
+M4OSA_ERR PreviewPlayer::doMediaRendering() {
+    M4OSA_ERR err = M4NO_ERROR;
+    M4VIFI_ImagePlane planeIn[3], planeOut[3];
+    M4VIFI_UInt8 *inBuffer = M4OSA_NULL;
+    M4OSA_UInt32 frameSize = 0, index = 0, bufferOffset = 0;
+    int32_t colorFormat = 0;
+
+    if (!mIsVideoSourceJpg) {
+        sp<MetaData> meta = mVideoSource->getFormat();
+        CHECK(meta->findInt32(kKeyColorFormat, &colorFormat));
+    } else {
+        colorFormat = OMX_COLOR_FormatYUV420Planar;
+    }
+
+    frameSize = (mVideoWidth * mVideoHeight * 3) >> 1;
+
+    uint8_t* outBuffer;
+    size_t outBufferStride = 0;
+
+    mVideoRenderer->getBuffer(&outBuffer, &outBufferStride);
+
+    bufferOffset = index*frameSize;
+    inBuffer = (M4OSA_UInt8 *)mVideoBuffer->data()+
+                mVideoBuffer->range_offset()+bufferOffset;
+
+
+    /* In plane*/
+    prepareYUV420ImagePlane(planeIn, mVideoWidth,
+      mVideoHeight, (M4VIFI_UInt8 *)inBuffer);
+
+    // Set up the output YUV420 plane to be compatible with the YV12 format:
+    //  - width and height must be even
+    //  - planes are ordered Y, V, U (YVU) instead of Y, U, V
+    //  - buffers are aligned on 32-bit boundaries
+
+    // In YV12, plane dimensions must be even
+    M4OSA_UInt32 yv12PlaneWidth = ((mOutputVideoWidth + 1) >> 1) << 1;
+    M4OSA_UInt32 yv12PlaneHeight = ((mOutputVideoHeight + 1) >> 1) << 1;
+
+    prepareYV12ImagePlane(planeOut, yv12PlaneWidth, yv12PlaneHeight,
+     (M4OSA_UInt32)outBufferStride, (M4VIFI_UInt8 *)outBuffer);
+
+
+    err = applyRenderingMode(planeIn, planeOut, mRenderingMode);
+
+    if (err != M4NO_ERROR) {
+        LOGE("doMediaRendering: applyRenderingMode returned err=0x%x", err);
+        return err;
+    }
+    mVideoResizedOrCropped = true;
+
+    return err;
+}
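+
+// Illustrative sketch (assumptions noted inline): how the YV12 plane
+// pointers for the buffer filled above could be derived from the stride
+// reported by getBuffer(). The 16-byte chroma alignment follows the YV12
+// definition in the graphics HAL; the authoritative values come from gralloc.
+//
+//     uint8_t *y  = outBuffer;
+//     uint8_t *v  = y + outBufferStride * yv12PlaneHeight;      // V before U
+//     size_t   cs = PreviewRenderer::ALIGN(outBufferStride / 2, 16);
+//     uint8_t *u  = v + cs * (yv12PlaneHeight / 2);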
+
+status_t PreviewPlayer::resetJniCallbackTimeStamp() {
+
+    mDecVideoTsStoryBoard = mStoryboardStartTimeMsec*1000;
+    return OK;
+}
+
+void PreviewPlayer::postProgressCallbackEvent_l() {
+    if (mProgressCbEventPending) {
+        return;
+    }
+    mProgressCbEventPending = true;
+
+    mQueue.postEvent(mProgressCbEvent);
+}
+
+void PreviewPlayer::onProgressCbEvent() {
+    Mutex::Autolock autoLock(mLock);
+    if (!mProgressCbEventPending) {
+        return;
+    }
+    mProgressCbEventPending = false;
+    // If playback starts from a previous I-frame (decoded timestamp still
+    // before the begin cut time), report the storyboard start duration
+    if ((mDecodedVideoTs / 1000) < mPlayBeginTimeMsec) {
+        notifyListener_l(MEDIA_INFO, 0, mDecVideoTsStoryBoard / 1000);
+    } else {
+        notifyListener_l(MEDIA_INFO, 0,
+            ((mDecodedVideoTs + mDecVideoTsStoryBoard) / 1000) - mPlayBeginTimeMsec);
+    }
+}
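+
+// Worked example for the progress value above (hypothetical numbers): with
+// mDecVideoTsStoryBoard = 5,000,000 us (clip starts 5 s into the storyboard),
+// mPlayBeginTimeMsec = 2000 and mDecodedVideoTs = 2,500,000 us, the listener
+// receives (2500 + 5000) - 2000 = 5500 ms.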
+
+void PreviewPlayer::setVideoPostProcessingNode(
+                    M4VSS3GPP_VideoEffectType type, M4OSA_Bool enable) {
+
+    uint32_t effect = VIDEO_EFFECT_NONE;
+
+    //Map M4VSS3GPP_VideoEffectType to local enum
+    switch(type) {
+        case M4VSS3GPP_kVideoEffectType_FadeFromBlack:
+            effect = VIDEO_EFFECT_FADEFROMBLACK;
+            break;
+
+        case M4VSS3GPP_kVideoEffectType_FadeToBlack:
+            effect = VIDEO_EFFECT_FADETOBLACK;
+            break;
+
+        case M4VSS3GPP_kVideoEffectType_CurtainOpening:
+            effect = VIDEO_EFFECT_CURTAINOPEN;
+            break;
+
+        case M4VSS3GPP_kVideoEffectType_CurtainClosing:
+            effect = VIDEO_EFFECT_CURTAINCLOSE;
+            break;
+
+        case M4xVSS_kVideoEffectType_BlackAndWhite:
+            effect = VIDEO_EFFECT_BLACKANDWHITE;
+            break;
+
+        case M4xVSS_kVideoEffectType_Pink:
+            effect = VIDEO_EFFECT_PINK;
+            break;
+
+        case M4xVSS_kVideoEffectType_Green:
+            effect = VIDEO_EFFECT_GREEN;
+            break;
+
+        case M4xVSS_kVideoEffectType_Sepia:
+            effect = VIDEO_EFFECT_SEPIA;
+            break;
+
+        case M4xVSS_kVideoEffectType_Negative:
+            effect = VIDEO_EFFECT_NEGATIVE;
+            break;
+
+        case M4xVSS_kVideoEffectType_Framing:
+            effect = VIDEO_EFFECT_FRAMING;
+            break;
+
+        case M4xVSS_kVideoEffectType_Fifties:
+            effect = VIDEO_EFFECT_FIFTIES;
+            break;
+
+        case M4xVSS_kVideoEffectType_ColorRGB16:
+            effect = VIDEO_EFFECT_COLOR_RGB16;
+            break;
+
+        case M4xVSS_kVideoEffectType_Gradient:
+            effect = VIDEO_EFFECT_GRADIENT;
+            break;
+
+        default:
+            effect = VIDEO_EFFECT_NONE;
+            break;
+    }
+
+    if (enable == M4OSA_TRUE) {
+        // If the effect is already set, there is no need to set it again
+        if (!(mCurrentVideoEffect & effect)) {
+            mCurrentVideoEffect |= effect;
+            if (effect == VIDEO_EFFECT_FIFTIES) {
+                mIsFiftiesEffectStarted = true;
+            }
+        }
+    } else {
+        // Reset the effect only if it is currently set
+        if (mCurrentVideoEffect & effect) {
+            mCurrentVideoEffect &= ~effect;
+        }
+    }
+}
+
+status_t PreviewPlayer::setImageClipProperties(uint32_t width, uint32_t height) {
+    mVideoWidth = width;
+    mVideoHeight = height;
+    return OK;
+}
+
+
+M4OSA_ERR PreviewPlayer::doVideoPostProcessing() {
+    M4OSA_ERR err = M4NO_ERROR;
+    vePostProcessParams postProcessParams;
+    int32_t colorFormat = 0;
+
+
+    if (!mIsVideoSourceJpg) {
+        sp<MetaData> meta = mVideoSource->getFormat();
+        CHECK(meta->findInt32(kKeyColorFormat, &colorFormat));
+    } else {
+        colorFormat = OMX_COLOR_FormatYUV420Planar;
+    }
+
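+    // Note: 0x7FA30C00 is assumed to be a vendor-specific (Qualcomm)
+    // YUV420 semi-planar variant; both semi-planar layouts are rejected
+    // below because the post-processing path expects planar input.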
+    if ((colorFormat == OMX_COLOR_FormatYUV420SemiPlanar) ||
+        (colorFormat == 0x7FA30C00)) {
+        LOGE("doVideoPostProcessing: colorFormat YUV420Sp not supported");
+        return M4ERR_UNSUPPORTED_MEDIA_TYPE;
+    }
+
+    postProcessParams.vidBuffer = (M4VIFI_UInt8*)mVideoBuffer->data()
+        + mVideoBuffer->range_offset();
+
+    postProcessParams.videoWidth = mVideoWidth;
+    postProcessParams.videoHeight = mVideoHeight;
+    postProcessParams.timeMs = mDecodedVideoTs/1000;
+    postProcessParams.timeOffset = mDecVideoTsStoryBoard/1000;
+    postProcessParams.effectsSettings = mEffectsSettings;
+    postProcessParams.numberEffects = mNumberEffects;
+    postProcessParams.outVideoWidth = mOutputVideoWidth;
+    postProcessParams.outVideoHeight = mOutputVideoHeight;
+    postProcessParams.currentVideoEffect = mCurrentVideoEffect;
+    postProcessParams.renderingMode = mRenderingMode;
+    if (mIsFiftiesEffectStarted == M4OSA_TRUE) {
+        postProcessParams.isFiftiesEffectStarted = M4OSA_TRUE;
+        mIsFiftiesEffectStarted = M4OSA_FALSE;
+    } else {
+        postProcessParams.isFiftiesEffectStarted = M4OSA_FALSE;
+    }
+
+    postProcessParams.overlayFrameRGBBuffer = mFrameRGBBuffer;
+    postProcessParams.overlayFrameYUVBuffer = mFrameYUVBuffer;
+    mVideoRenderer->getBuffer(&(postProcessParams.pOutBuffer),
+            &(postProcessParams.outBufferStride));
+    err = applyEffectsAndRenderingMode(&postProcessParams);
+
+    return err;
+}
+
+status_t PreviewPlayer::readFirstVideoFrame() {
+    LOGV("PreviewPlayer::readFirstVideoFrame");
+
+    if (!mVideoBuffer) {
+        MediaSource::ReadOptions options;
+        if (mSeeking) {
+            LOGV("LV PLAYER seeking to %lld us (%.2f secs)", mSeekTimeUs,
+                    mSeekTimeUs / 1E6);
+
+            options.setSeekTo(
+                    mSeekTimeUs, MediaSource::ReadOptions::SEEK_CLOSEST);
+        }
+        for (;;) {
+            status_t err = mVideoSource->read(&mVideoBuffer, &options);
+            options.clearSeekTo();
+
+            if (err != OK) {
+                CHECK_EQ(mVideoBuffer, NULL);
+
+                if (err == INFO_FORMAT_CHANGED) {
+                    LOGV("LV PLAYER VideoSource signalled format change");
+                    notifyVideoSize_l();
+
+                    if (mVideoRenderer != NULL) {
+                        mVideoRendererIsPreview = false;
+                        initRenderer_l();
+                    }
+                    continue;
+                }
+                LOGV("PreviewPlayer: onVideoEvent EOS reached.");
+                mFlags |= VIDEO_AT_EOS;
+                postStreamDoneEvent_l(err);
+                return OK;
+            }
+
+            if (mVideoBuffer->range_length() == 0) {
+                // Some decoders, notably the PV AVC software decoder
+                // return spurious empty buffers that we just want to ignore.
+
+                mVideoBuffer->release();
+                mVideoBuffer = NULL;
+                continue;
+            }
+
+            int64_t videoTimeUs;
+            CHECK(mVideoBuffer->meta_data()->findInt64(kKeyTime, &videoTimeUs));
+
+            if ((videoTimeUs / 1000) < mPlayBeginTimeMsec) {
+                // buffers are before begin cut time
+                // ignore them
+                //LOGI("PreviewPlayer: Ignoring buffers before begin cut time");
+                mVideoBuffer->release();
+                mVideoBuffer = NULL;
+                continue;
+            }
+
+            break;
+        }
+    }
+
+    int64_t timeUs;
+    CHECK(mVideoBuffer->meta_data()->findInt64(kKeyTime, &timeUs));
+
+    {
+        Mutex::Autolock autoLock(mMiscStateLock);
+        mVideoTimeUs = timeUs;
+    }
+
+    mDecodedVideoTs = timeUs;
+
+    return OK;
+}
+
+}  // namespace android
diff --git a/libvideoeditor/lvpp/PreviewPlayer.h b/libvideoeditor/lvpp/PreviewPlayer.h
new file mode 100755
index 0000000..214c136
--- /dev/null
+++ b/libvideoeditor/lvpp/PreviewPlayer.h
@@ -0,0 +1,217 @@
+/*
+ * Copyright (C) 2011 NXP Software
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef PREVIEW_PLAYER_H_
+
+#define PREVIEW_PLAYER_H_
+
+#include "NuHTTPDataSource.h"
+#include "TimedEventQueue.h"
+#include "VideoEditorAudioPlayer.h"
+
+#include <media/MediaPlayerInterface.h>
+#include <media/stagefright/DataSource.h>
+#include <media/stagefright/OMXClient.h>
+#include <media/stagefright/TimeSource.h>
+#include <utils/threads.h>
+#include <AwesomePlayer.h>
+#include "VideoEditorPreviewController.h"
+
+namespace android {
+
+struct AudioPlayer;
+struct DataSource;
+struct MediaBuffer;
+struct MediaExtractor;
+struct MediaSource;
+
+struct PreviewPlayerRenderer : public RefBase {
+    PreviewPlayerRenderer() {}
+
+    virtual void render(MediaBuffer *buffer) = 0;
+    virtual void render() = 0;
+    virtual void getBuffer(uint8_t **data, size_t *stride) = 0;
+
+private:
+    PreviewPlayerRenderer(const PreviewPlayerRenderer &);
+    PreviewPlayerRenderer &operator=(const PreviewPlayerRenderer &);
+};
+
+struct PreviewPlayer : public AwesomePlayer {
+    PreviewPlayer();
+    ~PreviewPlayer();
+
+    //Override baseclass methods
+    void reset();
+
+    status_t play();
+
+    void setISurface(const sp<ISurface> &isurface);
+
+    status_t seekTo(int64_t timeUs);
+
+    status_t getVideoDimensions(int32_t *width, int32_t *height) const;
+
+    status_t suspend();
+    status_t resume();
+
+    status_t prepare();
+    status_t setDataSource(
+        const char *uri, const KeyedVector<String8, String8> *headers);
+
+    //Added methods
+    status_t loadEffectsSettings(M4VSS3GPP_EffectSettings* pEffectSettings,
+                                 int nEffects);
+    status_t loadAudioMixSettings(M4xVSS_AudioMixingSettings* pAudioMixSettings);
+    status_t setAudioMixPCMFileHandle(M4OSA_Context pAudioMixPCMFileHandle);
+    status_t setAudioMixStoryBoardParam(M4OSA_UInt32 audioMixStoryBoardTS,
+                            M4OSA_UInt32 currentMediaBeginCutTime,
+                            M4OSA_UInt32 primaryTrackVolValue);
+
+    status_t setPlaybackBeginTime(uint32_t msec);
+    status_t setPlaybackEndTime(uint32_t msec);
+    status_t setStoryboardStartTime(uint32_t msec);
+    status_t setProgressCallbackInterval(uint32_t cbInterval);
+    status_t setMediaRenderingMode(M4xVSS_MediaRendering mode,
+                            M4VIDEOEDITING_VideoFrameSize outputVideoSize);
+
+    status_t resetJniCallbackTimeStamp();
+    status_t setImageClipProperties(uint32_t width, uint32_t height);
+    status_t readFirstVideoFrame();
+
+
+private:
+    friend struct PreviewPlayerEvent;
+
+    enum {
+        PLAYING             = 1,
+        LOOPING             = 2,
+        FIRST_FRAME         = 4,
+        PREPARING           = 8,
+        PREPARED            = 16,
+        AT_EOS              = 32,
+        PREPARE_CANCELLED   = 64,
+        CACHE_UNDERRUN      = 128,
+        AUDIO_AT_EOS        = 256,
+        VIDEO_AT_EOS        = 512,
+        AUTO_LOOPING        = 1024,
+    };
+
+    sp<ISurface> mISurface;
+
+    void cancelPlayerEvents(bool keepBufferingGoing = false);
+    status_t setDataSource_l(const sp<MediaExtractor> &extractor);
+    status_t setDataSource_l(
+        const char *uri, const KeyedVector<String8, String8> *headers);
+    void reset_l();
+    void partial_reset_l();
+    status_t play_l();
+    void initRenderer_l();
+    status_t initAudioDecoder();
+    status_t initVideoDecoder(uint32_t flags = 0);
+    void onVideoEvent();
+    status_t finishSetDataSource_l();
+    static bool ContinuePreparation(void *cookie);
+    void onPrepareAsyncEvent();
+    void finishAsyncPrepare_l();
+
+    sp<PreviewPlayerRenderer> mVideoRenderer;
+
+    int32_t mVideoWidth, mVideoHeight;
+
+    MediaBuffer *mLastVideoBuffer;
+
+    struct SuspensionState {
+        String8 mUri;
+        KeyedVector<String8, String8> mUriHeaders;
+        sp<DataSource> mFileSource;
+
+        uint32_t mFlags;
+        int64_t mPositionUs;
+
+        void *mLastVideoFrame;
+        size_t mLastVideoFrameSize;
+        int32_t mColorFormat;
+        int32_t mVideoWidth, mVideoHeight;
+        int32_t mDecodedWidth, mDecodedHeight;
+
+        SuspensionState()
+            : mLastVideoFrame(NULL) {
+        }
+
+        ~SuspensionState() {
+            if (mLastVideoFrame) {
+                free(mLastVideoFrame);
+                mLastVideoFrame = NULL;
+            }
+        }
+    } *mSuspensionState;
+
+    //Data structures used for audio and video effects
+    M4VSS3GPP_EffectSettings* mEffectsSettings;
+    M4xVSS_AudioMixingSettings* mPreviewPlayerAudioMixSettings;
+    M4OSA_Context mAudioMixPCMFileHandle;
+    M4OSA_UInt32 mAudioMixStoryBoardTS;
+    M4OSA_UInt32 mCurrentMediaBeginCutTime;
+    M4OSA_UInt32 mCurrentMediaVolumeValue;
+
+    uint32_t mNumberEffects;
+    uint32_t mPlayBeginTimeMsec;
+    uint32_t mPlayEndTimeMsec;
+    uint64_t mDecodedVideoTs; // timestamp of current decoded video frame buffer
+    uint64_t mDecVideoTsStoryBoard; // timestamp of frame relative to storyboard
+    uint32_t mCurrentVideoEffect;
+    uint32_t mProgressCbInterval;
+    uint32_t mNumberDecVideoFrames; // Counter of number of video frames decoded
+    sp<TimedEventQueue::Event> mProgressCbEvent;
+    bool mProgressCbEventPending;
+    MediaBuffer *mResizedVideoBuffer;
+    bool mVideoResizedOrCropped;
+    M4xVSS_MediaRendering mRenderingMode;
+    uint32_t mOutputVideoWidth;
+    uint32_t mOutputVideoHeight;
+
+    uint32_t mStoryboardStartTimeMsec;
+
+    bool mIsVideoSourceJpg;
+    bool mIsFiftiesEffectStarted;
+    int64_t mImageFrameTimeUs;
+    bool mStartNextPlayer;
+
+    M4VIFI_UInt8*  mFrameRGBBuffer;
+    M4VIFI_UInt8*  mFrameYUVBuffer;
+
+    void setVideoPostProcessingNode(
+                    M4VSS3GPP_VideoEffectType type, M4OSA_Bool enable);
+    M4OSA_ERR doVideoPostProcessing();
+    M4OSA_ERR doMediaRendering();
+    void postProgressCallbackEvent_l();
+    void onProgressCbEvent();
+
+    status_t setDataSource_l_jpg();
+
+    status_t prepare_l();
+    status_t prepareAsync_l();
+
+    PreviewPlayer(const PreviewPlayer &);
+    PreviewPlayer &operator=(const PreviewPlayer &);
+};
+
+}  // namespace android
+
+#endif  // PREVIEW_PLAYER_H_
+
diff --git a/libvideoeditor/lvpp/PreviewRenderer.cpp b/libvideoeditor/lvpp/PreviewRenderer.cpp
new file mode 100755
index 0000000..0f43043
--- /dev/null
+++ b/libvideoeditor/lvpp/PreviewRenderer.cpp
@@ -0,0 +1,265 @@
+/*
+ * Copyright (C) 2011 NXP Software
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+#define LOG_NDEBUG 1
+#define LOG_TAG "PreviewRenderer"
+#include <utils/Log.h>
+
+#include "PreviewRenderer.h"
+
+#include <binder/MemoryHeapBase.h>
+#include <binder/MemoryHeapPmem.h>
+#include <media/stagefright/MediaDebug.h>
+#include <surfaceflinger/Surface.h>
+
+namespace android {
+
+PreviewRenderer::PreviewRenderer(
+        OMX_COLOR_FORMATTYPE colorFormat,
+        const sp<Surface> &surface,
+        size_t displayWidth, size_t displayHeight,
+        size_t decodedWidth, size_t decodedHeight,
+        int32_t rotationDegrees)
+    : mColorFormat(colorFormat),
+      mConverter(NULL),
+      mYUVMode(None),
+      mSurface(surface),
+      mDisplayWidth(displayWidth),
+      mDisplayHeight(displayHeight),
+      mDecodedWidth(decodedWidth),
+      mDecodedHeight(decodedHeight) {
+    LOGV("input format = %d", mColorFormat);
+    LOGV("display = %d x %d, decoded = %d x %d",
+            mDisplayWidth, mDisplayHeight, mDecodedWidth, mDecodedHeight);
+
+    mDecodedWidth = mDisplayWidth;
+    mDecodedHeight = mDisplayHeight;
+
+    int halFormat;
+    switch (mColorFormat) {
+        case OMX_COLOR_FormatYUV420Planar:
+        {
+            halFormat = HAL_PIXEL_FORMAT_YV12;
+            mYUVMode = None;
+            break;
+        }
+        default:
+            halFormat = HAL_PIXEL_FORMAT_RGB_565;
+
+            mConverter = new ColorConverter(
+                    mColorFormat, OMX_COLOR_Format16bitRGB565);
+            CHECK(mConverter->isValid());
+            break;
+    }
+
+    CHECK(mSurface.get() != NULL);
+    CHECK(mDecodedWidth > 0);
+    CHECK(mDecodedHeight > 0);
+    CHECK(mConverter == NULL || mConverter->isValid());
+
+    CHECK_EQ(0,
+            native_window_set_usage(
+            mSurface.get(),
+            GRALLOC_USAGE_SW_READ_NEVER | GRALLOC_USAGE_SW_WRITE_OFTEN
+            | GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_EXTERNAL_DISP));
+
+    CHECK_EQ(0, native_window_set_buffer_count(mSurface.get(), 2));
+
+    // Width must be multiple of 32???
+    CHECK_EQ(0, native_window_set_buffers_geometry(
+                mSurface.get(), mDecodedWidth, mDecodedHeight,
+                halFormat));
+
+    uint32_t transform;
+    switch (rotationDegrees) {
+        case 0: transform = 0; break;
+        case 90: transform = HAL_TRANSFORM_ROT_90; break;
+        case 180: transform = HAL_TRANSFORM_ROT_180; break;
+        case 270: transform = HAL_TRANSFORM_ROT_270; break;
+        default: transform = 0; break;
+    }
+
+    if (transform) {
+        CHECK_EQ(0, native_window_set_buffers_transform(
+                    mSurface.get(), transform));
+    }
+}
+
+PreviewRenderer::~PreviewRenderer() {
+    delete mConverter;
+    mConverter = NULL;
+}
+
+
+//
+// Provides a buffer and its associated stride.
+// The buffer is allocated by SurfaceFlinger.
+//
+// For optimal display performance, you should:
+// 1) call getBufferYV12()
+// 2) fill the buffer with your data
+// 3) call renderYV12() to take these changes into account
+//
+// For each call to getBufferYV12(), you must also call renderYV12().
+// The expected format in the buffer is YV12 (similar to the YUV420
+// planar format); for more details on YV12 see
+// hardware/libhardware/include/hardware/hardware.h
+//
+void PreviewRenderer::getBufferYV12(uint8_t **data, size_t *stride) {
+    int err = OK;
+    LOGV("getBuffer START");
+
+    if ((err = mSurface->dequeueBuffer(mSurface.get(), &mBuf)) != 0) {
+        LOGW("Surface::dequeueBuffer returned error %d", err);
+        return;
+    }
+
+    CHECK_EQ(0, mSurface->lockBuffer(mSurface.get(), mBuf));
+
+    GraphicBufferMapper &mapper = GraphicBufferMapper::get();
+
+    Rect bounds(mDecodedWidth, mDecodedHeight);
+
+    void *dst;
+    CHECK_EQ(0, mapper.lock(
+                mBuf->handle, GRALLOC_USAGE_SW_WRITE_OFTEN, bounds, &dst));
+    LOGV("Buffer locked");
+
+    *data   = (uint8_t*)dst;
+    *stride = mBuf->stride;
+
+    LOGV("getBuffer END %p %d", dst, mBuf->stride);
+}
+
+
+//
+// Displays the content of the buffer provided by the last call to
+// getBufferYV12().
+//
+// See getBufferYV12() for details.
+//
+void PreviewRenderer::renderYV12() {
+    LOGV("renderYV12() START");
+    int err = OK;
+
+    GraphicBufferMapper &mapper = GraphicBufferMapper::get();
+
+    if (mBuf != NULL) {
+        CHECK_EQ(0, mapper.unlock(mBuf->handle));
+
+        if ((err = mSurface->queueBuffer(mSurface.get(), mBuf)) != 0) {
+            LOGW("Surface::queueBuffer returned error %d", err);
+        }
+    }
+    mBuf = NULL;
+    LOGV("renderYV12() END");
+}
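+
+// Usage sketch for the getBufferYV12()/renderYV12() protocol above
+// (illustrative only; fillWithYV12Frame() is a hypothetical fill step):
+//
+//     uint8_t *data = NULL;
+//     size_t stride = 0;
+//     renderer->getBufferYV12(&data, &stride);  // 1) dequeue and lock
+//     if (data != NULL) {
+//         fillWithYV12Frame(data, stride);      // 2) write one YV12 frame
+//     }
+//     renderer->renderYV12();                   // 3) unlock and queue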

+
+//
+// Displays the given data buffer.
+// platformPrivate is not used (kept for backward compatibility).
+// Prefer getBufferYV12() and the render() functions with no parameters
+// for optimal display.
+//
+void PreviewRenderer::render(
+        const void *data, size_t size, void *platformPrivate) {
+    android_native_buffer_t *buf;
+    int err;
+
+    if ((err = mSurface->dequeueBuffer(mSurface.get(), &buf)) != 0) {
+        LOGW("Surface::dequeueBuffer returned error %d", err);
+        return;
+    }
+
+    CHECK_EQ(0, mSurface->lockBuffer(mSurface.get(), buf));
+
+    GraphicBufferMapper &mapper = GraphicBufferMapper::get();
+
+    Rect bounds(mDecodedWidth, mDecodedHeight);
+
+    void *dst;
+    CHECK_EQ(0, mapper.lock(
+                buf->handle, GRALLOC_USAGE_SW_WRITE_OFTEN, bounds, &dst));
+    LOGV("Buffer locked");
+
+    if (mConverter) {
+        LOGV("Convert to RGB565");
+        mConverter->convert(data,
+                mDecodedWidth, mDecodedHeight,
+                0, 0, mDecodedWidth, mDecodedHeight,
+                dst, mDecodedWidth, mDecodedHeight,
+                0, 0, mDecodedWidth, mDecodedHeight);
+    } else if (mYUVMode == None) {
+        // Input is YUV420 planar, output is YV12; the two layouts differ
+        // in chroma ordering and alignment requirements.
+        LOGV("mYUVMode == None %d x %d", mDecodedWidth, mDecodedHeight);
+        size_t srcYStride = mDecodedWidth;
+        const uint8_t *srcY = (const uint8_t *)data;
+        uint8_t *dstY = (uint8_t *)dst;
+        LOGV("srcY =       %p   dstY =       %p", srcY, dstY);
+        LOGV("srcYStride = %d   dstYstride = %d", srcYStride, buf->stride);
+        for (size_t i = 0; i < mDecodedHeight; ++i) {
+            memcpy(dstY, srcY, mDecodedWidth);
+            srcY += srcYStride;
+            dstY += buf->stride;
+        }
+
+        size_t srcUVStride = (mDecodedWidth + 1) / 2;
+        size_t dstUVStride = ALIGN(mDecodedWidth / 2, 32);
+        LOGV("srcUVStride = %d   dstUVStride = %d", srcUVStride, dstUVStride);
+
+        // Copy V
+        // Source buffer is YUV, skip U
+        const uint8_t *srcV = (const uint8_t *)data
+                + mDecodedHeight * mDecodedWidth + (mDecodedHeight * mDecodedWidth)/4;
+        // Destination buffer is YVU
+        uint8_t *dstUV = (uint8_t *)dst
+                + buf->stride*mDecodedHeight;
+        LOGV("srcV =       %p   dstUV =       %p", srcV, dstUV);
+        for (size_t i = 0; i < (mDecodedHeight+1)/2; ++i) {
+            memcpy(dstUV, srcV, mDecodedWidth/2);
+            srcV += srcUVStride;
+            dstUV += dstUVStride;
+        }
+
+        // Copy U
+        // Source buffer is YUV, go back to the end of Y
+        const uint8_t *srcU = (const uint8_t *)data
+            + mDecodedHeight * mDecodedWidth;
+        // Destination buffer is YVU
+        // Keep writing after the V plane has been filled; U follows immediately
+        LOGV("srcU =       %p   dstUV =       %p", srcU, dstUV);
+        for (size_t i = 0; i < (mDecodedHeight+1)/2; ++i) {
+            memcpy(dstUV, srcU, mDecodedWidth/2);
+            srcU += srcUVStride;
+            dstUV += dstUVStride;
+        }
+    } else {
+        memcpy(dst, data, size);
+    }
+
+    CHECK_EQ(0, mapper.unlock(buf->handle));
+
+    if ((err = mSurface->queueBuffer(mSurface.get(), buf)) != 0) {
+        LOGW("Surface::queueBuffer returned error %d", err);
+    }
+    buf = NULL;
+}
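+
+// Destination layout produced by the YUV420-planar branch above
+// (illustrative; the actual strides come from the dequeued buffer):
+//
+//     dst + 0                          Y plane, rows of buf->stride bytes
+//     dst + buf->stride * height       V plane (YV12 stores V before U)
+//     immediately after the V plane    U plane, rows of ALIGN(width/2, 32)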

+
+}  // namespace android
diff --git a/libvideoeditor/lvpp/PreviewRenderer.h b/libvideoeditor/lvpp/PreviewRenderer.h
new file mode 100755
index 0000000..d5dcd85
--- /dev/null
+++ b/libvideoeditor/lvpp/PreviewRenderer.h
@@ -0,0 +1,76 @@
+/*
+ * Copyright (C) 2011 NXP Software
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef PREVIEW_RENDERER_H_
+
+#define PREVIEW_RENDERER_H_
+
+#include <media/stagefright/ColorConverter.h>
+#include <utils/RefBase.h>
+#include <ui/android_native_buffer.h>
+#include <ui/GraphicBufferMapper.h>
+#include "SoftwareRenderer.h"
+
+namespace android {
+
+class Surface;
+
+class PreviewRenderer {
+public:
+    PreviewRenderer(
+            OMX_COLOR_FORMATTYPE colorFormat,
+            const sp<Surface> &surface,
+            size_t displayWidth, size_t displayHeight,
+            size_t decodedWidth, size_t decodedHeight,
+            int32_t rotationDegrees);
+
+    ~PreviewRenderer();
+
+    void render(
+            const void *data, size_t size, void *platformPrivate);
+
+    void getBufferYV12(uint8_t **data, size_t *stride);
+
+    void renderYV12();
+
+    static size_t ALIGN(size_t x, size_t alignment) {
+        return (x + alignment - 1) & ~(alignment - 1);
+    }
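+    // Worked example for ALIGN() above: ALIGN(33, 32) == 64 and
+    // ALIGN(64, 32) == 64; 'alignment' must be a power of two for
+    // the mask trick to hold.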

+
+private:
+    enum YUVMode {
+        None,
+        YUV420ToYUV420sp,
+        YUV420spToYUV420sp,
+    };
+
+    OMX_COLOR_FORMATTYPE mColorFormat;
+    ColorConverter *mConverter;
+    YUVMode mYUVMode;
+    sp<Surface> mSurface;
+    size_t mDisplayWidth, mDisplayHeight;
+    size_t mDecodedWidth, mDecodedHeight;
+
+    android_native_buffer_t *mBuf;
+
+    PreviewRenderer(const PreviewRenderer &);
+    PreviewRenderer &operator=(const PreviewRenderer &);
+};
+
+}  // namespace android
+
+#endif  // PREVIEW_RENDERER_H_
diff --git a/libvideoeditor/lvpp/VideoEditorAudioPlayer.cpp b/libvideoeditor/lvpp/VideoEditorAudioPlayer.cpp
new file mode 100755
index 0000000..ed9de6e
--- /dev/null
+++ b/libvideoeditor/lvpp/VideoEditorAudioPlayer.cpp
@@ -0,0 +1,620 @@
+/*
+ * Copyright (C) 2011 NXP Software
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_NDEBUG 1
+#define LOG_TAG "VideoEditorAudioPlayer"
+#include <utils/Log.h>
+
+#include <binder/IPCThreadState.h>
+#include <media/AudioTrack.h>
+#include <VideoEditorAudioPlayer.h>
+#include <media/stagefright/MediaDebug.h>
+#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/MediaErrors.h>
+#include <media/stagefright/MediaSource.h>
+#include <media/stagefright/MetaData.h>
+
+#include "PreviewPlayer.h"
+namespace android {
+
+VideoEditorAudioPlayer::VideoEditorAudioPlayer(
+        const sp<MediaPlayerBase::AudioSink> &audioSink,
+        AwesomePlayer *observer)
+    : AudioPlayer(audioSink, observer) {
+
+    LOGV("VideoEditorAudioPlayer");
+    mBGAudioPCMFileHandle = NULL;
+    mBGAudioPCMFileLength = 0;
+    mBGAudioPCMFileTrimmedLength = 0;
+    mBGAudioPCMFileDuration = 0;
+    mBGAudioPCMFileSeekPoint = 0;
+    mBGAudioPCMFileOriginalSeekPoint = 0;
+    mBGAudioStoryBoardSkimTimeStamp = 0;
+    mBGAudioStoryBoardCurrentMediaBeginCutTS = 0;
+    mBGAudioStoryBoardCurrentMediaVolumeVal = 0;
+    mSeekTimeUs = 0;
+}
+
+VideoEditorAudioPlayer::~VideoEditorAudioPlayer() {
+
+    LOGV("~VideoEditorAudioPlayer");
+    if (mStarted) {
+        reset();
+    }
+}
+
+status_t VideoEditorAudioPlayer::start(bool sourceAlreadyStarted) {
+
+    CHECK(!mStarted);
+    CHECK(mSource != NULL);
+    LOGV("Start");
+    status_t err;
+    M4OSA_ERR result = M4NO_ERROR;
+    M4OSA_UInt32 startTime = 0;
+    M4OSA_UInt32 seekTimeStamp = 0;
+    M4OSA_Bool bStoryBoardTSBeyondBTEndCutTime = M4OSA_FALSE;
+
+    if (!sourceAlreadyStarted) {
+        err = mSource->start();
+        if (err != OK) {
+            return err;
+        }
+    }
+
+    // Create the BG Audio handler
+    mAudioProcess = new VideoEditorBGAudioProcessing();
+    veAudMixSettings audioMixSettings;
+
+    // Pass on the audio ducking parameters
+    audioMixSettings.lvInDucking_threshold = mAudioMixSettings->uiInDucking_threshold;
+    audioMixSettings.lvInDucking_lowVolume = ((M4OSA_Float)mAudioMixSettings->uiInDucking_lowVolume) / 100.0;
+    audioMixSettings.lvInDucking_enable = mAudioMixSettings->bInDucking_enable;
+    audioMixSettings.lvPTVolLevel = ((M4OSA_Float)mBGAudioStoryBoardCurrentMediaVolumeVal) / 100.0;
+    audioMixSettings.lvBTVolLevel = ((M4OSA_Float)mAudioMixSettings->uiAddVolume) /100.0;
+    audioMixSettings.lvBTChannelCount = mAudioMixSettings->uiBTChannelCount;
+    audioMixSettings.lvPTChannelCount = mAudioMixSettings->uiNbChannels;
+
+    // Call to Audio mix param setting
+    mAudioProcess->veSetAudioProcessingParams(audioMixSettings);
+
+    // Get the BG Audio PCM file details
+    if ( mBGAudioPCMFileHandle ) {
+
+        // TODO : 32bits required for OSAL, to be updated once OSAL is updated
+        M4OSA_UInt32 tmp32 = 0;
+        result = M4OSA_fileReadGetOption(mBGAudioPCMFileHandle,
+                                        M4OSA_kFileReadGetFileSize,
+                                        (M4OSA_Void**)&tmp32);
+        mBGAudioPCMFileLength = tmp32;
+        mBGAudioPCMFileTrimmedLength = mBGAudioPCMFileLength;
+
+
+        LOGV("VideoEditorAudioPlayer::start M4OSA_kFileReadGetFileSize = %lld",
+                            mBGAudioPCMFileLength);
+
+        // Get the duration in time of the audio BT
+        if ( result == M4NO_ERROR ) {
+         LOGV("VEAP: channels = %d freq = %d",
+         mAudioMixSettings->uiNbChannels,  mAudioMixSettings->uiSamplingFrequency);
+
+            // No trim
+            mBGAudioPCMFileDuration = ((
+                    (int64_t)(mBGAudioPCMFileLength/sizeof(M4OSA_UInt16)/
+                    mAudioMixSettings->uiNbChannels))*1000 ) /
+                    mAudioMixSettings->uiSamplingFrequency;
+
+            LOGV("VideoEditorAudioPlayer:: beginCutMs %d , endCutMs %d",
+                    (unsigned int) mAudioMixSettings->beginCutMs,
+                    (unsigned int) mAudioMixSettings->endCutMs);
+
+            // Remove the trim part
+            if ((mAudioMixSettings->beginCutMs == 0) &&
+                (mAudioMixSettings->endCutMs != 0)) {
+                // The end cut time alone defines the file duration
+                mBGAudioPCMFileDuration = mAudioMixSettings->endCutMs;
+                // Limit the file length also
+                mBGAudioPCMFileTrimmedLength = ((
+                     (int64_t)(mBGAudioPCMFileDuration *
+                     mAudioMixSettings->uiSamplingFrequency) *
+                     mAudioMixSettings->uiNbChannels) *
+                     sizeof(M4OSA_UInt16)) / 1000;
+            }
+            else if ((mAudioMixSettings->beginCutMs != 0) &&
+                     (mAudioMixSettings->endCutMs == mBGAudioPCMFileDuration)) {
+                // Play from the begin cut time to the end of the file
+                mBGAudioPCMFileDuration = mBGAudioPCMFileDuration -
+                      mAudioMixSettings->beginCutMs;
+                // Limit the file length also
+                mBGAudioPCMFileTrimmedLength = ((
+                     (int64_t)(mBGAudioPCMFileDuration *
+                     mAudioMixSettings->uiSamplingFrequency) *
+                     mAudioMixSettings->uiNbChannels) *
+                     sizeof(M4OSA_UInt16)) / 1000;
+            }
+            else if ((mAudioMixSettings->beginCutMs != 0) &&
+                    (mAudioMixSettings->endCutMs != 0)) {
+                // Duration is the span between the begin and end cut times
+                mBGAudioPCMFileDuration = mAudioMixSettings->endCutMs -
+                    mAudioMixSettings->beginCutMs;
+                // Limit the file length also
+                mBGAudioPCMFileTrimmedLength = ((
+                    (int64_t)(mBGAudioPCMFileDuration *
+                    mAudioMixSettings->uiSamplingFrequency) *
+                    mAudioMixSettings->uiNbChannels) *
+                    sizeof(M4OSA_UInt16)) / 1000; /* divide by 1000: ms -> s */
+            }
+
+            LOGV("VideoEditorAudioPlayer: file duration recorded : %lld",
+                    mBGAudioPCMFileDuration);
+        }
+
+        // Last played location to be seeked at for next media item
+        if ( result == M4NO_ERROR ) {
+            LOGV("VideoEditorAudioPlayer::mBGAudioStoryBoardSkimTimeStamp %lld",
+                    mBGAudioStoryBoardSkimTimeStamp);
+            LOGV("VideoEditorAudioPlayer::uiAddCts %d",
+                    mAudioMixSettings->uiAddCts);
+            if (mBGAudioStoryBoardSkimTimeStamp >= mAudioMixSettings->uiAddCts) {
+                startTime = (mBGAudioStoryBoardSkimTimeStamp -
+                 mAudioMixSettings->uiAddCts);
+            }
+            else {
+                // do nothing
+            }
+
+            LOGV("VideoEditorAudioPlayer::startTime %d", startTime);
+            seekTimeStamp = 0;
+            if (startTime) {
+                if (startTime >= mBGAudioPCMFileDuration) {
+                    // The BG track should be looped and started again
+                    if (mAudioMixSettings->bLoop) {
+                        // Add begin cut time to the mod value
+                        seekTimeStamp = ((startTime%mBGAudioPCMFileDuration) +
+                        mAudioMixSettings->beginCutMs);
+                    } else {
+                        // Looping disabled: do not mix the BG track;
+                        // seek to the end of the file
+                        seekTimeStamp = (mBGAudioPCMFileDuration +
+                        mAudioMixSettings->beginCutMs);
+                    }
+                } else {
+                    // BG track still present; just seek to the storyboard time
+                    seekTimeStamp = startTime + mAudioMixSettings->beginCutMs;
+                }
+            }
+            else {
+                seekTimeStamp = mAudioMixSettings->beginCutMs;
+            }
+
+            // Convert the seekTimeStamp to file location
+            mBGAudioPCMFileOriginalSeekPoint = (
+                                        (int64_t)(mAudioMixSettings->beginCutMs)
+                                        * mAudioMixSettings->uiSamplingFrequency
+                                        * mAudioMixSettings->uiNbChannels
+                                        * sizeof(M4OSA_UInt16)) / 1000; /* divide by 1000: ms -> s */
+
+            mBGAudioPCMFileSeekPoint = ((int64_t)(seekTimeStamp)
+                                        * mAudioMixSettings->uiSamplingFrequency
+                                        * mAudioMixSettings->uiNbChannels
+                                        * sizeof(M4OSA_UInt16))/ 1000 ;
+        }
+    }
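+
+    // Worked example for the seek-point math above (hypothetical values):
+    // for 44100 Hz stereo 16-bit PCM, seekTimeStamp = 1000 ms maps to
+    // 1000 * 44100 * 2 * sizeof(M4OSA_UInt16) / 1000 = 176400 bytes.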
+
+    // We allow an optional INFO_FORMAT_CHANGED at the very beginning
+    // of playback, if there is one, getFormat below will retrieve the
+    // updated format, if there isn't, we'll stash away the valid buffer
+    // of data to be used on the first audio callback.
+
+    CHECK(mFirstBuffer == NULL);
+
+    mFirstBufferResult = mSource->read(&mFirstBuffer);
+    if (mFirstBufferResult == INFO_FORMAT_CHANGED) {
+        LOGV("INFO_FORMAT_CHANGED!!!");
+
+        CHECK(mFirstBuffer == NULL);
+        mFirstBufferResult = OK;
+        mIsFirstBuffer = false;
+    } else {
+        mIsFirstBuffer = true;
+    }
+
+    sp<MetaData> format = mSource->getFormat();
+    const char *mime;
+    bool success = format->findCString(kKeyMIMEType, &mime);
+    CHECK(success);
+    CHECK(!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_RAW));
+
+    success = format->findInt32(kKeySampleRate, &mSampleRate);
+    CHECK(success);
+
+    int32_t numChannels;
+    success = format->findInt32(kKeyChannelCount, &numChannels);
+    CHECK(success);
+
+    if (mAudioSink.get() != NULL) {
+        status_t err = mAudioSink->open(
+                mSampleRate, numChannels, AudioSystem::PCM_16_BIT,
+                DEFAULT_AUDIOSINK_BUFFERCOUNT,
+                &VideoEditorAudioPlayer::AudioSinkCallback, this);
+        if (err != OK) {
+            if (mFirstBuffer != NULL) {
+                mFirstBuffer->release();
+                mFirstBuffer = NULL;
+            }
+
+            if (!sourceAlreadyStarted) {
+                mSource->stop();
+            }
+
+            return err;
+        }
+
+        mLatencyUs = (int64_t)mAudioSink->latency() * 1000;
+        mFrameSize = mAudioSink->frameSize();
+
+        mAudioSink->start();
+    } else {
+        mAudioTrack = new AudioTrack(
+                AudioSystem::MUSIC, mSampleRate, AudioSystem::PCM_16_BIT,
+                (numChannels == 2)
+                    ? AudioSystem::CHANNEL_OUT_STEREO
+                    : AudioSystem::CHANNEL_OUT_MONO,
+                0, 0, &AudioCallback, this, 0);
+
+        if ((err = mAudioTrack->initCheck()) != OK) {
+            delete mAudioTrack;
+            mAudioTrack = NULL;
+
+            if (mFirstBuffer != NULL) {
+                mFirstBuffer->release();
+                mFirstBuffer = NULL;
+            }
+
+            if (!sourceAlreadyStarted) {
+                mSource->stop();
+            }
+
+            return err;
+        }
+
+        mLatencyUs = (int64_t)mAudioTrack->latency() * 1000;
+        mFrameSize = mAudioTrack->frameSize();
+
+        mAudioTrack->start();
+    }
+
+    mStarted = true;
+
+    return OK;
+}
+
+void VideoEditorAudioPlayer::reset() {
+
+    LOGV("reset");
+    AudioPlayer::reset();
+
+    // Capture the current seek point
+    mBGAudioPCMFileSeekPoint = 0;
+    mBGAudioStoryBoardSkimTimeStamp =0;
+    mBGAudioStoryBoardCurrentMediaBeginCutTS=0;
+}
+
+size_t VideoEditorAudioPlayer::AudioSinkCallback(
+        MediaPlayerBase::AudioSink *audioSink,
+        void *buffer, size_t size, void *cookie) {
+    VideoEditorAudioPlayer *me = (VideoEditorAudioPlayer *)cookie;
+
+    return me->fillBuffer(buffer, size);
+}
+
+
+size_t VideoEditorAudioPlayer::fillBuffer(void *data, size_t size) {
+
+    if (mReachedEOS) {
+        return 0;
+    }
+
+    size_t size_done = 0;
+    size_t size_remaining = size;
+
+    M4OSA_ERR err = M4NO_ERROR;
+    M4AM_Buffer bgFrame = {NULL, 0};
+    M4AM_Buffer mixFrame = {NULL, 0};
+    M4AM_Buffer ptFrame = {NULL, 0};
+    M4OSA_Float fPTVolLevel =
+     ((M4OSA_Float)mBGAudioStoryBoardCurrentMediaVolumeVal)/100;
+    M4OSA_Int16     *pPTMdata;
+    M4OSA_UInt32     uiPCMsize = 0;
+
+    while ((size_remaining > 0) && (err == M4NO_ERROR)) {
+        MediaSource::ReadOptions options;
+
+        {
+            Mutex::Autolock autoLock(mLock);
+            if (mSeeking) {
+                if (mIsFirstBuffer) {
+                    if (mFirstBuffer != NULL) {
+                        mFirstBuffer->release();
+                        mFirstBuffer = NULL;
+                    }
+                    mIsFirstBuffer = false;
+                }
+
+                options.setSeekTo(mSeekTimeUs);
+
+                if (mInputBuffer != NULL) {
+                    mInputBuffer->release();
+                    mInputBuffer = NULL;
+                }
+
+                mSeeking = false;
+                if (mObserver) {
+                    mObserver->postAudioSeekComplete();
+                }
+            }
+        }
+
+        if (mInputBuffer == NULL) {
+            status_t status = OK;
+
+            if (mIsFirstBuffer) {
+                mInputBuffer = mFirstBuffer;
+                mFirstBuffer = NULL;
+                status = mFirstBufferResult;
+
+                mIsFirstBuffer = false;
+            } else {
+                status = mSource->read(&mInputBuffer, &options);
+                // Data is Primary Track, mix with background track
+                // after reading same size from Background track PCM file
+                if (status == OK) {
+                    // Mix only when skim point is after startTime of BT
+                    if (((mBGAudioStoryBoardSkimTimeStamp* 1000) +
+                          (mPositionTimeMediaUs - mSeekTimeUs)) >=
+                          (int64_t)(mAudioMixSettings->uiAddCts * 1000)) {
+
+                        LOGV("VideoEditorAudioPlayer::INSIDE MIXING");
+                        LOGV("Checking %lld <= %lld - %d",
+                            mBGAudioPCMFileSeekPoint-mBGAudioPCMFileOriginalSeekPoint,
+                            mBGAudioPCMFileTrimmedLength, len);
+
+
+                        M4OSA_Void* ptr;
+                        ptr = (M4OSA_Void*)((unsigned int)mInputBuffer->data() +
+                        mInputBuffer->range_offset());
+
+                        M4OSA_UInt32 len = mInputBuffer->range_length();
+                        M4OSA_Context fp = M4OSA_NULL;
+
+                        uiPCMsize = (mInputBuffer->range_length())/2;
+                        pPTMdata = (M4OSA_Int16*)(mInputBuffer->data() +
+                        mInputBuffer->range_offset());
+
+                        LOGV("mix with background malloc to do len %d", len);
+
+                        bgFrame.m_dataAddress = (M4OSA_UInt16*)M4OSA_malloc( len, 1,
+                                                       (M4OSA_Char*)"bgFrame");
+                        if (NULL == bgFrame.m_dataAddress) {
+                            LOGE("mBackgroundAudioSetting Malloc failed");
+                        }
+
+                        bgFrame.m_bufferSize = len;
+
+                        mixFrame.m_dataAddress = (M4OSA_UInt16*)M4OSA_malloc(len, 1,
+                                                    (M4OSA_Char*)"mixFrame");
+                        if (NULL == mixFrame.m_dataAddress) {
+                            LOGE("mBackgroundAudioSetting Malloc failed");
+                        }
+
+                        mixFrame.m_bufferSize = len;
+
+                        LOGV("mix with bgm with size %lld", mBGAudioPCMFileLength);
+
+                        CHECK(mInputBuffer->meta_data()->findInt64(kKeyTime,
+                                         &mPositionTimeMediaUs));
+
+                        if (mBGAudioPCMFileSeekPoint -
+                             mBGAudioPCMFileOriginalSeekPoint <=
+                              (mBGAudioPCMFileTrimmedLength - len)) {
+
+                            LOGV("Checking mBGAudioPCMFileHandle %d",
+                                (unsigned int)mBGAudioPCMFileHandle);
+
+                            if (mBGAudioPCMFileHandle != M4OSA_NULL) {
+                                LOGV("fillBuffer seeking file to %lld",
+                                    mBGAudioPCMFileSeekPoint);
+
+                            // TODO : 32bits required for OSAL
+                                M4OSA_UInt32 tmp32 =
+                                    (M4OSA_UInt32)mBGAudioPCMFileSeekPoint;
+                                err = M4OSA_fileReadSeek(mBGAudioPCMFileHandle,
+                                                M4OSA_kFileSeekBeginning,
+                                                (M4OSA_FilePosition*)&tmp32);
+
+                                mBGAudioPCMFileSeekPoint = tmp32;
+
+                                if (err != M4NO_ERROR){
+                                    LOGE("M4OSA_fileReadSeek err %d", err);
+                                }
+
+                                err = M4OSA_fileReadData(mBGAudioPCMFileHandle,
+                                       (M4OSA_Int8*)bgFrame.m_dataAddress,
+                                       (M4OSA_UInt32*)&len);
+                                if (err == M4WAR_NO_DATA_YET ) {
+
+                                    LOGV("fillBuffer End of file reached");
+                                    err = M4NO_ERROR;
+
+                                    // We reached the end of file
+                                    // move to begin cut time equal value
+                                    if (mAudioMixSettings->bLoop) {
+                                        mBGAudioPCMFileSeekPoint =
+                                         (((int64_t)(mAudioMixSettings->beginCutMs) *
+                                          mAudioMixSettings->uiSamplingFrequency) *
+                                          mAudioMixSettings->uiNbChannels *
+                                           sizeof(M4OSA_UInt16)) / 1000;
+                                        LOGV("fillBuffer Looping \
+                                            to mBGAudioPCMFileSeekPoint %lld",
+                                            mBGAudioPCMFileSeekPoint);
+                                    }
+                                    else {
+                                            // No mixing;
+                                            // take care of volume of primary track
+                                        if (fPTVolLevel < 1.0) {
+                                            setPrimaryTrackVolume(pPTMdata,
+                                             uiPCMsize, fPTVolLevel);
+                                        }
+                                    }
+                                } else if (err != M4NO_ERROR ) {
+                                     LOGV("fileReadData for audio err %d", err);
+                                } else {
+                                    mBGAudioPCMFileSeekPoint += len;
+                                    LOGV("fillBuffer mBGAudioPCMFileSeekPoint \
+                                         %lld", mBGAudioPCMFileSeekPoint);
+
+                                    // Assign the ptr data to primary track
+                                    ptFrame.m_dataAddress = (M4OSA_UInt16*)ptr;
+                                    ptFrame.m_bufferSize = len;
+
+                                    // Call to mix and duck
+                                    mAudioProcess->veProcessAudioMixNDuck(
+                                         &ptFrame, &bgFrame, &mixFrame);
+
+                                        // Overwrite the decoded buffer
+                                    M4OSA_memcpy((M4OSA_MemAddr8)ptr,
+                                         (M4OSA_MemAddr8)mixFrame.m_dataAddress, len);
+                                }
+                            }
+                        } else if (mAudioMixSettings->bLoop){
+                            // Move to begin cut time equal value
+                            mBGAudioPCMFileSeekPoint =
+                                mBGAudioPCMFileOriginalSeekPoint;
+                        } else {
+                            // No mixing;
+                            // take care of volume level of primary track
+                            if(fPTVolLevel < 1.0) {
+                                setPrimaryTrackVolume(
+                                      pPTMdata, uiPCMsize, fPTVolLevel);
+                            }
+                        }
+                        if (bgFrame.m_dataAddress) {
+                            M4OSA_free((M4OSA_MemAddr32)bgFrame.m_dataAddress);
+                        }
+                        if (mixFrame.m_dataAddress) {
+                            M4OSA_free((M4OSA_MemAddr32)mixFrame.m_dataAddress);
+                        }
+                    } else {
+                        // No mixing;
+                        // take care of volume level of primary track
+                        if(fPTVolLevel < 1.0) {
+                            setPrimaryTrackVolume(pPTMdata, uiPCMsize,
+                                                 fPTVolLevel);
+                        }
+                    }
+                }
+            }
+
+            CHECK((status == OK && mInputBuffer != NULL)
+                   || (status != OK && mInputBuffer == NULL));
+
+            Mutex::Autolock autoLock(mLock);
+
+            if (status != OK) {
+                LOGV("fillBuffer: mSource->read returned err %d", status);
+                if (mObserver && !mReachedEOS) {
+                    mObserver->postAudioEOS();
+                }
+
+                mReachedEOS = true;
+                mFinalStatus = status;
+                break;
+            }
+
+            CHECK(mInputBuffer->meta_data()->findInt64(
+                        kKeyTime, &mPositionTimeMediaUs));
+
+            mPositionTimeRealUs =
+                ((mNumFramesPlayed + size_done / mFrameSize) * 1000000)
+                    / mSampleRate;
+
+            LOGV("buffer->size() = %d, "
+                     "mPositionTimeMediaUs=%.2f mPositionTimeRealUs=%.2f",
+                 mInputBuffer->range_length(),
+                 mPositionTimeMediaUs / 1E6, mPositionTimeRealUs / 1E6);
+        }
+
+        if (mInputBuffer->range_length() == 0) {
+            mInputBuffer->release();
+            mInputBuffer = NULL;
+
+            continue;
+        }
+
+        size_t copy = size_remaining;
+        if (copy > mInputBuffer->range_length()) {
+            copy = mInputBuffer->range_length();
+        }
+
+        memcpy((char *)data + size_done,
+           (const char *)mInputBuffer->data() + mInputBuffer->range_offset(),
+               copy);
+
+        mInputBuffer->set_range(mInputBuffer->range_offset() + copy,
+                            mInputBuffer->range_length() - copy);
+
+        size_done += copy;
+        size_remaining -= copy;
+    }
+
+    Mutex::Autolock autoLock(mLock);
+    mNumFramesPlayed += size_done / mFrameSize;
+
+    return size_done;
+}
+
+void VideoEditorAudioPlayer::setAudioMixSettings(
+                            M4xVSS_AudioMixingSettings* pAudioMixSettings) {
+    mAudioMixSettings = pAudioMixSettings;
+}
+
+void VideoEditorAudioPlayer::setAudioMixPCMFileHandle(
+                            M4OSA_Context pBGAudioPCMFileHandle){
+    mBGAudioPCMFileHandle = pBGAudioPCMFileHandle;
+}
+
+void VideoEditorAudioPlayer::setAudioMixStoryBoardSkimTimeStamp(
+                            M4OSA_UInt32 pBGAudioStoryBoardSkimTimeStamp,
+                            M4OSA_UInt32 pBGAudioCurrentMediaBeginCutTS,
+                            M4OSA_UInt32 pBGAudioCurrentMediaVolumeVal) {
+
+    mBGAudioStoryBoardSkimTimeStamp = pBGAudioStoryBoardSkimTimeStamp;
+    mBGAudioStoryBoardCurrentMediaBeginCutTS = pBGAudioCurrentMediaBeginCutTS;
+    mBGAudioStoryBoardCurrentMediaVolumeVal = pBGAudioCurrentMediaVolumeVal;
+}
+
+void VideoEditorAudioPlayer::setPrimaryTrackVolume(
+    M4OSA_Int16 *data, M4OSA_UInt32 size, M4OSA_Float volLevel) {
+
+    while(size-- > 0) {
+        *data = (M4OSA_Int16)((*data)*volLevel);
+        data++;
+    }
+}
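+
+// Note: setPrimaryTrackVolume() assumes volLevel <= 1.0; a gain above
+// unity could overflow M4OSA_Int16. A minimal saturating variant would
+// be (illustrative sketch only, not part of this change):
+//
+//     M4OSA_Int32 scaled = (M4OSA_Int32)((*data) * volLevel);
+//     if (scaled > 32767)  scaled = 32767;
+//     if (scaled < -32768) scaled = -32768;
+//     *data = (M4OSA_Int16)scaled;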
+
+}
diff --git a/libvideoeditor/lvpp/VideoEditorAudioPlayer.h b/libvideoeditor/lvpp/VideoEditorAudioPlayer.h
new file mode 100755
index 0000000..92b77b6
--- /dev/null
+++ b/libvideoeditor/lvpp/VideoEditorAudioPlayer.h
@@ -0,0 +1,90 @@
+/*

+ * Copyright (C) 2011 NXP Software

+ * Copyright (C) 2011 The Android Open Source Project

+ *

+ * Licensed under the Apache License, Version 2.0 (the "License");

+ * you may not use this file except in compliance with the License.

+ * You may obtain a copy of the License at

+ *

+ *      http://www.apache.org/licenses/LICENSE-2.0

+ *

+ * Unless required by applicable law or agreed to in writing, software

+ * distributed under the License is distributed on an "AS IS" BASIS,

+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

+ * See the License for the specific language governing permissions and

+ * limitations under the License.

+ */

+

+#ifndef VE_AUDIO_PLAYER_H_

+

+#define VE_AUDIO_PLAYER_H_

+

+#include <media/MediaPlayerInterface.h>

+#include <media/stagefright/MediaBuffer.h>

+#include <media/stagefright/TimeSource.h>

+#include <utils/threads.h>

+#include "M4xVSS_API.h"

+#include "VideoEditorMain.h"

+#include "M4OSA_FileReader.h"

+#include "VideoEditorBGAudioProcessing.h"

+#include <media/stagefright/AudioPlayer.h>

+

+namespace android {

+

+class MediaSource;

+class AudioTrack;

+class PreviewPlayer;

+

+

+class VideoEditorAudioPlayer : public AudioPlayer {

+public:

+    enum {

+        REACHED_EOS,

+        SEEK_COMPLETE

+    };

+

+    VideoEditorAudioPlayer(const sp<MediaPlayerBase::AudioSink> &audioSink,

+        AwesomePlayer *audioObserver = NULL);

+

+    virtual ~VideoEditorAudioPlayer();

+

+    status_t start(bool sourceAlreadyStarted = false);

+

+    void setAudioMixSettings(M4xVSS_AudioMixingSettings* pAudioMixSettings);

+    void setAudioMixPCMFileHandle(M4OSA_Context pBGAudioPCMFileHandle);

+    void setAudioMixStoryBoardSkimTimeStamp(

+        M4OSA_UInt32 pBGAudioStoryBoardSkimTimeStamp,

+        M4OSA_UInt32 pBGAudioCurrentMediaBeginCutTS,

+        M4OSA_UInt32 pBGAudioCurrentMediaVolumeVal);

+

+private:

+

+    M4xVSS_AudioMixingSettings *mAudioMixSettings;

+    VideoEditorBGAudioProcessing *mAudioProcess;

+

+    M4OSA_Context mBGAudioPCMFileHandle;

+    int64_t mBGAudioPCMFileLength;

+    int64_t mBGAudioPCMFileTrimmedLength;

+    int64_t mBGAudioPCMFileDuration;

+    int64_t mBGAudioPCMFileSeekPoint;

+    int64_t mBGAudioPCMFileOriginalSeekPoint;

+    int64_t mBGAudioStoryBoardSkimTimeStamp;

+    int64_t mBGAudioStoryBoardCurrentMediaBeginCutTS;

+    int64_t mBGAudioStoryBoardCurrentMediaVolumeVal;

+

+    size_t fillBuffer(void *data, size_t size);

+

+    void reset();

+    void setPrimaryTrackVolume(M4OSA_Int16 *data, M4OSA_UInt32 size, M4OSA_Float volLevel);

+

+    static size_t AudioSinkCallback(

+            MediaPlayerBase::AudioSink *audioSink,

+            void *data, size_t size, void *me);

+

+    VideoEditorAudioPlayer(const VideoEditorAudioPlayer &);

+    VideoEditorAudioPlayer &operator=(const VideoEditorAudioPlayer &);

+};

+

+}  // namespace android

+

+#endif  // VE_AUDIO_PLAYER_H_

diff --git a/libvideoeditor/lvpp/VideoEditorBGAudioProcessing.cpp b/libvideoeditor/lvpp/VideoEditorBGAudioProcessing.cpp
new file mode 100755
index 0000000..2049f08
--- /dev/null
+++ b/libvideoeditor/lvpp/VideoEditorBGAudioProcessing.cpp
@@ -0,0 +1,350 @@
+/*

+ * Copyright (C) 2011 NXP Software

+ * Copyright (C) 2011 The Android Open Source Project

+ *

+ * Licensed under the Apache License, Version 2.0 (the "License");

+ * you may not use this file except in compliance with the License.

+ * You may obtain a copy of the License at

+ *

+ *      http://www.apache.org/licenses/LICENSE-2.0

+ *

+ * Unless required by applicable law or agreed to in writing, software

+ * distributed under the License is distributed on an "AS IS" BASIS,

+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

+ * See the License for the specific language governing permissions and

+ * limitations under the License.

+ */

+

+#define LOG_NDEBUG 1

+#define LOG_TAG "VideoEditorBGAudioProcessing"

+#include <utils/Log.h>

+#include "VideoEditorBGAudioProcessing.h"

+

+namespace android {

+

+VideoEditorBGAudioProcessing::VideoEditorBGAudioProcessing() {
+
+    LOGV("VideoEditorBGAudioProcessing:: Construct VideoEditorBGAudioProcessing");
+
+    mAudVolArrIndex = 0;
+    mDoDucking = 0;
+    mDucking_enable = 0;
+    mDucking_lowVolume = 0;
+    mDucking_threshold = 0;
+    mDuckingFactor = 0;
+
+    mBTVolLevel = 0;
+    mPTVolLevel = 0;
+
+    mIsSSRCneeded = 0;
+    mChannelConversion = 0;
+
+    mBTFormat = MONO_16_BIT;
+
+    mInSampleRate = 8000;
+    mOutSampleRate = 16000;
+    mPTChannelCount = 2;
+    mBTChannelCount = 1;
+}

+

+M4OSA_Int32 VideoEditorBGAudioProcessing::veProcessAudioMixNDuck(

+        void *pPTBuffer, void *pBTBuffer, void *pOutBuffer) {

+

+    M4AM_Buffer* pPrimaryTrack   = (M4AM_Buffer*)pPTBuffer;

+    M4AM_Buffer* pBackgroundTrack = (M4AM_Buffer*)pBTBuffer;

+    M4AM_Buffer* pMixedOutBuffer  = (M4AM_Buffer*)pOutBuffer;

+

+    LOGV("VideoEditorBGAudioProcessing::lvProcessAudioMixNDuck \

+     pPTBuffer 0x%x pBTBuffer 0x%x pOutBuffer 0x%x", pPTBuffer,

+      pBTBuffer, pOutBuffer);

+

+    M4OSA_ERR result = M4NO_ERROR;

+    M4OSA_Int16 *pBTMdata1;

+    M4OSA_Int16 *pPTMdata2;

+    M4OSA_UInt32 uiPCMsize;

+

+    // Ducking variable

+    M4OSA_UInt16 loopIndex = 0;

+    M4OSA_Int16 *pPCM16Sample = M4OSA_NULL;

+    M4OSA_Int32 peakDbValue = 0;

+    M4OSA_Int32 previousDbValue = 0;

+    M4OSA_UInt32 i;

+

+    // Output size is the same as the PT size

+    pMixedOutBuffer->m_bufferSize = pPrimaryTrack->m_bufferSize;

+

+    // Before mixing, we need to have only PT as out buffer

+    M4OSA_memcpy((M4OSA_MemAddr8)pMixedOutBuffer->m_dataAddress,

+     (M4OSA_MemAddr8)pPrimaryTrack->m_dataAddress, pMixedOutBuffer->m_bufferSize);

+

+    // Initially contains the input primary track

+    pPTMdata2 = (M4OSA_Int16*)pMixedOutBuffer->m_dataAddress;

+    // Contains BG track processed data (e.g., after channel conversion)

+    pBTMdata1 = (M4OSA_Int16*) pBackgroundTrack->m_dataAddress;

+

+    // Since we need to give sample count and not buffer size

+    uiPCMsize = pMixedOutBuffer->m_bufferSize / 2;

+

+    if((this->mDucking_enable) && (this->mPTVolLevel != 0.0)) {

+        // LOGI("VideoEditorBGAudioProcessing:: In Ducking analysis ");

+        loopIndex = 0;

+        peakDbValue = 0;

+        previousDbValue = peakDbValue;

+

+        pPCM16Sample = (M4OSA_Int16*)pPrimaryTrack->m_dataAddress;

+

+        while( loopIndex < pPrimaryTrack->m_bufferSize/sizeof(M4OSA_Int16))

+        {

+            if (pPCM16Sample[loopIndex] >= 0){

+                peakDbValue = previousDbValue > pPCM16Sample[loopIndex] ?

+                 previousDbValue : pPCM16Sample[loopIndex];

+                previousDbValue = peakDbValue;

+            }else{

+                peakDbValue = previousDbValue > -pPCM16Sample[loopIndex] ?

+                 previousDbValue: -pPCM16Sample[loopIndex];

+                previousDbValue = peakDbValue;

+            }

+            loopIndex++;

+        }

+

+        this->mAudioVolumeArray[this->mAudVolArrIndex] =

+         getDecibelSound(peakDbValue);

+

+        LOGV("VideoEditorBGAudioProcessing:: getDecibelSound %d",

+         this->mAudioVolumeArray[this->mAudVolArrIndex]);

+

+        // WINDOW_SIZE is 10 by default

+        // Check for threshold is done after 10 cycles

+        if ( this->mAudVolArrIndex >= WINDOW_SIZE -1) {

+            this->mDoDucking = isThresholdBreached(this->mAudioVolumeArray,

+             this->mAudVolArrIndex,this->mDucking_threshold );

+

+            this->mAudVolArrIndex = 0;

+        } else {

+            this->mAudVolArrIndex++;

+        }

+

+        //
+        // The logic below controls the mixing weightage of the Background
+        // and Primary tracks for the duration of the window under
+        // analysis: it fades out the Background track and fades in the
+        // Primary track. The fading factor is distributed evenly over the
+        // defined window size, e.g. for a window size of 25
+        // (500 ms under analysis / 20 ms per sample).
+        //

+

+        if(this->mDoDucking){

+            if ( this->mDuckingFactor > this->mDucking_lowVolume) {

+                // FADE OUT BG Track
+                // Decrement the ducking factor in steps of the low-volume
+                // level until the low-volume level is reached
+                this->mDuckingFactor -= (this->mDucking_lowVolume);

+            }

+            else {

+                this->mDuckingFactor = this->mDucking_lowVolume;

+            }

+        } else {

+            if ( this->mDuckingFactor < 1.0 ){

+                // FADE IN BG Track
+                // Increment the ducking factor in steps of the low-volume
+                // level until the original volume level is reached
+                this->mDuckingFactor += (this->mDucking_lowVolume);

+            }

+            else{

+                this->mDuckingFactor = 1.0;

+            }

+        }

+    } // end if - mDucking_enable

+

+

+    // Mixing Logic

+

+    LOGV("VideoEditorBGAudioProcessing:: Out of Ducking analysis uiPCMsize\

+     %d %f %f", this->mDoDucking, this->mDuckingFactor,this->mBTVolLevel);

+

+    while(uiPCMsize-->0) {

+

+        M4OSA_Int32 temp;

+        // Set vol factor for BT and PT

+        *pBTMdata1 = (M4OSA_Int16)(*pBTMdata1*this->mBTVolLevel);

+        *pPTMdata2 = (M4OSA_Int16)(*pPTMdata2*this->mPTVolLevel);

+

+        // Mix the two samples

+        if ( this->mDoDucking) {

+

+            // Duck the BG track to ducking factor value before mixing

+            *pBTMdata1 = (M4OSA_Int16)((*pBTMdata1)*(this->mDuckingFactor));

+

+            // mix as normal case

+            *pBTMdata1 = (M4OSA_Int16)(*pBTMdata1 /2 + *pPTMdata2 /2);

+        }

+        else {

+

+            *pBTMdata1 = (M4OSA_Int16)((*pBTMdata1)*(this->mDuckingFactor));

+            *pBTMdata1 = (M4OSA_Int16)(*pBTMdata1 /2 + *pPTMdata2 /2);

+        }

+

+        if ( *pBTMdata1 < 0) {

+            temp = -(*pBTMdata1) * 2; // bring to original Amplitude level

+

+            if ( temp > 32767) {

+                *pBTMdata1 = -32766; // less than max allowed value

+            }

+            else{

+                *pBTMdata1 = (M4OSA_Int16)(-temp);

+            }

+        }

+        else {

+            temp = (*pBTMdata1) * 2; // bring to original Amplitude level

+            if ( temp > 32767) {

+                *pBTMdata1 = 32767; // less than max allowed value

+            }

+            else {

+                *pBTMdata1 = (M4OSA_Int16)temp;

+            }

+        }

+

+        pBTMdata1++;

+        pPTMdata2++;

+    }

+    //LOGV("VideoEditorBGAudioProcessing:: Copy final out ");

+    M4OSA_memcpy((M4OSA_MemAddr8)pMixedOutBuffer->m_dataAddress,

+     (M4OSA_MemAddr8)pBackgroundTrack->m_dataAddress,

+      pBackgroundTrack->m_bufferSize);

+

+    LOGV("VideoEditorBGAudioProcessing::lvProcessAudioMixNDuck EXIT");

+    return result;

+}
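+
+// The mixing loop above averages the two tracks (x/2 + y/2), scales the
+// result back to full amplitude, and clamps. Up to the rounding from the
+// halving, this is equivalent to a single saturating add (illustrative
+// sketch, not the shipped code):
+//
+//     static inline M4OSA_Int16 mixSaturate(M4OSA_Int16 a, M4OSA_Int16 b) {
+//         M4OSA_Int32 sum = (M4OSA_Int32)a + (M4OSA_Int32)b;
+//         if (sum > 32767)  return 32767;
+//         if (sum < -32768) return -32768;
+//         return (M4OSA_Int16)sum;
+//     }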

+

+VideoEditorBGAudioProcessing::~VideoEditorBGAudioProcessing() {
+}

+

+M4OSA_Int32 VideoEditorBGAudioProcessing::calculateOutResampleBufSize() {

+

+    M4OSA_Int32 bufSize =0;

+

+    // This already takes care of channel count in mBTBuffer.m_bufferSize.
+    // Multiply before dividing (using 64-bit math) so that non-integer
+    // rate ratios do not truncate to zero.
+    bufSize = (M4OSA_Int32)(((int64_t)this->mOutSampleRate *
+     this->mBTBuffer.m_bufferSize) / this->mInSampleRate);

+

+    return bufSize;

+}
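+
+// Example of the scaling above (illustrative figures only): with
+// mInSampleRate = 8000, mOutSampleRate = 16000 and a background-track
+// buffer of 2048 bytes, the resampled output needs
+// (16000 * 2048) / 8000 = 4096 bytes.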

+

+void VideoEditorBGAudioProcessing::veSetAudioProcessingParams(

+        veAudMixSettings gInputParams) {

+

+    LOGV("VideoEditorBGAudioProcessing:: ENTER lvSetAudioProcessingParams ");

+    this->mDucking_enable       = gInputParams.lvInDucking_enable;

+    this->mDucking_lowVolume    = gInputParams.lvInDucking_lowVolume;

+    this->mDucking_threshold    = gInputParams.lvInDucking_threshold;

+

+    this->mPTVolLevel           = gInputParams.lvPTVolLevel;

+    this->mBTVolLevel           = gInputParams.lvBTVolLevel ;

+

+    this->mBTChannelCount       = gInputParams.lvBTChannelCount;

+    this->mPTChannelCount       = gInputParams.lvPTChannelCount;

+

+    this->mBTFormat             = gInputParams.lvBTFormat;

+

+    this->mInSampleRate         = gInputParams.lvInSampleRate;

+    this->mOutSampleRate        = gInputParams.lvOutSampleRate;

+

+    this->mAudVolArrIndex       = 0;

+    this->mDoDucking            = 0;

+    this->mDuckingFactor        = 1.0; // default

+

+    LOGV("VideoEditorBGAudioProcessing::  ducking_enable 0x%x \

+     ducking_lowVolume %f  ducking_threshold %d  fPTVolLevel %f BTVolLevel %f",

+     this->mDucking_enable, this->mDucking_lowVolume, this->mDucking_threshold,

+     this->mPTVolLevel, this->mBTVolLevel);

+

+    // Following logic decides if SSRC support is needed for this mixing

+    if ( gInputParams.lvInSampleRate != gInputParams.lvOutSampleRate){

+        this->mIsSSRCneeded      = 1;

+    }else{

+        this->mIsSSRCneeded      = 0;

+    }

+    if( gInputParams.lvBTChannelCount != gInputParams.lvPTChannelCount){

+        if (gInputParams.lvBTChannelCount == 2){

+            this->mChannelConversion   = 1; // convert to MONO

+        }else{

+            this->mChannelConversion   = 2; // Convert to STEREO

+        }

+    }else{

+        this->mChannelConversion   = 0;

+    }

+    LOGV("VideoEditorBGAudioProcessing:: EXIT veSetAudioProcessingParams ");

+}
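+
+// Channel-conversion decision above, by example (illustrative only):
+// a stereo background track (lvBTChannelCount == 2) mixed with a mono
+// primary track yields mChannelConversion == 1 (downmix BT to mono);
+// a mono BT with a stereo PT yields 2 (upmix BT to stereo); equal
+// channel counts yield 0 (no conversion).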

+

+

+M4OSA_Int32 VideoEditorBGAudioProcessing::getDecibelSound(M4OSA_UInt32 value) {

+

+    int dbSound = 1;

+

+    if (value == 0) return 0;

+

+    if (value > 0x4000 && value <= 0x8000) // 32768

+        dbSound = 90;

+    else if (value > 0x2000 && value <= 0x4000) // 16384

+        dbSound = 84;

+    else if (value > 0x1000 && value <= 0x2000) // 8192

+        dbSound = 78;

+    else if (value > 0x0800 && value <= 0x1000) // 4096

+        dbSound = 72;

+    else if (value > 0x0400 && value <= 0x0800) // 2048

+        dbSound = 66;

+    else if (value > 0x0200 && value <= 0x0400) // 1024

+        dbSound = 60;

+    else if (value > 0x0100 && value <= 0x0200) // 512

+        dbSound = 54;

+    else if (value > 0x0080 && value <= 0x0100) // 256

+        dbSound = 48;

+    else if (value > 0x0040 && value <= 0x0080) // 128

+        dbSound = 42;

+    else if (value > 0x0020 && value <= 0x0040) // 64

+        dbSound = 36;

+    else if (value > 0x0010 && value <= 0x0020) // 32

+        dbSound = 30;

+    else if (value > 0x0008 && value <= 0x0010) //16

+        dbSound = 24;

+    else if (value > 0x0007 && value <= 0x0008) //8

+        dbSound = 24;

+    else if (value > 0x0003 && value <= 0x0007) // 4

+        dbSound = 18;

+    else if (value > 0x0001 && value <= 0x0003) //2

+        dbSound = 12;

+    else if (value > 0x000 && value <= 0x0001) // 1

+        dbSound = 6;

+    else

+        dbSound = 0;

+

+    return dbSound;

+}
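+
+// The table above approximates 6 dB per doubling of the peak amplitude.
+// A closed-form sketch (illustrative; it agrees with the table except at
+// exact powers of two, where the table's inclusive upper bounds shift
+// the result by one 6 dB step):
+//
+//     #include <math.h>
+//     static int approxDecibel(unsigned value) {
+//         if (value == 0) return 0;
+//         return 6 * ((int)floor(log2((double)value)) + 1);
+//     }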

+

+M4OSA_Bool VideoEditorBGAudioProcessing::isThresholdBreached(

+        M4OSA_Int32* averageValue, M4OSA_Int32 storeCount,

+         M4OSA_Int32 thresholdValue) {

+

+    M4OSA_Bool result = 0;

+    int i;

+    int finalValue = 0;

+

+    for (i=0; i< storeCount;i++)

+        finalValue += averageValue[i];

+

+    finalValue = finalValue/storeCount;

+

+    if (finalValue > thresholdValue)

+        result = M4OSA_TRUE;

+    else

+        result = M4OSA_FALSE;

+

+    return result;

+}
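+
+// Example (illustrative figures): with averageValue = {84, 78, 90, 84},
+// storeCount = 4 and thresholdValue = 80, the mean is
+// (84 + 78 + 90 + 84) / 4 = 84 > 80, so M4OSA_TRUE is returned.
+// Note that the caller above passes mAudVolArrIndex (WINDOW_SIZE - 1)
+// as storeCount, so the most recently stored entry is excluded from
+// the mean.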

+

+} // namespace android

diff --git a/libvideoeditor/lvpp/VideoEditorBGAudioProcessing.h b/libvideoeditor/lvpp/VideoEditorBGAudioProcessing.h
new file mode 100755
index 0000000..851a133
--- /dev/null
+++ b/libvideoeditor/lvpp/VideoEditorBGAudioProcessing.h
@@ -0,0 +1,96 @@
+/*

+ * Copyright (C) 2011 NXP Software

+ * Copyright (C) 2011 The Android Open Source Project

+ *

+ * Licensed under the Apache License, Version 2.0 (the "License");

+ * you may not use this file except in compliance with the License.

+ * You may obtain a copy of the License at

+ *

+ *      http://www.apache.org/licenses/LICENSE-2.0

+ *

+ * Unless required by applicable law or agreed to in writing, software

+ * distributed under the License is distributed on an "AS IS" BASIS,

+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

+ * See the License for the specific language governing permissions and

+ * limitations under the License.

+ */

+
+#ifndef VE_BG_AUDIO_PROC_H_
+#define VE_BG_AUDIO_PROC_H_
+

+#include "M4OSA_Error.h"

+#include "M4OSA_Types.h"

+#include "M4OSA_Memory.h"

+#include "M4OSA_Export.h"

+#include "M4OSA_CoreID.h"

+

+namespace android{

+

+#define WINDOW_SIZE 10

+

+enum veAudioFormat {MONO_16_BIT, STEREO_16_BIT};

+

+

+typedef struct {

+    M4OSA_UInt16*   m_dataAddress; // Android SRC needs an Int16 pointer

+    M4OSA_UInt32    m_bufferSize;

+} M4AM_Buffer;

+

+// Following struct will be used by app to supply the PT and BT properties

+// along with ducking values

+typedef struct {

+    M4OSA_Int32 lvInSampleRate;  // Sampling audio freq (8000, 16000, or more)
+    M4OSA_Int32 lvOutSampleRate; // Sampling audio freq (8000, 16000, or more)

+    veAudioFormat lvBTFormat;

+

+    M4OSA_Int32 lvInDucking_threshold;

+    M4OSA_Float lvInDucking_lowVolume;

+    M4OSA_Bool lvInDucking_enable;

+    M4OSA_Float lvPTVolLevel;

+    M4OSA_Float lvBTVolLevel;

+    M4OSA_Int32 lvBTChannelCount;

+    M4OSA_Int32 lvPTChannelCount;

+} veAudMixSettings;
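+
+// Illustrative initialization of this struct (hypothetical values, shown
+// only to document the fields; not part of this change):
+//
+//     veAudMixSettings params;
+//     params.lvInSampleRate        = 16000;
+//     params.lvOutSampleRate       = 32000;
+//     params.lvBTFormat            = MONO_16_BIT;
+//     params.lvInDucking_enable    = M4OSA_TRUE;
+//     params.lvInDucking_threshold = 45;
+//     params.lvInDucking_lowVolume = 0.10f;
+//     params.lvPTVolLevel          = 1.0f;
+//     params.lvBTVolLevel          = 0.5f;
+//     params.lvPTChannelCount      = 2;
+//     params.lvBTChannelCount      = 1;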

+

+// This class is defined to get SF SRC access

+class VideoEditorBGAudioProcessing {

+public:

+    VideoEditorBGAudioProcessing();

+    void veSetAudioProcessingParams(veAudMixSettings mixParams);

+    M4OSA_Int32 veProcessAudioMixNDuck(void* , void *, void *);

+

+protected:

+    ~VideoEditorBGAudioProcessing();

+

+private:

+    M4OSA_Int32 mInSampleRate;

+    M4OSA_Int32 mOutSampleRate;

+    veAudioFormat mBTFormat;

+

+    M4OSA_Bool mIsSSRCneeded;

+    M4OSA_Int32 mBTChannelCount;

+    M4OSA_Int32 mPTChannelCount;

+    M4OSA_UInt8 mChannelConversion;

+

+    M4OSA_UInt32 mDucking_threshold;

+    M4OSA_Float mDucking_lowVolume;

+    M4OSA_Float mDuckingFactor ;

+    M4OSA_Bool mDucking_enable;

+    M4OSA_Int32 mAudioVolumeArray[WINDOW_SIZE];

+    M4OSA_Int32 mAudVolArrIndex;

+    M4OSA_Bool mDoDucking;

+    M4OSA_Float mPTVolLevel;

+    M4OSA_Float mBTVolLevel;

+

+    M4AM_Buffer mPTBuffer;

+    M4AM_Buffer mBTBuffer;

+    M4AM_Buffer mOutMixBuffer;

+    M4OSA_Int16 *mTempBuffer;

+    M4OSA_Int32 mTempFrameCount;

+

+    M4OSA_Int32 getDecibelSound(M4OSA_UInt32 value);

+    M4OSA_Bool  isThresholdBreached(M4OSA_Int32* averageValue,

+     M4OSA_Int32 storeCount, M4OSA_Int32 thresholdValue);

+

+    // This returns the size of the buffer which needs to be allocated

+    // before resampling is called

+    M4OSA_Int32 calculateOutResampleBufSize();

+};

+} // namespace android
+
+#endif  // VE_BG_AUDIO_PROC_H_

diff --git a/libvideoeditor/lvpp/VideoEditorPlayer.cpp b/libvideoeditor/lvpp/VideoEditorPlayer.cpp
new file mode 100755
index 0000000..46b18d9
--- /dev/null
+++ b/libvideoeditor/lvpp/VideoEditorPlayer.cpp
@@ -0,0 +1,542 @@
+/*

+ * Copyright (C) 2011 NXP Software

+ * Copyright (C) 2011 The Android Open Source Project

+ *

+ * Licensed under the Apache License, Version 2.0 (the "License");

+ * you may not use this file except in compliance with the License.

+ * You may obtain a copy of the License at

+ *

+ *      http://www.apache.org/licenses/LICENSE-2.0

+ *

+ * Unless required by applicable law or agreed to in writing, software

+ * distributed under the License is distributed on an "AS IS" BASIS,

+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

+ * See the License for the specific language governing permissions and

+ * limitations under the License.

+ */

+

+#define LOG_NDEBUG 1

+#define LOG_TAG "VideoEditorPlayer"

+#include <utils/Log.h>

+

+#include "VideoEditorPlayer.h"

+#include "PreviewPlayer.h"

+

+#include <media/Metadata.h>

+#include <media/stagefright/MediaExtractor.h>

+

+namespace android {

+

+VideoEditorPlayer::VideoEditorPlayer()

+    : mPlayer(new PreviewPlayer) {

+

+    LOGV("VideoEditorPlayer");

+    mPlayer->setListener(this);

+}

+

+VideoEditorPlayer::~VideoEditorPlayer() {

+    LOGV("~VideoEditorPlayer");

+

+    reset();

+    mVeAudioSink.clear();

+

+    delete mPlayer;

+    mPlayer = NULL;

+}

+

+status_t VideoEditorPlayer::initCheck() {

+    LOGV("initCheck");

+    return OK;

+}

+

+status_t VideoEditorPlayer::setDataSource(

+        const char *url, const KeyedVector<String8, String8> *headers) {

+    LOGI("setDataSource('%s')", url);

+

+    mVeAudioSink = new VeAudioOutput();

+    mPlayer->setAudioSink(mVeAudioSink);

+

+    return mPlayer->setDataSource(url, headers);

+}

+

+// We do not use this in preview; dummy implementation, as this is pure virtual

+status_t VideoEditorPlayer::setDataSource(int fd, int64_t offset,

+    int64_t length) {

+    LOGE("setDataSource(%d, %lld, %lld) Not supported", fd, offset, length);

+    return INVALID_OPERATION;

+}

+

+status_t VideoEditorPlayer::setVideoISurface(const sp<ISurface> &surface) {

+    LOGV("setVideoISurface");

+

+    mPlayer->setISurface(surface);

+    return OK;

+}

+

+status_t VideoEditorPlayer::setVideoSurface(const sp<Surface> &surface) {

+    LOGV("setVideoSurface");

+

+    mPlayer->setSurface(surface);

+    return OK;

+}

+

+status_t VideoEditorPlayer::prepare() {

+    LOGV("prepare");

+    return mPlayer->prepare();

+}

+

+status_t VideoEditorPlayer::prepareAsync() {

+    return mPlayer->prepareAsync();

+}

+

+status_t VideoEditorPlayer::start() {

+    LOGV("start");

+    return mPlayer->play();

+}

+

+status_t VideoEditorPlayer::stop() {

+    LOGV("stop");

+    return pause();

+}

+

+status_t VideoEditorPlayer::pause() {

+    LOGV("pause");

+    return mPlayer->pause();

+}

+

+bool VideoEditorPlayer::isPlaying() {

+    LOGV("isPlaying");

+    return mPlayer->isPlaying();

+}

+

+status_t VideoEditorPlayer::seekTo(int msec) {

+    LOGV("seekTo");

+    status_t err = mPlayer->seekTo((int64_t)msec * 1000);

+    return err;

+}

+

+status_t VideoEditorPlayer::getCurrentPosition(int *msec) {

+    LOGV("getCurrentPosition");

+    int64_t positionUs;

+    status_t err = mPlayer->getPosition(&positionUs);

+

+    if (err != OK) {

+        return err;

+    }

+

+    *msec = (positionUs + 500) / 1000;

+    return OK;

+}

+

+status_t VideoEditorPlayer::getDuration(int *msec) {

+    LOGV("getDuration");

+

+    int64_t durationUs;

+    status_t err = mPlayer->getDuration(&durationUs);

+

+    if (err != OK) {

+        *msec = 0;

+        return OK;

+    }

+

+    *msec = (durationUs + 500) / 1000;

+    return OK;

+}

+

+status_t VideoEditorPlayer::reset() {

+    LOGV("reset");

+    mPlayer->reset();

+    return OK;

+}

+

+status_t VideoEditorPlayer::setLooping(int loop) {

+    LOGV("setLooping");

+    return mPlayer->setLooping(loop);

+}

+

+player_type VideoEditorPlayer::playerType() {

+    LOGV("playerType");

+    return STAGEFRIGHT_PLAYER;

+}

+

+status_t VideoEditorPlayer::suspend() {

+    LOGV("suspend");

+    return mPlayer->suspend();

+}

+

+status_t VideoEditorPlayer::resume() {

+    LOGV("resume");

+    return mPlayer->resume();

+}

+

+status_t VideoEditorPlayer::invoke(const Parcel &request, Parcel *reply) {

+    return INVALID_OPERATION;

+}

+

+void VideoEditorPlayer::setAudioSink(const sp<AudioSink> &audioSink) {

+    MediaPlayerInterface::setAudioSink(audioSink);

+

+    mPlayer->setAudioSink(audioSink);

+}

+

+status_t VideoEditorPlayer::getMetadata(

+        const media::Metadata::Filter& ids, Parcel *records) {

+    using media::Metadata;

+

+    uint32_t flags = mPlayer->flags();

+

+    Metadata metadata(records);

+

+    metadata.appendBool(

+            Metadata::kPauseAvailable,

+            flags & MediaExtractor::CAN_PAUSE);

+

+    metadata.appendBool(

+            Metadata::kSeekBackwardAvailable,

+            flags & MediaExtractor::CAN_SEEK_BACKWARD);

+

+    metadata.appendBool(

+            Metadata::kSeekForwardAvailable,

+            flags & MediaExtractor::CAN_SEEK_FORWARD);

+

+    metadata.appendBool(

+            Metadata::kSeekAvailable,

+            flags & MediaExtractor::CAN_SEEK);

+

+    return OK;

+}

+

+status_t VideoEditorPlayer::loadEffectsSettings(

+    M4VSS3GPP_EffectSettings* pEffectSettings, int nEffects) {

+    LOGV("loadEffectsSettings");

+    return mPlayer->loadEffectsSettings(pEffectSettings, nEffects);

+}

+

+status_t VideoEditorPlayer::loadAudioMixSettings(

+    M4xVSS_AudioMixingSettings* pAudioMixSettings) {

+    LOGV("VideoEditorPlayer: loadAudioMixSettings");

+    return mPlayer->loadAudioMixSettings(pAudioMixSettings);

+}

+

+status_t VideoEditorPlayer::setAudioMixPCMFileHandle(

+    M4OSA_Context pAudioMixPCMFileHandle) {

+

+    LOGV("VideoEditorPlayer: loadAudioMixSettings");

+    return mPlayer->setAudioMixPCMFileHandle(pAudioMixPCMFileHandle);

+}

+

+status_t VideoEditorPlayer::setAudioMixStoryBoardParam(

+    M4OSA_UInt32 audioMixStoryBoardTS,

+    M4OSA_UInt32 currentMediaBeginCutTime,

+    M4OSA_UInt32 primaryTrackVolValue) {

+

+    LOGV("VideoEditorPlayer: loadAudioMixSettings");

+    return mPlayer->setAudioMixStoryBoardParam(audioMixStoryBoardTS,

+     currentMediaBeginCutTime, primaryTrackVolValue);

+}

+

+status_t VideoEditorPlayer::setPlaybackBeginTime(uint32_t msec) {

+    LOGV("setPlaybackBeginTime");

+    return mPlayer->setPlaybackBeginTime(msec);

+}

+

+status_t VideoEditorPlayer::setPlaybackEndTime(uint32_t msec) {

+    LOGV("setPlaybackEndTime");

+    return mPlayer->setPlaybackEndTime(msec);

+}

+

+status_t VideoEditorPlayer::setStoryboardStartTime(uint32_t msec) {

+    LOGV("setStoryboardStartTime");

+    return mPlayer->setStoryboardStartTime(msec);

+}

+

+status_t VideoEditorPlayer::setProgressCallbackInterval(uint32_t cbInterval) {

+    LOGV("setProgressCallbackInterval");

+    return mPlayer->setProgressCallbackInterval(cbInterval);

+}

+

+status_t VideoEditorPlayer::setMediaRenderingMode(

+    M4xVSS_MediaRendering mode,

+    M4VIDEOEDITING_VideoFrameSize outputVideoSize) {

+

+    LOGV("setMediaRenderingMode");

+    return mPlayer->setMediaRenderingMode(mode, outputVideoSize);

+}

+

+status_t VideoEditorPlayer::resetJniCallbackTimeStamp() {

+    LOGV("resetJniCallbackTimeStamp");

+    return mPlayer->resetJniCallbackTimeStamp();

+}

+

+status_t VideoEditorPlayer::setImageClipProperties(

+    uint32_t width, uint32_t height) {

+    return mPlayer->setImageClipProperties(width, height);

+}

+

+status_t VideoEditorPlayer::readFirstVideoFrame() {

+    return mPlayer->readFirstVideoFrame();

+}

+

+/* Implementation of AudioSink interface */

+#undef LOG_TAG

+#define LOG_TAG "VeAudioSink"

+

+int VideoEditorPlayer::VeAudioOutput::mMinBufferCount = 4;

+bool VideoEditorPlayer::VeAudioOutput::mIsOnEmulator = false;

+

+VideoEditorPlayer::VeAudioOutput::VeAudioOutput()

+    : mCallback(NULL),

+      mCallbackCookie(NULL) {

+    mTrack = 0;
+    mSessionId = 0;  // initialize; returned by getSessionId()

+    mStreamType = AudioSystem::MUSIC;

+    mLeftVolume = 1.0;

+    mRightVolume = 1.0;

+    mLatency = 0;

+    mMsecsPerFrame = 0;

+    mNumFramesWritten = 0;

+    setMinBufferCount();

+}

+

+VideoEditorPlayer::VeAudioOutput::~VeAudioOutput() {

+    close();

+}

+

+void VideoEditorPlayer::VeAudioOutput::setMinBufferCount() {

+

+    mIsOnEmulator = false;

+    mMinBufferCount = 12;

+}

+

+bool VideoEditorPlayer::VeAudioOutput::isOnEmulator() {

+

+    setMinBufferCount();

+    return mIsOnEmulator;

+}

+

+int VideoEditorPlayer::VeAudioOutput::getMinBufferCount() {

+

+    setMinBufferCount();

+    return mMinBufferCount;

+}

+

+ssize_t VideoEditorPlayer::VeAudioOutput::bufferSize() const {

+

+    if (mTrack == 0) return NO_INIT;

+    return mTrack->frameCount() * frameSize();

+}

+

+ssize_t VideoEditorPlayer::VeAudioOutput::frameCount() const {

+

+    if (mTrack == 0) return NO_INIT;

+    return mTrack->frameCount();

+}

+

+ssize_t VideoEditorPlayer::VeAudioOutput::channelCount() const

+{

+    if (mTrack == 0) return NO_INIT;

+    return mTrack->channelCount();

+}

+

+ssize_t VideoEditorPlayer::VeAudioOutput::frameSize() const

+{

+    if (mTrack == 0) return NO_INIT;

+    return mTrack->frameSize();

+}

+

+uint32_t VideoEditorPlayer::VeAudioOutput::latency () const

+{

+    return mLatency;

+}

+

+float VideoEditorPlayer::VeAudioOutput::msecsPerFrame() const

+{

+    return mMsecsPerFrame;

+}

+

+status_t VideoEditorPlayer::VeAudioOutput::getPosition(uint32_t *position) {

+

+    if (mTrack == 0) return NO_INIT;

+    return mTrack->getPosition(position);

+}

+

+status_t VideoEditorPlayer::VeAudioOutput::open(

+        uint32_t sampleRate, int channelCount, int format, int bufferCount,

+        AudioCallback cb, void *cookie) {

+

+    mCallback = cb;

+    mCallbackCookie = cookie;

+

+    // Check argument "bufferCount" against the mininum buffer count

+    if (bufferCount < mMinBufferCount) {

+        LOGD("bufferCount (%d) is too small and increased to %d",

+         bufferCount, mMinBufferCount);

+        bufferCount = mMinBufferCount;

+

+    }

+    LOGV("open(%u, %d, %d, %d)", sampleRate, channelCount, format, bufferCount);

+    if (mTrack) close();

+    int afSampleRate;

+    int afFrameCount;

+    int frameCount;

+

+    if (AudioSystem::getOutputFrameCount(&afFrameCount, mStreamType) !=

+     NO_ERROR) {

+        return NO_INIT;

+    }

+    if (AudioSystem::getOutputSamplingRate(&afSampleRate, mStreamType) !=

+     NO_ERROR) {

+        return NO_INIT;

+    }

+

+    frameCount = (sampleRate*afFrameCount*bufferCount)/afSampleRate;

+

+    AudioTrack *t;

+    if (mCallback != NULL) {

+        t = new AudioTrack(

+                mStreamType,

+                sampleRate,

+                format,

+                (channelCount == 2) ?

+                 AudioSystem::CHANNEL_OUT_STEREO : AudioSystem::CHANNEL_OUT_MONO,

+                frameCount,

+                0 /* flags */,

+                CallbackWrapper,

+                this);

+    } else {

+        t = new AudioTrack(

+                mStreamType,

+                sampleRate,

+                format,

+                (channelCount == 2) ?

+                 AudioSystem::CHANNEL_OUT_STEREO : AudioSystem::CHANNEL_OUT_MONO,

+                frameCount);

+    }

+

+    if ((t == 0) || (t->initCheck() != NO_ERROR)) {

+        LOGE("Unable to create audio track");

+        delete t;

+        return NO_INIT;

+    }

+

+    LOGV("setVolume");

+    t->setVolume(mLeftVolume, mRightVolume);

+    mMsecsPerFrame = 1.e3 / (float) sampleRate;

+    mLatency = t->latency();

+    mTrack = t;

+    return NO_ERROR;

+}
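+
+// Example of the frameCount scaling in open() (illustrative figures):
+// with sampleRate = 32000, afSampleRate = 44100, afFrameCount = 1024
+// and bufferCount = 12, the track is created with
+// (32000 * 1024 * 12) / 44100 = 8916 frames.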

+

+void VideoEditorPlayer::VeAudioOutput::start() {

+

+    LOGV("start");

+    if (mTrack) {

+        mTrack->setVolume(mLeftVolume, mRightVolume);

+        mTrack->start();

+        mTrack->getPosition(&mNumFramesWritten);

+    }

+}

+

+void VideoEditorPlayer::VeAudioOutput::snoopWrite(

+    const void* buffer, size_t size) {

+    // Visualization buffers not supported

+    return;

+

+}

+

+ssize_t VideoEditorPlayer::VeAudioOutput::write(

+     const void* buffer, size_t size) {

+

+    LOG_FATAL_IF(mCallback != NULL, "Don't call write if supplying a callback.");

+

+    //LOGV("write(%p, %u)", buffer, size);

+    if (mTrack) {

+        snoopWrite(buffer, size);

+        ssize_t ret = mTrack->write(buffer, size);

+        mNumFramesWritten += ret / 4; // assume 16 bit stereo

+        return ret;

+    }

+    return NO_INIT;

+}

+

+void VideoEditorPlayer::VeAudioOutput::stop() {

+

+    LOGV("stop");

+    if (mTrack) mTrack->stop();

+}

+

+void VideoEditorPlayer::VeAudioOutput::flush() {

+

+    LOGV("flush");

+    if (mTrack) mTrack->flush();

+}

+

+void VideoEditorPlayer::VeAudioOutput::pause() {

+

+    LOGV("VeAudioOutput::pause");

+    if (mTrack) mTrack->pause();

+}

+

+void VideoEditorPlayer::VeAudioOutput::close() {

+

+    LOGV("close");

+    delete mTrack;

+    mTrack = 0;

+}

+

+void VideoEditorPlayer::VeAudioOutput::setVolume(float left, float right) {

+

+    LOGV("setVolume(%f, %f)", left, right);

+    mLeftVolume = left;

+    mRightVolume = right;

+    if (mTrack) {

+        mTrack->setVolume(left, right);

+    }

+}

+

+// static

+void VideoEditorPlayer::VeAudioOutput::CallbackWrapper(

+        int event, void *cookie, void *info) {

+    //LOGV("VeAudioOutput::callbackwrapper");

+    if (event != AudioTrack::EVENT_MORE_DATA) {

+        return;

+    }

+

+    VeAudioOutput *me = (VeAudioOutput *)cookie;

+    AudioTrack::Buffer *buffer = (AudioTrack::Buffer *)info;

+

+    size_t actualSize = (*me->mCallback)(

+            me, buffer->raw, buffer->size, me->mCallbackCookie);

+

+    buffer->size = actualSize;

+

+    if (actualSize > 0) {

+        me->snoopWrite(buffer->raw, actualSize);

+    }

+}

+

+status_t VideoEditorPlayer::VeAudioOutput::dump(int fd, const Vector<String16>& args) const

+{

+    const size_t SIZE = 256;

+    char buffer[SIZE];

+    String8 result;

+

+    result.append(" VeAudioOutput\n");

+    snprintf(buffer, SIZE-1, "  stream type(%d), left - right volume(%f, %f)\n",

+            mStreamType, mLeftVolume, mRightVolume);

+    result.append(buffer);

+    snprintf(buffer, SIZE-1, "  msec per frame(%f), latency (%d)\n",

+            mMsecsPerFrame, mLatency);

+    result.append(buffer);

+    ::write(fd, result.string(), result.size());

+    if (mTrack != 0) {

+        mTrack->dump(fd, args);

+    }

+    return NO_ERROR;

+}

+

+int VideoEditorPlayer::VeAudioOutput::getSessionId() {

+

+    return mSessionId;

+}

+

+}  // namespace android

diff --git a/libvideoeditor/lvpp/VideoEditorPlayer.h b/libvideoeditor/lvpp/VideoEditorPlayer.h
new file mode 100755
index 0000000..47d174d
--- /dev/null
+++ b/libvideoeditor/lvpp/VideoEditorPlayer.h
@@ -0,0 +1,157 @@
+/*

+ * Copyright (C) 2011 NXP Software

+ * Copyright (C) 2011 The Android Open Source Project

+ *

+ * Licensed under the Apache License, Version 2.0 (the "License");

+ * you may not use this file except in compliance with the License.

+ * You may obtain a copy of the License at

+ *

+ *      http://www.apache.org/licenses/LICENSE-2.0

+ *

+ * Unless required by applicable law or agreed to in writing, software

+ * distributed under the License is distributed on an "AS IS" BASIS,

+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

+ * See the License for the specific language governing permissions and

+ * limitations under the License.

+ */

+

+#ifndef ANDROID_VIDEOEDITOR_PLAYER_H

+#define ANDROID_VIDEOEDITOR_PLAYER_H

+

+#include <media/MediaPlayerInterface.h>

+#include "AudioTrack.h"

+#include "M4xVSS_API.h"

+#include "VideoEditorMain.h"

+#include "VideoEditorTools.h"

+

+

+namespace android {

+

+struct PreviewPlayer;

+

+class VideoEditorPlayer : public MediaPlayerInterface {

+

+    class VeAudioOutput: public MediaPlayerBase::AudioSink

+    {

+    public:

+                                VeAudioOutput();

+        virtual                 ~VeAudioOutput();

+

+        virtual bool            ready() const { return mTrack != NULL; }

+        virtual bool            realtime() const { return true; }

+        virtual ssize_t         bufferSize() const;

+        virtual ssize_t         frameCount() const;

+        virtual ssize_t         channelCount() const;

+        virtual ssize_t         frameSize() const;

+        virtual uint32_t        latency() const;

+        virtual float           msecsPerFrame() const;

+        virtual status_t        getPosition(uint32_t *position);

+        virtual int             getSessionId();

+

+        virtual status_t        open(

+                uint32_t sampleRate, int channelCount,

+                int format, int bufferCount,

+                AudioCallback cb, void *cookie);

+

+        virtual void            start();

+        virtual ssize_t         write(const void* buffer, size_t size);

+        virtual void            stop();

+        virtual void            flush();

+        virtual void            pause();

+        virtual void            close();

+        void setAudioStreamType(int streamType) { mStreamType = streamType; }

+                void            setVolume(float left, float right);

+        virtual status_t        dump(int fd,const Vector<String16>& args) const;

+

+        static bool             isOnEmulator();

+        static int              getMinBufferCount();

+    private:

+        static void             setMinBufferCount();

+        static void             CallbackWrapper(

+                int event, void *me, void *info);

+

+        AudioTrack*             mTrack;

+        AudioCallback           mCallback;

+        void *                  mCallbackCookie;

+        int                     mStreamType;

+        float                   mLeftVolume;

+        float                   mRightVolume;

+        float                   mMsecsPerFrame;

+        uint32_t                mLatency;

+        int                     mSessionId;

+        static bool             mIsOnEmulator;

+        static int              mMinBufferCount; // 12 for emulator; otherwise 4

+

+        public:

+        uint32_t                mNumFramesWritten;

+        void                    snoopWrite(const void*, size_t);

+    };

+

+public:

+    VideoEditorPlayer();

+    virtual ~VideoEditorPlayer();

+

+    virtual status_t initCheck();

+

+    virtual status_t setDataSource(

+            const char *url, const KeyedVector<String8, String8> *headers);

+

+    virtual status_t setDataSource(int fd, int64_t offset, int64_t length);

+    virtual status_t setVideoISurface(const sp<ISurface> &surface);

+    virtual status_t setVideoSurface(const sp<Surface> &surface);

+    virtual status_t prepare();

+    virtual status_t prepareAsync();

+    virtual status_t start();

+    virtual status_t stop();

+    virtual status_t pause();

+    virtual bool isPlaying();

+    virtual status_t seekTo(int msec);

+    virtual status_t getCurrentPosition(int *msec);

+    virtual status_t getDuration(int *msec);

+    virtual status_t reset();

+    virtual status_t setLooping(int loop);

+    virtual player_type playerType();

+    virtual status_t invoke(const Parcel &request, Parcel *reply);

+    virtual void setAudioSink(const sp<AudioSink> &audioSink);

+    virtual status_t suspend();

+    virtual status_t resume();

+

+    virtual status_t getMetadata(

+                        const media::Metadata::Filter& ids, Parcel *records);

+

+    virtual status_t loadEffectsSettings(

+                         M4VSS3GPP_EffectSettings* pEffectSettings, int nEffects);

+

+    virtual status_t loadAudioMixSettings(

+                         M4xVSS_AudioMixingSettings* pAudioMixSettings);

+

+    virtual status_t setAudioMixPCMFileHandle(

+                         M4OSA_Context pAudioMixPCMFileHandle);

+

+    virtual status_t setAudioMixStoryBoardParam(

+                         M4OSA_UInt32 x, M4OSA_UInt32 y, M4OSA_UInt32 z);

+

+    virtual status_t setPlaybackBeginTime(uint32_t msec);

+    virtual status_t setPlaybackEndTime(uint32_t msec);

+    virtual status_t setStoryboardStartTime(uint32_t msec);

+    virtual status_t setProgressCallbackInterval(uint32_t cbInterval);

+

+    virtual status_t setMediaRenderingMode(M4xVSS_MediaRendering mode,

+                          M4VIDEOEDITING_VideoFrameSize outputVideoSize);

+

+    virtual status_t resetJniCallbackTimeStamp();

+    virtual status_t setImageClipProperties(uint32_t width, uint32_t height);

+    virtual status_t readFirstVideoFrame();

+

+

+private:

+    PreviewPlayer       *mPlayer;

+    sp<VeAudioOutput>    mVeAudioSink;

+

+    VideoEditorPlayer(const VideoEditorPlayer &);

+    VideoEditorPlayer &operator=(const VideoEditorPlayer &);

+};

+

+}  // namespace android

+

+#endif  // ANDROID_VIDEOEDITOR_PLAYER_H

diff --git a/libvideoeditor/lvpp/VideoEditorPreviewController.cpp b/libvideoeditor/lvpp/VideoEditorPreviewController.cpp
new file mode 100755
index 0000000..d862255
--- /dev/null
+++ b/libvideoeditor/lvpp/VideoEditorPreviewController.cpp
@@ -0,0 +1,1303 @@
+/*

+ * Copyright (C) 2011 NXP Software

+ * Copyright (C) 2011 The Android Open Source Project

+ *

+ * Licensed under the Apache License, Version 2.0 (the "License");

+ * you may not use this file except in compliance with the License.

+ * You may obtain a copy of the License at

+ *

+ *      http://www.apache.org/licenses/LICENSE-2.0

+ *

+ * Unless required by applicable law or agreed to in writing, software

+ * distributed under the License is distributed on an "AS IS" BASIS,

+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

+ * See the License for the specific language governing permissions and

+ * limitations under the License.

+ */

+

+#define LOG_NDEBUG 1

+#define LOG_TAG "VideoEditorPreviewController"

+#include "VideoEditorPreviewController.h"

+

+namespace android {

+

+#define PREVIEW_THREAD_STACK_SIZE                           (65536)

+

+VideoEditorPreviewController::VideoEditorPreviewController()

+    : mCurrentPlayer(0),

+      mThreadContext(NULL),

+      mPlayerState(VePlayerIdle),

+      mClipList(NULL),

+      mNumberClipsInStoryBoard(0),

+      mNumberClipsToPreview(0),

+      mStartingClipIndex(0),

+      mPreviewLooping(M4OSA_FALSE),

+      mCallBackAfterFrameCnt(0),

+      mEffectsSettings(NULL),

+      mNumberEffects(0),

+      mCurrentClipNumber(-1),

+      mClipTotalDuration(0),

+      mCurrentVideoEffect(VIDEO_EFFECT_NONE),

+      mTarget(NULL),

+      mJniCookie(NULL),

+      mJniCallback(NULL),

+      mBackgroundAudioSetting(NULL),

+      mAudioMixPCMFileHandle(NULL),

+      mVideoStoryBoardTimeMsUptoFirstPreviewClip(0),

+      mCurrentPlayedDuration(0),

+      mCurrentClipDuration(0),

+      mOutputVideoWidth(0),

+      mOutputVideoHeight(0),

+      bStopThreadInProgress(false),

+      mSemThreadWait(NULL) {

+    LOGV("VideoEditorPreviewController");

+    mRenderingMode = M4xVSS_kBlackBorders;

+    mIsFiftiesEffectStarted = false;

+

+    for (int i=0; i<NBPLAYER_INSTANCES; i++) {

+        mVePlayer[i] = NULL;

+    }

+}

+

+VideoEditorPreviewController::~VideoEditorPreviewController() {

+    M4OSA_UInt32 i = 0;

+    M4OSA_ERR err = M4NO_ERROR;

+    LOGV("~VideoEditorPreviewController");

+

+    // Stop the thread if its still running

+    if(mThreadContext != NULL) {

+        err = M4OSA_threadSyncStop(mThreadContext);

+        if(err != M4NO_ERROR) {

+            LOGV("~VideoEditorPreviewController: error 0x%x \

+            in trying to stop thread", err);

+            // Continue even if error

+        }

+

+        err = M4OSA_threadSyncClose(mThreadContext);

+        if(err != M4NO_ERROR) {

+            LOGE("~VideoEditorPreviewController: error 0x%x \

+            in trying to close thread", err);

+            // Continue even if error

+        }

+

+        mThreadContext = NULL;

+    }

+

+    for (int playerInst=0; playerInst<NBPLAYER_INSTANCES;

+         playerInst++) {

+        if(mVePlayer[playerInst] != NULL) {

+            LOGV("clearing mVePlayer %d", playerInst);

+            mVePlayer[playerInst].clear();

+        }

+    }

+

+    if(mClipList != NULL) {

+        // Clean up

+        for(i=0;i<mNumberClipsInStoryBoard;i++)

+        {

+            if(mClipList[i]->pFile != NULL) {

+                M4OSA_free((M4OSA_MemAddr32)mClipList[i]->pFile);

+                mClipList[i]->pFile = NULL;

+            }

+

+            M4OSA_free((M4OSA_MemAddr32)mClipList[i]);

+        }

+        M4OSA_free((M4OSA_MemAddr32)mClipList);

+        mClipList = NULL;

+    }

+

+    if(mEffectsSettings) {

+        for(i=0;i<mNumberEffects;i++) {

+            if(mEffectsSettings[i].xVSS.pFramingBuffer != NULL) {

+                M4OSA_free(

+                (M4OSA_MemAddr32)mEffectsSettings[i].xVSS.pFramingBuffer->pac_data);

+

+                M4OSA_free(

+                (M4OSA_MemAddr32)mEffectsSettings[i].xVSS.pFramingBuffer);

+

+                mEffectsSettings[i].xVSS.pFramingBuffer = NULL;

+            }

+        }

+        M4OSA_free((M4OSA_MemAddr32)mEffectsSettings);

+        mEffectsSettings = NULL;

+    }

+

+    if(mTarget != NULL) {

+        delete mTarget;

+        mTarget = NULL;

+    }

+

+    LOGV("~VideoEditorPreviewController returns");

+}

+

+M4OSA_ERR VideoEditorPreviewController::loadEditSettings(

+    M4VSS3GPP_EditSettings* pSettings,M4xVSS_AudioMixingSettings* bgmSettings) {

+

+    M4OSA_UInt32 i = 0, iClipDuration = 0, rgbSize = 0;

+    M4VIFI_UInt8 *tmp = NULL;

+    M4OSA_ERR err = M4NO_ERROR;

+

+    LOGV("loadEditSettings");

+    LOGV("loadEditSettings Channels = %d, sampling Freq %d",

+          bgmSettings->uiNbChannels, bgmSettings->uiSamplingFrequency  );

+          bgmSettings->uiSamplingFrequency = 32000;

+

+    LOGV("loadEditSettings Channels = %d, sampling Freq %d",

+          bgmSettings->uiNbChannels, bgmSettings->uiSamplingFrequency  );

+    Mutex::Autolock autoLock(mLock);

+

+    // Clean up any previous Edit settings before loading new ones

+    mCurrentVideoEffect = VIDEO_EFFECT_NONE;

+

+    if(mAudioMixPCMFileHandle) {

+        err = M4OSA_fileReadClose (mAudioMixPCMFileHandle);

+        mAudioMixPCMFileHandle = M4OSA_NULL;

+    }

+

+    if(mBackgroundAudioSetting != NULL) {

+        M4OSA_free((M4OSA_MemAddr32)mBackgroundAudioSetting);

+        mBackgroundAudioSetting = NULL;

+    }

+

+    if(mClipList != NULL) {

+        // Clean up

+        for(i=0;i<mNumberClipsInStoryBoard;i++)

+        {

+            if(mClipList[i]->pFile != NULL) {

+                M4OSA_free((M4OSA_MemAddr32)mClipList[i]->pFile);

+                mClipList[i]->pFile = NULL;

+            }

+

+            M4OSA_free((M4OSA_MemAddr32)mClipList[i]);

+        }

+        M4OSA_free((M4OSA_MemAddr32)mClipList);

+        mClipList = NULL;
+        // Reset accumulated duration so a reload does not double-count
+        mClipTotalDuration = 0;

+    }

+

+    if(mEffectsSettings) {

+        for(i=0;i<mNumberEffects;i++) {

+            if(mEffectsSettings[i].xVSS.pFramingBuffer != NULL) {

+                M4OSA_free(

+                (M4OSA_MemAddr32)mEffectsSettings[i].xVSS.pFramingBuffer->pac_data);

+

+                M4OSA_free(

+                (M4OSA_MemAddr32)mEffectsSettings[i].xVSS.pFramingBuffer);

+

+                mEffectsSettings[i].xVSS.pFramingBuffer = NULL;

+            }

+        }

+        M4OSA_free((M4OSA_MemAddr32)mEffectsSettings);

+        mEffectsSettings = NULL;

+    }

+

+    if(mClipList == NULL) {

+        mNumberClipsInStoryBoard = pSettings->uiClipNumber;

+        LOGV("loadEditSettings: # of Clips = %d", mNumberClipsInStoryBoard);

+

+        mClipList = (M4VSS3GPP_ClipSettings**)M4OSA_malloc(

+         sizeof(M4VSS3GPP_ClipSettings*)*pSettings->uiClipNumber, M4VS,

+         (M4OSA_Char*)"LvPP, copy of pClipList");

+

+        if(NULL == mClipList) {

+            LOGE("loadEditSettings: Malloc error");

+            return M4ERR_ALLOC;

+        }

+        M4OSA_memset((M4OSA_MemAddr8)mClipList,

+         sizeof(M4VSS3GPP_ClipSettings*)*pSettings->uiClipNumber, 0);

+

+        for(i=0;i<pSettings->uiClipNumber;i++) {

+

+            // Allocate current clip

+            mClipList[i] =

+             (M4VSS3GPP_ClipSettings*)M4OSA_malloc(

+              sizeof(M4VSS3GPP_ClipSettings),M4VS,(M4OSA_Char*)"clip settings");

+

+            if(mClipList[i] == NULL) {

+

+                LOGE("loadEditSettings: Allocation error for mClipList[%d]", i);

+                return M4ERR_ALLOC;

+            }

+            // Copy plain structure

+            M4OSA_memcpy((M4OSA_MemAddr8)mClipList[i],

+             (M4OSA_MemAddr8)pSettings->pClipList[i],

+             sizeof(M4VSS3GPP_ClipSettings));

+

+            if(NULL != pSettings->pClipList[i]->pFile) {

+                mClipList[i]->pFile = (M4OSA_Char*)M4OSA_malloc(

+                pSettings->pClipList[i]->filePathSize, M4VS,

+                (M4OSA_Char*)"pClipSettingsDest->pFile");

+

+                if(NULL == mClipList[i]->pFile)

+                {

+                    LOGE("loadEditSettings : ERROR allocating filename");

+                    return M4ERR_ALLOC;

+                }

+

+                M4OSA_memcpy((M4OSA_MemAddr8)mClipList[i]->pFile,

+                 (M4OSA_MemAddr8)pSettings->pClipList[i]->pFile,

+                 pSettings->pClipList[i]->filePathSize);

+            }

+            else {

+                LOGE("NULL file path");

+                return M4ERR_PARAMETER;

+            }

+

+            // Calculate total duration of all clips

+            iClipDuration = pSettings->pClipList[i]->uiEndCutTime -

+             pSettings->pClipList[i]->uiBeginCutTime;

+

+            mClipTotalDuration = mClipTotalDuration+iClipDuration;

+        }

+    }

+

+    if(mEffectsSettings == NULL) {

+        mNumberEffects = pSettings->nbEffects;

+        LOGV("loadEditSettings: mNumberEffects = %d", mNumberEffects);

+

+        if(mNumberEffects != 0) {

+            mEffectsSettings = (M4VSS3GPP_EffectSettings*)M4OSA_malloc(

+             mNumberEffects*sizeof(M4VSS3GPP_EffectSettings),

+             M4VS, (M4OSA_Char*)"effects settings");

+

+            if(mEffectsSettings == NULL) {

+                LOGE("loadEffectsSettings: Allocation error");

+                return M4ERR_ALLOC;

+            }

+

+            M4OSA_memset((M4OSA_MemAddr8)mEffectsSettings,

+             mNumberEffects*sizeof(M4VSS3GPP_EffectSettings), 0);

+

+            for(i=0;i<mNumberEffects;i++) {

+

+                mEffectsSettings[i].xVSS.pFramingFilePath = NULL;

+                mEffectsSettings[i].xVSS.pFramingBuffer = NULL;

+                mEffectsSettings[i].xVSS.pTextBuffer = NULL;

+

+                M4OSA_memcpy((M4OSA_MemAddr8)&(mEffectsSettings[i]),

+                 (M4OSA_MemAddr8)&(pSettings->Effects[i]),

+                 sizeof(M4VSS3GPP_EffectSettings));

+

+                if(pSettings->Effects[i].VideoEffectType ==

+                 M4xVSS_kVideoEffectType_Framing) {

+                    // Allocate the pFraming RGB buffer

+                    mEffectsSettings[i].xVSS.pFramingBuffer =

+                    (M4VIFI_ImagePlane *)M4OSA_malloc(sizeof(M4VIFI_ImagePlane),

+                     M4VS, (M4OSA_Char*)"lvpp framing buffer");

+

+                    if(mEffectsSettings[i].xVSS.pFramingBuffer == NULL) {

+                        LOGE("loadEffectsSettings:Alloc error for pFramingBuf");

+                        M4OSA_free((M4OSA_MemAddr32)mEffectsSettings);

+                        mEffectsSettings = NULL;

+                        return M4ERR_ALLOC;

+                    }

+

+                    // Allocate the pac_data (RGB)

+                    if(pSettings->Effects[i].xVSS.rgbType == M4VSS3GPP_kRGB565){

+                        rgbSize =

+                         pSettings->Effects[i].xVSS.pFramingBuffer->u_width *

+                         pSettings->Effects[i].xVSS.pFramingBuffer->u_height*2;

+                    }

+                    else if(

+                     pSettings->Effects[i].xVSS.rgbType == M4VSS3GPP_kRGB888) {

+                        rgbSize =

+                         pSettings->Effects[i].xVSS.pFramingBuffer->u_width *

+                         pSettings->Effects[i].xVSS.pFramingBuffer->u_height*3;

+                    }

+                    else {

+                        LOGE("loadEffectsSettings: wrong RGB type");

+                        M4OSA_free((M4OSA_MemAddr32)mEffectsSettings);

+                        mEffectsSettings = NULL;

+                        return M4ERR_PARAMETER;

+                    }

+

+                    tmp = (M4VIFI_UInt8 *)M4OSA_malloc(rgbSize, M4VS,

+                     (M4OSA_Char*)"framing buffer pac_data");

+

+                    if(tmp == NULL) {

+                        LOGE("loadEffectsSettings:Alloc error pFramingBuf pac");

+                        M4OSA_free((M4OSA_MemAddr32)mEffectsSettings);

+                        mEffectsSettings = NULL;

+                        M4OSA_free(

+                        (M4OSA_MemAddr32)mEffectsSettings[i].xVSS.pFramingBuffer);

+

+                        mEffectsSettings[i].xVSS.pFramingBuffer = NULL;

+                        return M4ERR_ALLOC;

+                    }

+                    /* Initialize the pFramingBuffer*/

+                    mEffectsSettings[i].xVSS.pFramingBuffer->pac_data = tmp;

+                    mEffectsSettings[i].xVSS.pFramingBuffer->u_height =

+                     pSettings->Effects[i].xVSS.pFramingBuffer->u_height;

+

+                    mEffectsSettings[i].xVSS.pFramingBuffer->u_width =

+                     pSettings->Effects[i].xVSS.pFramingBuffer->u_width;

+

+                    mEffectsSettings[i].xVSS.pFramingBuffer->u_stride =

+                     pSettings->Effects[i].xVSS.pFramingBuffer->u_stride;

+

+                    mEffectsSettings[i].xVSS.pFramingBuffer->u_topleft =

+                     pSettings->Effects[i].xVSS.pFramingBuffer->u_topleft;

+

+                    mEffectsSettings[i].xVSS.uialphaBlendingStart =

+                     pSettings->Effects[i].xVSS.uialphaBlendingStart;

+

+                    mEffectsSettings[i].xVSS.uialphaBlendingMiddle =

+                     pSettings->Effects[i].xVSS.uialphaBlendingMiddle;

+

+                    mEffectsSettings[i].xVSS.uialphaBlendingEnd =

+                     pSettings->Effects[i].xVSS.uialphaBlendingEnd;

+

+                    mEffectsSettings[i].xVSS.uialphaBlendingFadeInTime =

+                     pSettings->Effects[i].xVSS.uialphaBlendingFadeInTime;

+                    mEffectsSettings[i].xVSS.uialphaBlendingFadeOutTime =

+                     pSettings->Effects[i].xVSS.uialphaBlendingFadeOutTime;

+

+                    // Copy the pFraming data

+                    M4OSA_memcpy((M4OSA_MemAddr8)

+                    mEffectsSettings[i].xVSS.pFramingBuffer->pac_data,

+                    (M4OSA_MemAddr8)pSettings->Effects[i].xVSS.pFramingBuffer->pac_data,

+                    rgbSize);

+

+                    mEffectsSettings[i].xVSS.rgbType =

+                     pSettings->Effects[i].xVSS.rgbType;

+                }

+            }

+        }

+    }

+

+    if (mBackgroundAudioSetting == NULL) {

+

+        mBackgroundAudioSetting = (M4xVSS_AudioMixingSettings*)M4OSA_malloc(

+        sizeof(M4xVSS_AudioMixingSettings), M4VS,

+        (M4OSA_Char*)"LvPP, copy of bgmSettings");

+

+        if(NULL == mBackgroundAudioSetting) {

+            LOGE("loadEditSettings: mBackgroundAudioSetting Malloc failed");

+            return M4ERR_ALLOC;

+        }

+

+        M4OSA_memset((M4OSA_MemAddr8)mBackgroundAudioSetting,
+         sizeof(M4xVSS_AudioMixingSettings), 0);
+        M4OSA_memcpy((M4OSA_MemAddr8)mBackgroundAudioSetting,
+         (M4OSA_MemAddr8)bgmSettings, sizeof(M4xVSS_AudioMixingSettings));

+

+        if ( mBackgroundAudioSetting->pFile != M4OSA_NULL ) {

+

+            mBackgroundAudioSetting->pFile = (M4OSA_Void*) bgmSettings->pPCMFilePath;

+            mBackgroundAudioSetting->uiNbChannels = 2;

+            mBackgroundAudioSetting->uiSamplingFrequency = 32000;

+        }

+

+        // Open the BG file

+        if ( mBackgroundAudioSetting->pFile != M4OSA_NULL ) {

+            err = M4OSA_fileReadOpen(&mAudioMixPCMFileHandle,

+             mBackgroundAudioSetting->pFile, M4OSA_kFileRead);

+

+            if (err != M4NO_ERROR) {

+                LOGE("loadEditSettings: mBackgroundAudio PCM File open failed");

+                return M4ERR_PARAMETER;

+            }

+        }

+    }

+

+    mOutputVideoSize = pSettings->xVSS.outputVideoSize;

+    mFrameStr.pBuffer = M4OSA_NULL;

+    return M4NO_ERROR;

+}

+

+M4OSA_ERR VideoEditorPreviewController::setSurface(const sp<Surface> &surface) {

+    LOGV("setSurface");

+    Mutex::Autolock autoLock(mLock);

+

+    mSurface = surface;

+    mISurface = surface->getISurface();

+    LOGV("setSurface: mISurface = %p", mISurface.get());

+    return M4NO_ERROR;

+}

+

+M4OSA_ERR VideoEditorPreviewController::startPreview(

+    M4OSA_UInt32 fromMS, M4OSA_Int32 toMs, M4OSA_UInt16 callBackAfterFrameCount,

+    M4OSA_Bool loop) {

+

+    M4OSA_ERR err = M4NO_ERROR;

+    M4OSA_UInt32 i = 0, iIncrementedDuration = 0;

+    LOGV("startPreview");

+

+    if(fromMS > toMs) {

+        LOGE("startPreview: fromMS > toMs");

+        return M4ERR_PARAMETER;

+    }

+

+    if(toMs == 0) {

+        LOGE("startPreview: toMs is 0");

+        return M4ERR_PARAMETER;

+    }

+

+    // If already started, then stop preview first

+    for(int playerInst=0; playerInst<NBPLAYER_INSTANCES; playerInst++) {

+        if(mVePlayer[playerInst] != NULL) {

+            LOGV("startPreview: stopping previously started preview playback");

+            stopPreview();

+            break;

+        }

+    }

+

+    // If renderPreview was called previously, then delete Renderer object first

+    if(mTarget != NULL) {

+        LOGV("startPreview: delete previous PreviewRenderer");

+        delete mTarget;

+        mTarget = NULL;

+    }

+

+    LOGV("startPreview: loop = %d", loop);

+    mPreviewLooping = loop;

+

+    LOGV("startPreview: callBackAfterFrameCount = %d", callBackAfterFrameCount);

+    mCallBackAfterFrameCnt = callBackAfterFrameCount;

+

+    for (int playerInst=0; playerInst<NBPLAYER_INSTANCES; playerInst++) {

+        mVePlayer[playerInst] = new VideoEditorPlayer();

+        if(mVePlayer[playerInst] == NULL) {

+            LOGE("startPreview:Error creating VideoEditorPlayer %d",playerInst);

+            return M4ERR_ALLOC;

+        }

+        LOGV("startPreview: object created");

+

+        mVePlayer[playerInst]->setNotifyCallback(this,(notify_callback_f)notify);

+        LOGV("startPreview: notify callback set");

+

+        mVePlayer[playerInst]->loadEffectsSettings(mEffectsSettings,

+         mNumberEffects);

+        LOGV("startPreview: effects settings loaded");

+

+        mVePlayer[playerInst]->loadAudioMixSettings(mBackgroundAudioSetting);

+        LOGV("startPreview: AudioMixSettings settings loaded");

+

+        mVePlayer[playerInst]->setAudioMixPCMFileHandle(mAudioMixPCMFileHandle);

+        LOGV("startPreview: AudioMixPCMFileHandle set");

+

+        mVePlayer[playerInst]->setProgressCallbackInterval(

+         mCallBackAfterFrameCnt);

+        LOGV("startPreview: setProgressCallBackInterval");

+    }

+

+    mPlayerState = VePlayerIdle;

+

+    if(fromMS == 0) {

+        mCurrentClipNumber = -1;

+        // Save original value

+        mFirstPreviewClipBeginTime = mClipList[0]->uiBeginCutTime;

+        mVideoStoryBoardTimeMsUptoFirstPreviewClip = 0;

+    }

+    else {

+        LOGV("startPreview: fromMS=%d", fromMS);

+        if(fromMS >= mClipTotalDuration) {

+            LOGE("startPreview: fromMS >= mClipTotalDuration");

+            return M4ERR_PARAMETER;

+        }
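+        // Walk the storyboard to find the clip containing fromMS. Example

+        // (illustrative): with clip durations of 4000 ms and 6000 ms,

+        // fromMS = 5000 lands in clip 1, whose begin cut time is advanced

+        // by 1000 ms so playback starts exactly at fromMS.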

+        for(i=0;i<mNumberClipsInStoryBoard;i++) {

+            if(fromMS < (iIncrementedDuration + (mClipList[i]->uiEndCutTime -

+             mClipList[i]->uiBeginCutTime))) {

+                // Set to 1 index below,

+                // as threadProcess first increments the clip index

+                // and then processes clip in thread loop

+                mCurrentClipNumber = i-1;

+                LOGV("startPreview:mCurrentClipNumber = %d fromMS=%d",i,fromMS);

+

+                // Save original value

+                mFirstPreviewClipBeginTime = mClipList[i]->uiBeginCutTime;

+

+                // Set correct begin time to start playback

+                if((fromMS+mClipList[i]->uiBeginCutTime) >

+                (iIncrementedDuration+mClipList[i]->uiBeginCutTime)) {

+

+                    mClipList[i]->uiBeginCutTime =

+                     mClipList[i]->uiBeginCutTime +

+                     (fromMS - iIncrementedDuration);

+                }

+                break;

+            }

+            else {

+                iIncrementedDuration = iIncrementedDuration +

+                 (mClipList[i]->uiEndCutTime - mClipList[i]->uiBeginCutTime);

+            }

+        }

+        mVideoStoryBoardTimeMsUptoFirstPreviewClip = iIncrementedDuration;

+    }

+

+    for (int playerInst=0; playerInst<NBPLAYER_INSTANCES; playerInst++) {

+        mVePlayer[playerInst]->setAudioMixStoryBoardParam(fromMS,

+         mFirstPreviewClipBeginTime,

+         mClipList[i]->ClipProperties.uiClipAudioVolumePercentage);

+

+        LOGV("startPreview:setAudioMixStoryBoardSkimTimeStamp set %d cuttime \

+         %d", fromMS, mFirstPreviewClipBeginTime);

+    }

+

+    mStartingClipIndex = mCurrentClipNumber+1;

+

+    // Start playing with player instance 0

+    mCurrentPlayer = 0;

+

+    if(toMs == -1) {

+        LOGV("startPreview: Preview till end of storyboard");

+        mNumberClipsToPreview = mNumberClipsInStoryBoard;

+        // Save original value

+        mLastPreviewClipEndTime =

+         mClipList[mNumberClipsToPreview-1]->uiEndCutTime;

+    }

+    else {

+        LOGV("startPreview: toMs=%d", toMs);

+        if(toMs > mClipTotalDuration) {

+            LOGE("startPreview: toMs > mClipTotalDuration");

+            return M4ERR_PARAMETER;

+        }

+

+        iIncrementedDuration = 0;

+
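+        // Walk the storyboard to find the clip containing toMs and trim

+        // that clip's end cut time so the preview stops exactly at toMs

+        // (the original value is saved and restored in stopPreview())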

+        for(i=0;i<mNumberClipsInStoryBoard;i++) {

+            if(toMs <= (iIncrementedDuration +

+             (mClipList[i]->uiEndCutTime - mClipList[i]->uiBeginCutTime))) {

+                // Save original value

+                mLastPreviewClipEndTime = mClipList[i]->uiEndCutTime;

+                // Set the end cut time of clip index i to toMs

+                mClipList[i]->uiEndCutTime = toMs;

+

+                // Number of clips to be previewed is from index 0 to i

+                // increment by 1 as i starts from 0

+                mNumberClipsToPreview = i+1;

+                break;

+            }

+            else {

+                iIncrementedDuration = iIncrementedDuration +

+                 (mClipList[i]->uiEndCutTime - mClipList[i]->uiBeginCutTime);

+            }

+        }

+    }

+

+    // Open the thread semaphore

+    M4OSA_semaphoreOpen(&mSemThreadWait, 1);

+

+    // Open the preview process thread

+    err = M4OSA_threadSyncOpen(&mThreadContext, (M4OSA_ThreadDoIt)threadProc);

+    if (M4NO_ERROR != err) {

+        LOGE("VideoEditorPreviewController:M4OSA_threadSyncOpen error %d", err);

+        return err;

+    }

+

+    // Set the stacksize

+    err = M4OSA_threadSyncSetOption(mThreadContext, M4OSA_ThreadStackSize,

+     (M4OSA_DataOption)PREVIEW_THREAD_STACK_SIZE);

+

+    if (M4NO_ERROR != err) {

+        LOGE("VideoEditorPreviewController: threadSyncSetOption error %d", err);

+        M4OSA_threadSyncClose(mThreadContext);

+        mThreadContext = NULL;

+        return err;

+    }

+

+     // Start the thread

+     err = M4OSA_threadSyncStart(mThreadContext, (M4OSA_Void*)this);

+     if (M4NO_ERROR != err) {

+        LOGE("VideoEditorPreviewController: threadSyncStart error %d", err);

+        M4OSA_threadSyncClose(mThreadContext);

+        mThreadContext = NULL;

+        return err;

+    }

+    bStopThreadInProgress = false;

+

+    LOGV("startPreview: process thread started");

+    return M4NO_ERROR;

+}

+

+M4OSA_ERR VideoEditorPreviewController::stopPreview() {

+    M4OSA_ERR err = M4NO_ERROR;

+    LOGV("stopPreview");

+

+    // Stop the thread

+    if(mThreadContext != NULL) {

+        bStopThreadInProgress = true;

+        err = M4OSA_semaphorePost(mSemThreadWait);

+

+        err = M4OSA_threadSyncStop(mThreadContext);

+        if(err != M4NO_ERROR) {

+            LOGV("stopPreview: error 0x%x in trying to stop thread", err);

+            // Continue even if error

+        }

+

+        err = M4OSA_threadSyncClose(mThreadContext);

+        if(err != M4NO_ERROR) {

+            LOGE("stopPreview: error 0x%x in trying to close thread", err);

+            // Continue even if error

+        }

+

+        mThreadContext = NULL;

+    }

+

+    // Close the semaphore first

+    if(mSemThreadWait != NULL) {

+        err = M4OSA_semaphoreClose(mSemThreadWait);

+        LOGV("stopPreview: close semaphore returns 0x%x", err);

+    }

+

+    for (int playerInst=0; playerInst<NBPLAYER_INSTANCES; playerInst++) {

+        if(mVePlayer[playerInst] != NULL) {

+            if(mVePlayer[playerInst]->isPlaying()) {

+                LOGV("stop the player first");

+                mVePlayer[playerInst]->stop();

+            }

+

+            LOGV("stopPreview: clearing mVePlayer");

+            mVePlayer[playerInst].clear();

+            mVePlayer[playerInst] = NULL;

+        }

+    }

+

+    // If image file playing, then free the buffer pointer

+    if(mFrameStr.pBuffer != M4OSA_NULL) {

+        M4OSA_free((M4OSA_MemAddr32)mFrameStr.pBuffer);

+        mFrameStr.pBuffer = M4OSA_NULL;

+    }

+

+    // Reset original begin cut time of first previewed clip

+    mClipList[mStartingClipIndex]->uiBeginCutTime = mFirstPreviewClipBeginTime;

+    // Reset original end cut time of last previewed clip

+    mClipList[mNumberClipsToPreview-1]->uiEndCutTime = mLastPreviewClipEndTime;

+

+    mPlayerState = VePlayerIdle;

+

+    mCurrentPlayedDuration = 0;

+    mCurrentClipDuration = 0;

+    mRenderingMode = M4xVSS_kBlackBorders;

+    mOutputVideoWidth = 0;

+    mOutputVideoHeight = 0;

+

+    return M4NO_ERROR;

+}

+

+M4OSA_ERR VideoEditorPreviewController::renderPreviewFrame(

+    const sp<Surface> &surface, VideoEditor_renderPreviewFrameStr* pFrameInfo) {

+

+    M4OSA_ERR err = M4NO_ERROR;

+    M4OSA_UInt32 i = 0, iIncrementedDuration = 0, tnTimeMs=0, framesize =0;

+    VideoEditor_renderPreviewFrameStr* pFrameStr = pFrameInfo;

+    M4VIFI_UInt8 *pixelArray = NULL;

+

+    Mutex::Autolock autoLock(mLock);

+

+    // Get the Isurface to be passed to renderer

+    mISurface = surface->getISurface();

+

+    // Delete previous renderer instance

+    if(mTarget != NULL) {

+        delete mTarget;

+        mTarget = NULL;

+    }

+

+    if(mOutputVideoWidth == 0) {

+        mOutputVideoWidth = pFrameStr->uiFrameWidth;

+    }

+    if(mOutputVideoHeight == 0) {

+        mOutputVideoHeight = pFrameStr->uiFrameHeight;

+    }

+

+    // Initialize the renderer

+    if(mTarget == NULL) {

+        mTarget = new PreviewRenderer(

+            OMX_COLOR_FormatYUV420Planar, surface, mOutputVideoWidth, mOutputVideoHeight,

+            mOutputVideoWidth, mOutputVideoHeight, 0);

+        if(mTarget == NULL) {

+            LOGE("renderPreviewFrame: cannot create PreviewRenderer");

+            return M4ERR_ALLOC;

+        }

+    }

+

+    pixelArray = NULL;

+

+    // Postprocessing (apply video effect)

+    if(pFrameStr->bApplyEffect == M4OSA_TRUE) {

+

+        for(i=0;i<mNumberEffects;i++) {

+            // First check if effect starttime matches the clip being previewed

+            if((mEffectsSettings[i].uiStartTime < pFrameStr->clipBeginCutTime)

+             ||(mEffectsSettings[i].uiStartTime >= pFrameStr->clipEndCutTime)) {

+                // This effect doesn't belong to this clip, check next one

+                continue;

+            }

+            if((mEffectsSettings[i].uiStartTime <= pFrameStr->timeMs) &&

+            ((mEffectsSettings[i].uiStartTime+mEffectsSettings[i].uiDuration) >=

+             pFrameStr->timeMs) && (mEffectsSettings[i].uiDuration != 0)) {

+                setVideoEffectType(mEffectsSettings[i].VideoEffectType, TRUE);

+            }

+            else {

+                setVideoEffectType(mEffectsSettings[i].VideoEffectType, FALSE);

+            }

+        }

+

+        if(mCurrentVideoEffect != VIDEO_EFFECT_NONE) {

+            err = applyVideoEffect((M4OSA_Void *)pFrameStr->pBuffer,

+             OMX_COLOR_FormatYUV420Planar, pFrameStr->uiFrameWidth,

+             pFrameStr->uiFrameHeight, pFrameStr->timeMs,

+             (M4OSA_Void *)pixelArray);

+

+            if(err != M4NO_ERROR) {

+                LOGE("renderPreviewFrame: applyVideoEffect error 0x%x", err);

+                delete mTarget;

+                mTarget = NULL;

+                M4OSA_free((M4OSA_MemAddr32)pixelArray);

+                pixelArray = NULL;

+                return err;

+           }

+           mCurrentVideoEffect = VIDEO_EFFECT_NONE;

+        }

+        else {

+            // Apply the rendering mode

+            err = doImageRenderingMode((M4OSA_Void *)pFrameStr->pBuffer,

+             OMX_COLOR_FormatYUV420Planar, pFrameStr->uiFrameWidth,

+             pFrameStr->uiFrameHeight, (M4OSA_Void *)pixelArray);

+

+            if(err != M4NO_ERROR) {

+                LOGE("renderPreviewFrame:doImageRenderingMode error 0x%x", err);

+                delete mTarget;

+                mTarget = NULL;

+                M4OSA_free((M4OSA_MemAddr32)pixelArray);

+                pixelArray = NULL;

+                return err;

+            }

+        }

+    }

+    else {

+        // Apply the rendering mode

+        err = doImageRenderingMode((M4OSA_Void *)pFrameStr->pBuffer,

+         OMX_COLOR_FormatYUV420Planar, pFrameStr->uiFrameWidth,

+         pFrameStr->uiFrameHeight, (M4OSA_Void *)pixelArray);

+

+        if(err != M4NO_ERROR) {

+            LOGE("renderPreviewFrame: doImageRenderingMode error 0x%x", err);

+            delete mTarget;

+            mTarget = NULL;

+            M4OSA_free((M4OSA_MemAddr32)pixelArray);

+            pixelArray = NULL;

+            return err;

+        }

+    }

+

+    mTarget->renderYV12();

+    return err;

+}

+

+M4OSA_Void VideoEditorPreviewController::setJniCallback(void* cookie,

+    jni_progress_callback_fct callbackFct) {

+    //LOGV("setJniCallback");

+    mJniCookie = cookie;

+    mJniCallback = callbackFct;

+}

+

+M4OSA_ERR VideoEditorPreviewController::preparePlayer(

+    void* param, int playerInstance, int index) {

+

+    M4OSA_ERR err = M4NO_ERROR;

+    VideoEditorPreviewController *pController =

+     (VideoEditorPreviewController *)param;

+

+    LOGV("preparePlayer: instance %d file %d", playerInstance, index);

+

+    pController->mVePlayer[playerInstance]->setDataSource(

+    (const char *)pController->mClipList[index]->pFile, NULL);

+    LOGV("preparePlayer: setDataSource instance %s",

+     (const char *)pController->mClipList[index]->pFile);

+

+    pController->mVePlayer[playerInstance]->setVideoISurface(

+     pController->mISurface);

+    LOGV("preparePlayer: setVideoISurface");

+

+    pController->mVePlayer[playerInstance]->setVideoSurface(

+     pController->mSurface);

+    LOGV("preparePlayer: setVideoSurface");

+

+    pController->mVePlayer[playerInstance]->setMediaRenderingMode(

+     pController->mClipList[index]->xVSS.MediaRendering,

+     pController->mOutputVideoSize);

+    LOGV("preparePlayer: setMediaRenderingMode");

+
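+    // The first previewed clip starts from the original begin cut time saved

+    // in startPreview(); its uiBeginCutTime itself may have been advanced to

+    // honor the requested preview start point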

+    if(index == pController->mStartingClipIndex) {

+        pController->mVePlayer[playerInstance]->setPlaybackBeginTime(

+        pController->mFirstPreviewClipBeginTime);

+    }

+    else {

+        pController->mVePlayer[playerInstance]->setPlaybackBeginTime(

+        pController->mClipList[index]->uiBeginCutTime);

+    }

+    LOGV("preparePlayer: setPlaybackBeginTime(%d)",

+     pController->mClipList[index]->uiBeginCutTime);

+

+    pController->mVePlayer[playerInstance]->setPlaybackEndTime(

+     pController->mClipList[index]->uiEndCutTime);

+    LOGV("preparePlayer: setPlaybackEndTime(%d)",

+     pController->mClipList[index]->uiEndCutTime);

+

+    if(pController->mClipList[index]->FileType == M4VIDEOEDITING_kFileType_ARGB8888) {

+        pController->mVePlayer[playerInstance]->setImageClipProperties(

+                 pController->mClipList[index]->ClipProperties.uiVideoWidth,

+                 pController->mClipList[index]->ClipProperties.uiVideoHeight);

+        LOGV("preparePlayer: setImageClipProperties");

+    }

+

+    pController->mVePlayer[playerInstance]->prepare();

+    LOGV("preparePlayer: prepared");

+

+    if(pController->mClipList[index]->uiBeginCutTime > 0) {

+        pController->mVePlayer[playerInstance]->seekTo(

+         pController->mClipList[index]->uiBeginCutTime);

+

+        LOGV("preparePlayer: seekTo(%d)",

+         pController->mClipList[index]->uiBeginCutTime);

+    }

+

+    pController->mVePlayer[playerInstance]->readFirstVideoFrame();

+    LOGV("preparePlayer: readFirstVideoFrame of clip");

+

+    return err;

+}

+

+M4OSA_ERR VideoEditorPreviewController::threadProc(M4OSA_Void* param) {

+    M4OSA_ERR err = M4NO_ERROR;

+    M4OSA_Int32 index = 0;

+    VideoEditorPreviewController *pController =

+     (VideoEditorPreviewController *)param;

+

+    LOGV("inside threadProc");

+    if(pController->mPlayerState == VePlayerIdle) {

+        (pController->mCurrentClipNumber)++;

+

+        LOGV("threadProc: playing file index %d total clips %d",

+         pController->mCurrentClipNumber, pController->mNumberClipsToPreview);

+

+        if(pController->mCurrentClipNumber >=

+         pController->mNumberClipsToPreview) {

+

+            LOGV("All clips previewed");

+

+            pController->mCurrentPlayedDuration = 0;

+            pController->mCurrentClipDuration = 0;

+            pController->mCurrentPlayer = 0;

+

+            if(pController->mPreviewLooping == M4OSA_TRUE) {

+                pController->mCurrentClipNumber =

+                 pController->mStartingClipIndex;

+

+                LOGV("Preview looping TRUE, restarting from clip index %d",

+                 pController->mCurrentClipNumber);

+

+                // Reset the story board timestamp inside the player

+                for (int playerInst=0; playerInst<NBPLAYER_INSTANCES;

+                 playerInst++) {

+                    pController->mVePlayer[playerInst]->resetJniCallbackTimeStamp();

+                }

+            }

+            else {

+                if(pController->mJniCallback != NULL) {

+                    pController->mJniCallback(

+                     pController->mJniCookie, MSG_TYPE_PREVIEW_END, 0);

+                }

+                pController->mPlayerState = VePlayerAutoStop;

+

+                // Reset original begin cuttime of first previewed clip

+                pController->mClipList[pController->mStartingClipIndex]->uiBeginCutTime =

+                 pController->mFirstPreviewClipBeginTime;

+                // Reset original end cuttime of last previewed clip

+                pController->mClipList[pController->mNumberClipsToPreview-1]->uiEndCutTime =

+                 pController->mLastPreviewClipEndTime;

+

+                // Return a warning to M4OSA thread handler

+                // so that thread is moved from executing state to open state

+                return M4WAR_NO_MORE_STREAM;

+            }

+        }

+
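+        // mCurrentPlayedDuration accumulates the storyboard time already

+        // played; for the first previewed clip this includes the storyboard

+        // time that precedes the preview start point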

+        index=pController->mCurrentClipNumber;

+        if(pController->mCurrentClipNumber == pController->mStartingClipIndex) {

+            pController->mCurrentPlayedDuration +=

+             pController->mVideoStoryBoardTimeMsUptoFirstPreviewClip;

+

+            pController->mCurrentClipDuration =

+             pController->mClipList[pController->mCurrentClipNumber]->uiEndCutTime

+              - pController->mFirstPreviewClipBeginTime;

+

+            preparePlayer((void*)pController, pController->mCurrentPlayer, index);

+        }

+        else {

+            pController->mCurrentPlayedDuration +=

+             pController->mCurrentClipDuration;

+

+            pController->mCurrentClipDuration =

+             pController->mClipList[pController->mCurrentClipNumber]->uiEndCutTime -

+             pController->mClipList[pController->mCurrentClipNumber]->uiBeginCutTime;

+        }

+

+        pController->mVePlayer[pController->mCurrentPlayer]->setStoryboardStartTime(

+         pController->mCurrentPlayedDuration);

+        LOGV("threadProc: setStoryboardStartTime");

+

+        // Set the next clip duration for Audio mix here

+        if(pController->mCurrentClipNumber != pController->mStartingClipIndex) {

+

+            pController->mVePlayer[pController->mCurrentPlayer]->setAudioMixStoryBoardParam(

+             pController->mCurrentPlayedDuration,

+             pController->mClipList[index]->uiBeginCutTime,

+             pController->mClipList[index]->ClipProperties.uiClipAudioVolumePercentage);

+

+            LOGV("threadProc: setAudioMixStoryBoardParam fromMS %d \

+             ClipBeginTime %d", pController->mCurrentPlayedDuration +

+             pController->mClipList[index]->uiBeginCutTime,

+             pController->mClipList[index]->uiBeginCutTime,

+             pController->mClipList[index]->ClipProperties.uiClipAudioVolumePercentage);

+        }

+

+        pController->mVePlayer[pController->mCurrentPlayer]->start();

+        LOGV("threadProc: started");

+

+        pController->mPlayerState = VePlayerBusy;

+

+    }

+    else if(pController->mPlayerState == VePlayerAutoStop) {

+        LOGV("Preview completed..auto stop the player");

+    }

+    else {

+        if(!pController->bStopThreadInProgress) {

+            LOGV("threadProc: state busy...wait for sem");

+            err = M4OSA_semaphoreWait(pController->mSemThreadWait,

+             M4OSA_WAIT_FOREVER);

+        }

+        LOGV("threadProc: sem wait returned err = 0x%x", err);

+    }

+

+    //Always return M4NO_ERROR to ensure the thread keeps running

+    return M4NO_ERROR;

+}

+

+void VideoEditorPreviewController::notify(

+    void* cookie, int msg, int ext1, int ext2)

+{

+    VideoEditorPreviewController *pController =

+     (VideoEditorPreviewController *)cookie;

+

+    M4OSA_ERR err = M4NO_ERROR;

+    uint32_t clipDuration = 0;

+    switch (msg) {

+        case MEDIA_NOP: // interface test message

+            LOGV("MEDIA_NOP");

+            break;

+        case MEDIA_PREPARED:

+            LOGV("MEDIA_PREPARED");

+            break;

+        case MEDIA_PLAYBACK_COMPLETE:

+            LOGV("notify:MEDIA_PLAYBACK_COMPLETE");

+            pController->mPlayerState = VePlayerIdle;

+

+            //send progress callback with last frame timestamp

+            if(pController->mCurrentClipNumber ==

+             pController->mStartingClipIndex) {

+                clipDuration =

+                 pController->mClipList[pController->mCurrentClipNumber]->uiEndCutTime

+                  - pController->mFirstPreviewClipBeginTime;

+            }

+            else {

+                clipDuration =

+                 pController->mClipList[pController->mCurrentClipNumber]->uiEndCutTime

+                  - pController->mClipList[pController->mCurrentClipNumber]->uiBeginCutTime;

+            }

+            if(pController->mJniCallback != NULL) {

+                pController->mJniCallback(

+                 pController->mJniCookie, MSG_TYPE_PROGRESS_INDICATION,

+                 (clipDuration+pController->mCurrentPlayedDuration));

+            }

+

+            M4OSA_semaphorePost(pController->mSemThreadWait);

+            break;

+         case MEDIA_ERROR:

+          // Always log errors.

+          // ext1: Media framework error code.

+          // ext2: Implementation dependant error code.

+            LOGE("MEDIA_ERROR; error (%d, %d)", ext1, ext2);

+            if(pController->mJniCallback != NULL) {

+                pController->mJniCallback(pController->mJniCookie,

+                 MSG_TYPE_PLAYER_ERROR, ext1);

+            }

+            break;

+        case MEDIA_INFO:

+            // ext1: Media framework error code.

+            // ext2: Implementation dependant error code.

+            //LOGW("MEDIA_INFO; info/warning (%d, %d)", ext1, ext2);

+            if(pController->mJniCallback != NULL) {

+                pController->mJniCallback(pController->mJniCookie,

+                 MSG_TYPE_PROGRESS_INDICATION, ext2);

+            }

+            break;

+        case MEDIA_SEEK_COMPLETE:

+            LOGV("MEDIA_SEEK_COMPLETE; Received seek complete");

+            break;

+        case MEDIA_BUFFERING_UPDATE:

+            LOGV("MEDIA_BUFFERING_UPDATE; buffering %d", ext1);

+            break;

+        case MEDIA_SET_VIDEO_SIZE:

+            LOGV("MEDIA_SET_VIDEO_SIZE; New video size %d x %d", ext1, ext2);

+            break;
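+        // 0xAAAAAAAA appears to be a private message from VideoEditorPlayer

+        // indicating playback is almost done; the player instances are used

+        // in ping-pong fashion so the next clip can be prepared while the

+        // current one finishes
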

+        case 0xAAAAAAAA:

+            LOGV("VIDEO PLAYBACK ALMOST over, prepare next player");

+

+            // Select next player and prepare it

+            // If there is a clip after this one

+            if ((pController->mCurrentClipNumber+1) <

+             pController->mNumberClipsToPreview) {

+

+                pController->mCurrentPlayer++;

+                if (pController->mCurrentPlayer >= NBPLAYER_INSTANCES) {

+                    pController->mCurrentPlayer = 0;

+                }

+                // Prepare the first clip to be played

+                preparePlayer((void*)pController, pController->mCurrentPlayer,

+                    pController->mCurrentClipNumber+1);

+            }

+            break;

+        default:

+            LOGV("unrecognized message: (%d, %d, %d)", msg, ext1, ext2);

+            break;

+    }

+}

+

+void VideoEditorPreviewController::setVideoEffectType(

+    M4VSS3GPP_VideoEffectType type, M4OSA_Bool enable) {

+

+    M4OSA_UInt32 effect = VIDEO_EFFECT_NONE;

+

+    // map M4VSS3GPP_VideoEffectType to local enum

+    switch(type) {

+        case M4VSS3GPP_kVideoEffectType_FadeFromBlack:

+            effect = VIDEO_EFFECT_FADEFROMBLACK;

+            break;

+

+        case M4VSS3GPP_kVideoEffectType_FadeToBlack:

+            effect = VIDEO_EFFECT_FADETOBLACK;

+            break;

+

+        case M4VSS3GPP_kVideoEffectType_CurtainOpening:

+            effect = VIDEO_EFFECT_CURTAINOPEN;

+            break;

+

+        case M4VSS3GPP_kVideoEffectType_CurtainClosing:

+            effect = VIDEO_EFFECT_CURTAINCLOSE;

+            break;

+

+        case M4xVSS_kVideoEffectType_BlackAndWhite:

+            effect = VIDEO_EFFECT_BLACKANDWHITE;

+            break;

+

+        case M4xVSS_kVideoEffectType_Pink:

+            effect = VIDEO_EFFECT_PINK;

+            break;

+

+        case M4xVSS_kVideoEffectType_Green:

+            effect = VIDEO_EFFECT_GREEN;

+            break;

+

+        case M4xVSS_kVideoEffectType_Sepia:

+            effect = VIDEO_EFFECT_SEPIA;

+            break;

+

+        case M4xVSS_kVideoEffectType_Negative:

+            effect = VIDEO_EFFECT_NEGATIVE;

+            break;

+

+        case M4xVSS_kVideoEffectType_Framing:

+            effect = VIDEO_EFFECT_FRAMING;

+            break;

+

+        case M4xVSS_kVideoEffectType_Fifties:

+            effect = VIDEO_EFFECT_FIFTIES;

+            break;

+

+        case M4xVSS_kVideoEffectType_ColorRGB16:

+            effect = VIDEO_EFFECT_COLOR_RGB16;

+            break;

+

+        case M4xVSS_kVideoEffectType_Gradient:

+            effect = VIDEO_EFFECT_GRADIENT;

+            break;

+

+        default:

+            effect = VIDEO_EFFECT_NONE;

+            break;

+    }

+

+    if(enable == M4OSA_TRUE) {

+        // If already set, then no need to set again

+        if(!(mCurrentVideoEffect & effect)) {

+            mCurrentVideoEffect |= effect;

+        }

+        if(effect == VIDEO_EFFECT_FIFTIES) {

+            mIsFiftiesEffectStarted = true;

+        }

+    }

+    else  {

+        // Reset only if already set

+        if(mCurrentVideoEffect & effect)

+            mCurrentVideoEffect &= ~effect;

+    }

+

+    return;

+}

+

+

+M4OSA_ERR VideoEditorPreviewController::applyVideoEffect(

+    M4OSA_Void * dataPtr, M4OSA_UInt32 colorFormat, M4OSA_UInt32 videoWidth,

+    M4OSA_UInt32 videoHeight, M4OSA_UInt32 timeMs, M4OSA_Void* outPtr) {

+

+    M4OSA_ERR err = M4NO_ERROR;

+    vePostProcessParams postProcessParams;

+

+    postProcessParams.vidBuffer = (M4VIFI_UInt8*)dataPtr;

+    postProcessParams.videoWidth = videoWidth;

+    postProcessParams.videoHeight = videoHeight;

+    postProcessParams.timeMs = timeMs;

+    postProcessParams.timeOffset = 0; //Since timeMS already takes care of offset in this case

+    postProcessParams.effectsSettings = mEffectsSettings;

+    postProcessParams.numberEffects = mNumberEffects;

+    postProcessParams.outVideoWidth = mOutputVideoWidth;

+    postProcessParams.outVideoHeight = mOutputVideoHeight;

+    postProcessParams.currentVideoEffect = mCurrentVideoEffect;

+    postProcessParams.renderingMode = mRenderingMode;

+    if(mIsFiftiesEffectStarted == M4OSA_TRUE) {

+        postProcessParams.isFiftiesEffectStarted = M4OSA_TRUE;

+        mIsFiftiesEffectStarted = M4OSA_FALSE;

+    }

+    else {

+        postProcessParams.isFiftiesEffectStarted = M4OSA_FALSE;

+    }

+    //postProcessParams.renderer = mTarget;

+    postProcessParams.overlayFrameRGBBuffer = NULL;

+    postProcessParams.overlayFrameYUVBuffer = NULL;

+

+    mTarget->getBufferYV12(&(postProcessParams.pOutBuffer), &(postProcessParams.outBufferStride));

+

+    err = applyEffectsAndRenderingMode(&postProcessParams);

+    return err;

+}

+

+M4OSA_ERR VideoEditorPreviewController::setPreviewFrameRenderingMode(

+    M4xVSS_MediaRendering mode, M4VIDEOEDITING_VideoFrameSize outputVideoSize) {

+

+    //LOGV("setMediaRenderingMode: outputVideoSize = %d", outputVideoSize);

+    mRenderingMode = mode;

+

+    switch(outputVideoSize) {

+        case M4VIDEOEDITING_kSQCIF:

+            mOutputVideoWidth = 128;

+            mOutputVideoHeight = 96;

+            break;

+

+        case M4VIDEOEDITING_kQQVGA:

+            mOutputVideoWidth = 160;

+            mOutputVideoHeight = 120;

+            break;

+

+        case M4VIDEOEDITING_kQCIF:

+            mOutputVideoWidth = 176;

+            mOutputVideoHeight = 144;

+            break;

+

+        case M4VIDEOEDITING_kQVGA:

+            mOutputVideoWidth = 320;

+            mOutputVideoHeight = 240;

+            break;

+

+        case M4VIDEOEDITING_kCIF:

+            mOutputVideoWidth = 352;

+            mOutputVideoHeight = 288;

+            break;

+

+        case M4VIDEOEDITING_kVGA:

+            mOutputVideoWidth = 640;

+            mOutputVideoHeight = 480;

+            break;

+

+        case M4VIDEOEDITING_kWVGA:

+            mOutputVideoWidth = 800;

+            mOutputVideoHeight = 480;

+            break;

+

+        case M4VIDEOEDITING_kNTSC:

+            mOutputVideoWidth = 720;

+            mOutputVideoHeight = 480;

+            break;

+

+        case M4VIDEOEDITING_k640_360:

+            mOutputVideoWidth = 640;

+            mOutputVideoHeight = 360;

+            break;

+

+        case M4VIDEOEDITING_k854_480:

+            mOutputVideoWidth = 854;

+            mOutputVideoHeight = 480;

+            break;

+

+        case M4VIDEOEDITING_kHD1280:

+            mOutputVideoWidth = 1280;

+            mOutputVideoHeight = 720;

+            break;

+

+        case M4VIDEOEDITING_kHD1080:

+            mOutputVideoWidth = 1080;

+            mOutputVideoHeight = 720;

+            break;

+

+        case M4VIDEOEDITING_kHD960:

+            mOutputVideoWidth = 960;

+            mOutputVideoHeight = 720;

+            break;

+

+        default:

+            mOutputVideoWidth = 0;

+            mOutputVideoHeight = 0;

+            break;

+    }

+

+    return M4NO_ERROR;

+}

+

+M4OSA_ERR VideoEditorPreviewController::doImageRenderingMode(

+    M4OSA_Void * dataPtr, M4OSA_UInt32 colorFormat, M4OSA_UInt32 videoWidth,

+    M4OSA_UInt32 videoHeight, M4OSA_Void* outPtr) {

+

+    M4OSA_ERR err = M4NO_ERROR;

+    M4VIFI_ImagePlane planeIn[3], planeOut[3];

+    M4VIFI_UInt8 *inBuffer = M4OSA_NULL;

+    M4OSA_UInt32 outputBufferWidth =0, outputBufferHeight=0;

+

+    //frameSize = (videoWidth*videoHeight*3) >> 1;

+    inBuffer = (M4OSA_UInt8 *)dataPtr;

+

+    // In plane

+    prepareYUV420ImagePlane(planeIn, videoWidth,

+      videoHeight, (M4VIFI_UInt8 *)inBuffer);

+

+    outputBufferWidth = mOutputVideoWidth;

+    outputBufferHeight = mOutputVideoHeight;

+

+    // Out plane

+    uint8_t* outBuffer;

+    size_t outBufferStride = 0;

+

+    LOGV("doMediaRendering CALL getBuffer()");

+    mTarget->getBufferYV12(&outBuffer, &outBufferStride);

+

+    // Set the output YUV420 plane to be compatible with YV12 format

+    //In YV12 format, sizes must be even
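+    // e.g. an odd output width of 853 rounds up to 854 so that the half-size

+    // chroma planes stay aligned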

+    M4OSA_UInt32 yv12PlaneWidth = ((mOutputVideoWidth +1)>>1)<<1;

+    M4OSA_UInt32 yv12PlaneHeight = ((mOutputVideoHeight+1)>>1)<<1;

+

+    prepareYV12ImagePlane(planeOut, yv12PlaneWidth, yv12PlaneHeight,

+     (M4OSA_UInt32)outBufferStride, (M4VIFI_UInt8 *)outBuffer);

+

+    err = applyRenderingMode(planeIn, planeOut, mRenderingMode);

+    if(err != M4NO_ERROR) {

+        LOGE("doImageRenderingMode: applyRenderingMode returned err=0x%x", err);

+    }

+    return err;

+}

+

+} //namespace android

diff --git a/libvideoeditor/lvpp/VideoEditorPreviewController.h b/libvideoeditor/lvpp/VideoEditorPreviewController.h
new file mode 100755
index 0000000..637ddb2
--- /dev/null
+++ b/libvideoeditor/lvpp/VideoEditorPreviewController.h
@@ -0,0 +1,144 @@
+/*

+ * Copyright (C) 2011 NXP Software

+ * Copyright (C) 2011 The Android Open Source Project

+ *

+ * Licensed under the Apache License, Version 2.0 (the "License");

+ * you may not use this file except in compliance with the License.

+ * You may obtain a copy of the License at

+ *

+ *      http://www.apache.org/licenses/LICENSE-2.0

+ *

+ * Unless required by applicable law or agreed to in writing, software

+ * distributed under the License is distributed on an "AS IS" BASIS,

+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

+ * See the License for the specific language governing permissions and

+ * limitations under the License.

+ */

+

+#ifndef ANDROID_VE_PREVIEWCONTROLLER_H

+#define ANDROID_VE_PREVIEWCONTROLLER_H

+

+#include <utils/Log.h>

+#include "VideoEditorPlayer.h"

+#include "M4OSA_Semaphore.h"

+#include "M4OSA_Thread.h"

+#include "M4OSA_Clock.h"

+#include "M4OSA_Time.h"

+#include "M4xVSS_API.h"

+#include "M4xVSS_Internal.h"

+#include "M4VFL_transition.h"

+#include "VideoEditorTools.h"

+#include "VideoEditorThumbnailMain.h"

+#include "VideoEditorMain.h"

+

+#include "OMX_IVCommon.h"

+#include "mediaplayer.h"

+#include <surfaceflinger/Surface.h>

+#include <surfaceflinger/ISurface.h>

+#include <surfaceflinger/ISurfaceComposer.h>

+#include <surfaceflinger/SurfaceComposerClient.h>

+#include <media/stagefright/MediaBuffer.h>

+#include "PreviewRenderer.h"

+

+
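+// Two player instances are alternated so that the next clip can be prepared

+// while the current clip is still playing (see the 0xAAAAAAAA handling in

+// VideoEditorPreviewController::notify)
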

+#define NBPLAYER_INSTANCES 2

+

+namespace android {

+

+typedef enum {

+    VePlayerIdle=0,

+    VePlayerBusy,

+    VePlayerAutoStop

+} VePlayerState;

+

+

+// Callback mechanism from PreviewController to Jni

+typedef void (*jni_progress_callback_fct)(void* cookie, M4OSA_UInt32 msgType, M4OSA_UInt32 argc);

+

+class VideoEditorPreviewController {

+

+public:

+    VideoEditorPreviewController();

+    virtual ~VideoEditorPreviewController();

+

+    M4OSA_ERR loadEditSettings(M4VSS3GPP_EditSettings* pSettings,

+     M4xVSS_AudioMixingSettings* bgmSettings);

+

+    M4OSA_ERR setSurface(const sp<Surface> &surface);

+

+    M4OSA_ERR startPreview(M4OSA_UInt32 fromMS, M4OSA_Int32 toMs,

+     M4OSA_UInt16 callBackAfterFrameCount, M4OSA_Bool loop) ;

+

+    M4OSA_ERR stopPreview();

+

+    M4OSA_ERR renderPreviewFrame(const sp<Surface> &surface,

+     VideoEditor_renderPreviewFrameStr* pFrameInfo);

+

+    M4OSA_Void setJniCallback(void* cookie,

+     jni_progress_callback_fct callbackFct);

+

+    M4OSA_ERR setPreviewFrameRenderingMode(M4xVSS_MediaRendering mode,

+     M4VIDEOEDITING_VideoFrameSize outputVideoSize);

+

+private:

+    sp<VideoEditorPlayer> mVePlayer[NBPLAYER_INSTANCES];

+    int mCurrentPlayer; //Instance of the player currently being used

+    sp<ISurface> mISurface;

+    sp<Surface>  mSurface;

+    mutable Mutex mLock;

+    M4OSA_Context mThreadContext;

+    VePlayerState mPlayerState;

+    M4VSS3GPP_ClipSettings **mClipList; //Pointer to an array of clip settings

+    M4OSA_UInt32 mNumberClipsInStoryBoard;

+    M4OSA_UInt32 mNumberClipsToPreview;

+    M4OSA_UInt32 mStartingClipIndex;

+    M4OSA_Bool mPreviewLooping;

+    M4OSA_UInt32 mCallBackAfterFrameCnt;

+    M4VSS3GPP_EffectSettings* mEffectsSettings;

+    M4OSA_UInt32 mNumberEffects;

+    M4OSA_Int32 mCurrentClipNumber;

+    M4OSA_UInt32 mClipTotalDuration;

+    M4OSA_UInt32 mCurrentVideoEffect;

+    M4xVSS_AudioMixingSettings* mBackgroundAudioSetting;

+    M4OSA_Context mAudioMixPCMFileHandle;

+    PreviewRenderer *mTarget;

+    M4OSA_Context mJniCookie;

+    jni_progress_callback_fct mJniCallback;

+    VideoEditor_renderPreviewFrameStr mFrameStr;

+    M4OSA_UInt32 mCurrentPlayedDuration;

+    M4OSA_UInt32 mCurrentClipDuration;

+    M4VIDEOEDITING_VideoFrameSize mOutputVideoSize;

+    M4OSA_UInt32 mFirstPreviewClipBeginTime;

+    M4OSA_UInt32 mLastPreviewClipEndTime;

+    M4OSA_UInt32 mVideoStoryBoardTimeMsUptoFirstPreviewClip;

+

+    M4xVSS_MediaRendering mRenderingMode;

+    uint32_t mOutputVideoWidth;

+    uint32_t mOutputVideoHeight;

+    bool bStopThreadInProgress;

+    M4OSA_Context mSemThreadWait;

+    bool mIsFiftiesEffectStarted;

+

+    M4VIFI_UInt8*  mFrameRGBBuffer;

+    M4VIFI_UInt8*  mFrameYUVBuffer;

+    static M4OSA_ERR preparePlayer(void* param, int playerInstance, int index);

+    static M4OSA_ERR threadProc(M4OSA_Void* param);

+    static void notify(void* cookie, int msg, int ext1, int ext2);

+

+    void setVideoEffectType(M4VSS3GPP_VideoEffectType type, M4OSA_Bool enable);

+

+    M4OSA_ERR applyVideoEffect(M4OSA_Void * dataPtr, M4OSA_UInt32 colorFormat,

+     M4OSA_UInt32 videoWidth, M4OSA_UInt32 videoHeight,

+     M4OSA_UInt32 timeMs, M4OSA_Void* outPtr);

+

+    M4OSA_ERR doImageRenderingMode(M4OSA_Void * dataPtr,

+     M4OSA_UInt32 colorFormat, M4OSA_UInt32 videoWidth,

+     M4OSA_UInt32 videoHeight, M4OSA_Void* outPtr);

+

+    VideoEditorPreviewController(const VideoEditorPreviewController &);

+    VideoEditorPreviewController &operator=(const VideoEditorPreviewController &);

+};

+

+} //namespace android

+

+#endif //ANDROID_VE_PREVIEWCONTROLLER_H

diff --git a/libvideoeditor/lvpp/VideoEditorSRC.cpp b/libvideoeditor/lvpp/VideoEditorSRC.cpp
new file mode 100755
index 0000000..56f713b
--- /dev/null
+++ b/libvideoeditor/lvpp/VideoEditorSRC.cpp
@@ -0,0 +1,391 @@
+/*

+ * Copyright (C) 2011 NXP Software

+ * Copyright (C) 2011 The Android Open Source Project

+ *

+ * Licensed under the Apache License, Version 2.0 (the "License");

+ * you may not use this file except in compliance with the License.

+ * You may obtain a copy of the License at

+ *

+ *      http://www.apache.org/licenses/LICENSE-2.0

+ *

+ * Unless required by applicable law or agreed to in writing, software

+ * distributed under the License is distributed on an "AS IS" BASIS,

+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

+ * See the License for the specific language governing permissions and

+ * limitations under the License.

+ */

+

+#define LOG_NDEBUG 1

+#define LOG_TAG "VEAudioSource"

+#include <utils/Log.h>

+

+

+#include "VideoEditorSRC.h"

+#include <media/stagefright/MetaData.h>

+#include <media/stagefright/MediaDebug.h>

+#include "AudioMixer.h"

+

+

+namespace android {

+

+VideoEditorSRC::VideoEditorSRC(

+        const sp<MediaSource> &source) {

+

+    LOGV("VideoEditorSRC::Create");

+    mSource = source;

+    mResampler = NULL;

+    mBitDepth = 16;

+    mChannelCnt = 0;

+    mSampleRate = 0;

+    mOutputSampleRate = DEFAULT_SAMPLING_FREQ;

+    mStarted = false;

+    mIsResamplingRequired = false;

+    mIsChannelConvertionRequired = false;

+    mInitialTimeStampUs = -1;

+    mAccuOutBufferSize  = 0;

+    mSeekTimeUs = -1;

+    mLeftover = 0;

+    mLastReadSize = 0;

+#ifndef FROYO

+    mSeekMode =  ReadOptions::SEEK_PREVIOUS_SYNC;

+#endif

+

+    mOutputFormat = new MetaData;

+

+    // Input Source validation

+    sp<MetaData> format = mSource->getFormat();

+    const char *mime;

+    bool success = format->findCString(kKeyMIMEType, &mime);

+    CHECK(success);

+    CHECK(!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_RAW));

+

+    // Set the metadata of the output after conversion.

+    if(mOutputFormat != NULL) {

+        mOutputFormat->setCString(kKeyMIMEType, MEDIA_MIMETYPE_AUDIO_RAW);

+        mOutputFormat->setInt32(kKeySampleRate, DEFAULT_SAMPLING_FREQ);

+

+        //by default we convert all data to stereo

+        mOutputFormat->setInt32(kKeyChannelCount, 2);

+    } else {

+        LOGE("Meta data was not allocated.");

+    }

+

+    // Allocate a  1 sec buffer (test only, to be refined)

+    mInterframeBufferPosition = 0;

+    pInterframeBuffer = new uint8_t[DEFAULT_SAMPLING_FREQ * 2 * 2]; //stereo=2 * bytespersample=2

+

+

+}

+

+VideoEditorSRC::~VideoEditorSRC(){

+    if (mStarted == true)

+        stop();

+

+    if(mOutputFormat != NULL) {

+        mOutputFormat.clear();

+        mOutputFormat = NULL;

+    }

+

+    if (pInterframeBuffer != NULL){

+        delete[] pInterframeBuffer; // allocated with new[], so use delete[]

+        pInterframeBuffer = NULL;

+    }

+}

+

+void VideoEditorSRC::setResampling(int32_t sampleRate) {

+    Mutex::Autolock autoLock(mLock);

+    LOGV("VideoEditorSRC::setResampling called with samplreRate = %d", sampleRate);

+    if(sampleRate != DEFAULT_SAMPLING_FREQ) { //default case

+        LOGV("VideoEditor Audio resampler, freq set is other than default");

+        CHECK(mOutputFormat->setInt32(kKeySampleRate, DEFAULT_SAMPLING_FREQ));

+    }

+    mOutputSampleRate = sampleRate;

+    return;

+}

+

+// debug

+FILE *fp;

+

+

+status_t  VideoEditorSRC::start (MetaData *params) {

+    Mutex::Autolock autoLock(mLock);

+

+    CHECK(!mStarted);

+    LOGV(" VideoEditorSRC:start() called");

+

+    sp<MetaData> format = mSource->getFormat();

+    const char *mime;

+    bool success = format->findCString(kKeyMIMEType, &mime);

+    CHECK(success);

+    CHECK(!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_RAW));

+

+    success = format->findInt32(kKeySampleRate, &mSampleRate);

+    CHECK(success);

+

+    success = format->findInt32(kKeyChannelCount, &mChannelCnt);

+    CHECK(success);

+

+    mInputFrameSize = mChannelCnt * 2; //2 is the byte depth

+    if(mSampleRate != mOutputSampleRate) {

+        LOGV("Resampling required (%d != %d)", mSampleRate, mOutputSampleRate);

+        mIsResamplingRequired = true;

+        LOGV("Create resampler %d %d %d", mBitDepth, mChannelCnt, mOutputSampleRate);

+

+        mResampler = AudioResampler::create(

+                        mBitDepth, mChannelCnt, mOutputSampleRate, AudioResampler::DEFAULT);

+

+        if(mResampler == NULL) {

+            return NO_MEMORY;

+        }

+        LOGV("Set input rate %d", mSampleRate);

+        mResampler->setSampleRate(mSampleRate);

+        mResampler->setVolume(UNITY_GAIN, UNITY_GAIN);

+

+    } else {

+        if(mChannelCnt != 2) { //we always make sure to provide stereo

+            LOGV("Only Channel convertion required");

+            mIsChannelConvertionRequired = true;

+        }

+    }

+    mSeekTimeUs = -1;

+#ifndef FROYO

+    mSeekMode =  ReadOptions::SEEK_PREVIOUS_SYNC;

+#endif

+    mStarted = true;

+    mSource->start();

+

+    //+ debug

+    fp = fopen("/sdcard/output.pcm", "wb");

+

+    return OK;

+}

+

+status_t VideoEditorSRC::stop() {

+

+    Mutex::Autolock autoLock(mLock);

+    LOGV("VideoEditorSRC::stop()");

+    mSource->stop();

+    if(mResampler != NULL) {

+        delete mResampler;

+        mResampler = NULL;

+    }

+    mStarted = false;

+    mInitialTimeStampUs = -1;

+    mAccuOutBufferSize  = 0;

+    mLeftover = 0;

+    mLastReadSize = 0;

+

+    //+ debug

+    fclose(fp);

+    return OK;

+}

+

+sp<MetaData> VideoEditorSRC::getFormat() {

+    LOGV("AudioSRC getFormat");

+    //Mutex::Autolock autoLock(mLock);

+    return mOutputFormat;

+}

+

+status_t VideoEditorSRC::read (

+        MediaBuffer **buffer_out, const ReadOptions *options) {

+    Mutex::Autolock autoLock(mLock);

+    *buffer_out = NULL;

+    int32_t leftover = 0;

+

+    LOGV("VideoEditorSRC::read");

+

+    if (!mStarted) {

+        return ERROR_END_OF_STREAM;

+    }

+

+    if(mIsResamplingRequired == true) {

+

+        LOGV("mIsResamplingRequired = true");

+

+        // Store the seek parameters

+        int64_t seekTimeUs;

+#ifndef FROYO

+        ReadOptions::SeekMode mode = ReadOptions::SEEK_PREVIOUS_SYNC;

+        if (options && options->getSeekTo(&seekTimeUs, &mode)) {

+#else

+        if (options && options->getSeekTo(&seekTimeUs)) {

+#endif

+            LOGV("read Seek %lld", seekTimeUs);

+            mInitialTimeStampUs = -1;

+            mSeekTimeUs = seekTimeUs;

+#ifndef FROYO

+            mSeekMode = mode;

+#else

+            mReadOptions = *options;

+#endif

+        }

+

+        // We ask for 1024 frames in output

+        size_t outFrameCnt = 1024;

+        int32_t outBufferSize = (outFrameCnt) * 2 * sizeof(int16_t); //out is always 2 channels & 16 bits

+        int64_t outDurationUs = (outBufferSize * 1000000) /(mOutputSampleRate * 2 * sizeof(int16_t)); //2 channels out * 2 bytes per sample
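+        // e.g. 1024 stereo 16-bit frames -> outBufferSize = 4096 bytes,

+        // which is 32000 us of audio at the 32 kHz default output rate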

+        LOGV("outBufferSize            %d", outBufferSize);

+        LOGV("outFrameCnt              %d", outFrameCnt);

+

+        pTmpBuffer = (int32_t*)malloc(outFrameCnt * 2 * sizeof(int32_t)); //out is always 2 channels and resampler out is 32 bits

+        memset(pTmpBuffer, 0x00, outFrameCnt * 2 * sizeof(int32_t));

+        // Resample to target quality

+        mResampler->resample(pTmpBuffer, outFrameCnt, this);

+        int16_t *reSampledBuffer = (int16_t*)malloc(outBufferSize);

+        memset(reSampledBuffer, 0x00, outBufferSize);

+

+        // Convert back to 16 bits

+        AudioMixer::ditherAndClamp((int32_t*)reSampledBuffer, pTmpBuffer, outFrameCnt);

+        LOGV("Resampled buffer size %d", outFrameCnt* 2 * sizeof(int16_t));

+

+        // Create new MediaBuffer

+        mCopyBuffer = new MediaBuffer((void*)reSampledBuffer, outBufferSize);
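+        // Note: this MediaBuffer constructor is not expected to take

+        // ownership of the malloc'd reSampledBuffer; the consumer must

+        // release it, otherwise the buffer leaks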

+

+        // Compute and set the new timestamp

+        sp<MetaData> to = mCopyBuffer->meta_data();

+        int64_t totalOutDurationUs = (mAccuOutBufferSize * 1000000) /(mOutputSampleRate * 2 * 2); //2 channels out * 2 bytes per sample

+        int64_t timeUs = mInitialTimeStampUs + totalOutDurationUs;

+        to->setInt64(kKeyTime, timeUs);

+        LOGV("buffer duration %lld   timestamp %lld   init %lld", outDurationUs, timeUs, mInitialTimeStampUs);

+

+        // update the accumulate size

+        mAccuOutBufferSize += outBufferSize;

+

+        mCopyBuffer->set_range(0, outBufferSize);

+        *buffer_out = mCopyBuffer;

+

+        free(pTmpBuffer);

+

+    } else if(mIsChannelConvertionRequired == true) {

+        //TODO convert to stereo here.

+    } else {

+        //LOGI("Resampling not required");

+        MediaBuffer *aBuffer;

+        status_t err = mSource->read(&aBuffer, options);

+        LOGV("mSource->read returned %d", err);

+        if(err != OK) {

+            *buffer_out = NULL;

+            mStarted = false;

+            return err;

+        }

+        *buffer_out = aBuffer;

+    }

+

+    return OK;

+}

+

+status_t VideoEditorSRC::getNextBuffer(AudioBufferProvider::Buffer *pBuffer) {

+    LOGV("Requesting        %d", pBuffer->frameCount);

+    uint32_t availableFrames;

+    bool lastBuffer = false;

+    MediaBuffer *aBuffer;

+

+

+    //update the internal buffer

+    // Store the leftover at the beginning of the local buffer

+    if (mLeftover > 0) {

+        LOGV("Moving mLeftover =%d  from  %d", mLeftover, mLastReadSize);

+        if (mLastReadSize > 0) {

+            // Source and destination can overlap, so memmove is required

+            memmove(pInterframeBuffer, (uint8_t*) (pInterframeBuffer + mLastReadSize), mLeftover);

+        }

+        mInterframeBufferPosition = mLeftover;

+    }

+    else {

+        mInterframeBufferPosition = 0;

+    }

+
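+    // One frame is mChannelCnt samples of 2 bytes each, hence the divisor

+    // below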

+    availableFrames = mInterframeBufferPosition / (mChannelCnt*2);

+

+    while ((availableFrames < pBuffer->frameCount)&&(mStarted)) {

+        // if we seek, reset the initial time stamp and accumulated time

+#ifndef FROYO

+        ReadOptions options;

+        if (mSeekTimeUs >= 0) {

+            LOGV("%p cacheMore_l Seek requested = %lld", this, mSeekTimeUs);

+            ReadOptions::SeekMode mode = mSeekMode;

+            options.setSeekTo(mSeekTimeUs, mode);

+            mSeekTimeUs = -1;

+        }

+#else

+        ReadOptions options;

+        if (mSeekTimeUs >= 0) {

+            LOGV("%p cacheMore_l Seek requested = %lld", this, mSeekTimeUs);

+            options = mReadOptions;

+            mSeekTimeUs = -1;

+        }

+#endif

+        /* The first call to read() will require buffering twice as much data */

+        /* This extra margin is needed by the resampler */

+        status_t err = mSource->read(&aBuffer, &options);

+        LOGV("mSource->read returned %d", err);

+        if(err != OK) {

+            if (mInterframeBufferPosition == 0) {

+                mStarted = false;

+            }

+            //Empty the internal buffer if there is no more data left in the source

+            else {

+                lastBuffer = true;

+                mInputByteBuffer = pInterframeBuffer;

+                //clear the end of the buffer, just in case

+                memset(pInterframeBuffer+mInterframeBufferPosition, 0x00, DEFAULT_SAMPLING_FREQ * 2 * 2 - mInterframeBufferPosition);

+                mStarted = false;

+            }

+        }

+        else {

+            //copy the buffer

+            memcpy((uint8_t*) (pInterframeBuffer + mInterframeBufferPosition), (uint8_t*) (aBuffer->data() + aBuffer->range_offset()), aBuffer->range_length());

+            LOGV("Read from buffer  %d", aBuffer->range_length());

+

+            mInterframeBufferPosition += aBuffer->range_length();

+            LOGV("Stored            %d", mInterframeBufferPosition);

+

+            // Get the time stamp of the first buffer

+            if (mInitialTimeStampUs == -1) {

+                int64_t curTS;

+                sp<MetaData> from = aBuffer->meta_data();

+                from->findInt64(kKeyTime, &curTS);

+                LOGV("setting mInitialTimeStampUs to %lld", mInitialTimeStampUs);

+                mInitialTimeStampUs = curTS;

+            }

+

+            // release the buffer

+            aBuffer->release();

+        }

+        availableFrames = mInterframeBufferPosition / (mChannelCnt*2);

+        LOGV("availableFrames   %d", availableFrames);

+    }

+

+    if (lastBuffer) {

+        pBuffer->frameCount = availableFrames;

+    }

+

+    //update the input buffer

+    pBuffer->raw        = (void*)(pInterframeBuffer);

+

+    // Update how many bytes are left

+    // (actualReadSize is updated in getNextBuffer() called from resample())

+    int32_t actualReadSize = pBuffer->frameCount * mChannelCnt * 2;

+    mLeftover = mInterframeBufferPosition - actualReadSize;

+    LOGV("mLeftover         %d", mLeftover);

+

+    mLastReadSize = actualReadSize;

+

+    //+ debug

+    //pBuffer->frameCount = 1024;

+    fwrite(pBuffer->raw, 1, pBuffer->frameCount * mChannelCnt * sizeof(int16_t), fp);

+

+    LOGV("inFrameCount     %d", pBuffer->frameCount);

+

+    return OK;

+}

+

+

+void VideoEditorSRC::releaseBuffer(AudioBufferProvider::Buffer *pBuffer) {

+    if(pBuffer->raw != NULL) {

+        pBuffer->raw = NULL;

+    }

+    pBuffer->frameCount = 0;

+}

+

+} //namespace android

diff --git a/libvideoeditor/lvpp/VideoEditorSRC.h b/libvideoeditor/lvpp/VideoEditorSRC.h
new file mode 100755
index 0000000..3c557a4
--- /dev/null
+++ b/libvideoeditor/lvpp/VideoEditorSRC.h
@@ -0,0 +1,105 @@
+/*

+ * Copyright (C) 2011 NXP Software

+ * Copyright (C) 2011 The Android Open Source Project

+ *

+ * Licensed under the Apache License, Version 2.0 (the "License");

+ * you may not use this file except in compliance with the License.

+ * You may obtain a copy of the License at

+ *

+ *      http://www.apache.org/licenses/LICENSE-2.0

+ *

+ * Unless required by applicable law or agreed to in writing, software

+ * distributed under the License is distributed on an "AS IS" BASIS,

+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

+ * See the License for the specific language governing permissions and

+ * limitations under the License.

+ */

+

+#include <utils/RefBase.h>

+#include <media/stagefright/MediaErrors.h>

+

+#include <media/stagefright/MediaSource.h>

+#include <media/stagefright/MediaBuffer.h>

+#include <media/stagefright/MediaBufferGroup.h>

+#include <media/stagefright/MediaDefs.h>

+

+#include "AudioBufferProvider.h"

+#include "AudioResampler.h"

+

+namespace android {

+//struct MediaSource;

+

+

+class VideoEditorSRC : public MediaSource , public AudioBufferProvider {

+

+    public:

+        VideoEditorSRC(

+            const sp<MediaSource> &source);

+

+        virtual status_t start (MetaData *params = NULL);

+        virtual status_t stop();

+        virtual sp<MetaData> getFormat();

+        virtual status_t read (

+            MediaBuffer **buffer, const ReadOptions *options = NULL);

+

+        virtual status_t getNextBuffer(Buffer* buffer);

+        virtual void releaseBuffer(Buffer* buffer);

+

+        void setResampling(int32_t sampleRate=kFreq32000Hz);

+

+    enum { //Sampling freq

+        kFreq8000Hz = 8000,

+        kFreq11025Hz = 11025,

+        kFreq12000Hz = 12000,

+        kFreq16000Hz = 16000,

+        kFreq22050Hz = 22050,

+        kFreq24000Hz = 24000,

+        kFreq32000Hz = 32000,

+        kFreq44100 = 44100,

+        kFreq48000 = 48000,

+    };

+

+    static const uint16_t UNITY_GAIN = 0x1000;

+    static const int32_t DEFAULT_SAMPLING_FREQ = (int32_t)kFreq32000Hz; // kFreq44100;

+

+    protected :

+        virtual ~VideoEditorSRC();

+    private:

+

+        VideoEditorSRC();

+        VideoEditorSRC &operator=(const VideoEditorSRC &);

+

+        AudioResampler        *mResampler;

+        AudioBufferProvider  *mbufferProvider;

+        sp<MediaSource>      mSource;

+        MediaBuffer      *mCopyBuffer;

+        MediaBufferGroup *mGroup;

+        uint8_t *mInputByteBuffer;

+        int32_t mInputBufferLength;

+        int mInputFrameSize;

+        int mBitDepth;

+        int mChannelCnt;

+        int mSampleRate;

+        int32_t mOutputSampleRate;

+        bool mStarted;

+        bool mIsResamplingRequired;

+        bool mIsChannelConvertionRequired; // for mono to stereo

+        sp<MetaData> mOutputFormat;

+        Mutex mLock;

+        int32_t *pTmpBuffer;

+

+        uint8_t* pInterframeBuffer;

+        int32_t mInterframeBufferPosition;

+        int32_t mLeftover;

+        int32_t mLastReadSize ;

+

+        int64_t mInitialTimeStampUs;

+        int64_t mAccuOutBufferSize;

+

+        int64_t mSeekTimeUs;

+        ReadOptions::SeekMode mSeekMode;

+

+};

+

+} //namespace android

+

diff --git a/libvideoeditor/lvpp/VideoEditorTools.cpp b/libvideoeditor/lvpp/VideoEditorTools.cpp
new file mode 100755
index 0000000..99fc6b5
--- /dev/null
+++ b/libvideoeditor/lvpp/VideoEditorTools.cpp
@@ -0,0 +1,3774 @@
+/*

+ * Copyright (C) 2011 NXP Software

+ * Copyright (C) 2011 The Android Open Source Project

+ *

+ * Licensed under the Apache License, Version 2.0 (the "License");

+ * you may not use this file except in compliance with the License.

+ * You may obtain a copy of the License at

+ *

+ *      http://www.apache.org/licenses/LICENSE-2.0

+ *

+ * Unless required by applicable law or agreed to in writing, software

+ * distributed under the License is distributed on an "AS IS" BASIS,

+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

+ * See the License for the specific language governing permissions and

+ * limitations under the License.

+ */

+

+#include "VideoEditorTools.h"

+#include "PreviewRenderer.h"

+/*+ Handle the image files here */

+#include <utils/Log.h>

+/*- Handle the image files here */

+

+const M4VIFI_UInt8   M4VIFI_ClipTable[1256]

+= {

+0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,

+0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,

+0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,

+0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,

+0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,

+0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,

+0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,

+0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,

+0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,

+0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,

+0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,

+0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,

+0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,

+0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,

+0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,

+0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,

+0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,

+0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,

+0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,

+0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,

+0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,

+0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,

+0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,

+0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,

+0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,

+0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,

+0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,

+0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,

+0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,

+0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,

+0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,

+0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,

+0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,

+0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,

+0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,

+0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,

+0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,

+0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,

+0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,

+0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,

+0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,

+0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,

+0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,

+0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,

+0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,

+0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,

+0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,

+0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,

+0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,

+0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,

+0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,

+0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,

+0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,

+0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,

+0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,

+0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,

+0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,

+0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,

+0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,

+0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,

+0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,

+0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,

+0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x02, 0x03,

+0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0a, 0x0b,

+0x0c, 0x0d, 0x0e, 0x0f, 0x10, 0x11, 0x12, 0x13,

+0x14, 0x15, 0x16, 0x17, 0x18, 0x19, 0x1a, 0x1b,

+0x1c, 0x1d, 0x1e, 0x1f, 0x20, 0x21, 0x22, 0x23,

+0x24, 0x25, 0x26, 0x27, 0x28, 0x29, 0x2a, 0x2b,

+0x2c, 0x2d, 0x2e, 0x2f, 0x30, 0x31, 0x32, 0x33,

+0x34, 0x35, 0x36, 0x37, 0x38, 0x39, 0x3a, 0x3b,

+0x3c, 0x3d, 0x3e, 0x3f, 0x40, 0x41, 0x42, 0x43,

+0x44, 0x45, 0x46, 0x47, 0x48, 0x49, 0x4a, 0x4b,

+0x4c, 0x4d, 0x4e, 0x4f, 0x50, 0x51, 0x52, 0x53,

+0x54, 0x55, 0x56, 0x57, 0x58, 0x59, 0x5a, 0x5b,

+0x5c, 0x5d, 0x5e, 0x5f, 0x60, 0x61, 0x62, 0x63,

+0x64, 0x65, 0x66, 0x67, 0x68, 0x69, 0x6a, 0x6b,

+0x6c, 0x6d, 0x6e, 0x6f, 0x70, 0x71, 0x72, 0x73,

+0x74, 0x75, 0x76, 0x77, 0x78, 0x79, 0x7a, 0x7b,

+0x7c, 0x7d, 0x7e, 0x7f, 0x80, 0x81, 0x82, 0x83,

+0x84, 0x85, 0x86, 0x87, 0x88, 0x89, 0x8a, 0x8b,

+0x8c, 0x8d, 0x8e, 0x8f, 0x90, 0x91, 0x92, 0x93,

+0x94, 0x95, 0x96, 0x97, 0x98, 0x99, 0x9a, 0x9b,

+0x9c, 0x9d, 0x9e, 0x9f, 0xa0, 0xa1, 0xa2, 0xa3,

+0xa4, 0xa5, 0xa6, 0xa7, 0xa8, 0xa9, 0xaa, 0xab,

+0xac, 0xad, 0xae, 0xaf, 0xb0, 0xb1, 0xb2, 0xb3,

+0xb4, 0xb5, 0xb6, 0xb7, 0xb8, 0xb9, 0xba, 0xbb,

+0xbc, 0xbd, 0xbe, 0xbf, 0xc0, 0xc1, 0xc2, 0xc3,

+0xc4, 0xc5, 0xc6, 0xc7, 0xc8, 0xc9, 0xca, 0xcb,

+0xcc, 0xcd, 0xce, 0xcf, 0xd0, 0xd1, 0xd2, 0xd3,

+0xd4, 0xd5, 0xd6, 0xd7, 0xd8, 0xd9, 0xda, 0xdb,

+0xdc, 0xdd, 0xde, 0xdf, 0xe0, 0xe1, 0xe2, 0xe3,

+0xe4, 0xe5, 0xe6, 0xe7, 0xe8, 0xe9, 0xea, 0xeb,

+0xec, 0xed, 0xee, 0xef, 0xf0, 0xf1, 0xf2, 0xf3,

+0xf4, 0xf5, 0xf6, 0xf7, 0xf8, 0xf9, 0xfa, 0xfb,

+0xfc, 0xfd, 0xfe, 0xff, 0xff, 0xff, 0xff, 0xff,

+0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,

+0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,

+0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,

+0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,

+0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,

+0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,

+0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,

+0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,

+0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,

+0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,

+0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,

+0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,

+0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,

+0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,

+0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,

+0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,

+0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,

+0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,

+0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,

+0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,

+0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,

+0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,

+0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,

+0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,

+0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,

+0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,

+0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,

+0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,

+0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,

+0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,

+0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,

+0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,

+0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,

+0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,

+0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,

+0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,

+0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,

+0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,

+0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,

+0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,

+0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,

+0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,

+0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,

+0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,

+0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,

+0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,

+0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,

+0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,

+0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,

+0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,

+0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,

+0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,

+0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,

+0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,

+0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,

+0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,

+0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,

+0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,

+0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,

+0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,

+0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,

+0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff

+};

+

+/* Division table for (65535/x); x = 0 to 511 */

+const M4VIFI_UInt16  M4VIFI_DivTable[512]

+= {

+0, 65535, 32768, 21845, 16384, 13107, 10922, 9362,

+8192, 7281, 6553, 5957, 5461, 5041, 4681, 4369,

+4096, 3855, 3640, 3449, 3276, 3120, 2978, 2849,

+2730, 2621, 2520, 2427, 2340, 2259, 2184, 2114,

+2048, 1985, 1927, 1872, 1820, 1771, 1724, 1680,

+1638, 1598, 1560, 1524, 1489, 1456, 1424, 1394,

+1365, 1337, 1310, 1285, 1260, 1236, 1213, 1191,

+1170, 1149, 1129, 1110, 1092, 1074, 1057, 1040,

+1024, 1008, 992, 978, 963, 949, 936, 923,

+910, 897, 885, 873, 862, 851, 840, 829,

+819, 809, 799, 789, 780, 771, 762, 753,

+744, 736, 728, 720, 712, 704, 697, 689,

+682, 675, 668, 661, 655, 648, 642, 636,

+630, 624, 618, 612, 606, 601, 595, 590,

+585, 579, 574, 569, 564, 560, 555, 550,

+546, 541, 537, 532, 528, 524, 520, 516,

+512, 508, 504, 500, 496, 492, 489, 485,

+481, 478, 474, 471, 468, 464, 461, 458,

+455, 451, 448, 445, 442, 439, 436, 434,

+431, 428, 425, 422, 420, 417, 414, 412,

+409, 407, 404, 402, 399, 397, 394, 392,

+390, 387, 385, 383, 381, 378, 376, 374,

+372, 370, 368, 366, 364, 362, 360, 358,

+356, 354, 352, 350, 348, 346, 344, 343,

+341, 339, 337, 336, 334, 332, 330, 329,

+327, 326, 324, 322, 321, 319, 318, 316,

+315, 313, 312, 310, 309, 307, 306, 304,

+303, 302, 300, 299, 297, 296, 295, 293,

+292, 291, 289, 288, 287, 286, 284, 283,

+282, 281, 280, 278, 277, 276, 275, 274,

+273, 271, 270, 269, 268, 267, 266, 265,

+264, 263, 262, 261, 260, 259, 258, 257,

+256, 255, 254, 253, 252, 251, 250, 249,

+248, 247, 246, 245, 244, 243, 242, 241,

+240, 240, 239, 238, 237, 236, 235, 234,

+234, 233, 232, 231, 230, 229, 229, 228,

+227, 226, 225, 225, 224, 223, 222, 222,

+221, 220, 219, 219, 218, 217, 217, 216,

+215, 214, 214, 213, 212, 212, 211, 210,

+210, 209, 208, 208, 207, 206, 206, 205,

+204, 204, 203, 202, 202, 201, 201, 200,

+199, 199, 198, 197, 197, 196, 196, 195,

+195, 194, 193, 193, 192, 192, 191, 191,

+190, 189, 189, 188, 188, 187, 187, 186,

+186, 185, 185, 184, 184, 183, 183, 182,

+182, 181, 181, 180, 180, 179, 179, 178,

+178, 177, 177, 176, 176, 175, 175, 174,

+174, 173, 173, 172, 172, 172, 171, 171,

+170, 170, 169, 169, 168, 168, 168, 167,

+167, 166, 166, 165, 165, 165, 164, 164,

+163, 163, 163, 162, 162, 161, 161, 161,

+160, 160, 159, 159, 159, 158, 158, 157,

+157, 157, 156, 156, 156, 155, 155, 154,

+154, 154, 153, 153, 153, 152, 152, 152,

+151, 151, 151, 150, 150, 149, 149, 149,

+148, 148, 148, 147, 147, 147, 146, 146,

+146, 145, 145, 145, 144, 144, 144, 144,

+143, 143, 143, 142, 142, 142, 141, 141,

+141, 140, 140, 140, 140, 139, 139, 139,

+138, 138, 138, 137, 137, 137, 137, 136,

+136, 136, 135, 135, 135, 135, 134, 134,

+134, 134, 133, 133, 133, 132, 132, 132,

+132, 131, 131, 131, 131, 130, 130, 130,

+130, 129, 129, 129, 129, 128, 128, 128

+};

+

+const M4VIFI_Int32  const_storage1[8]

+= {

+0x00002568, 0x00003343, 0x00000649, 0x00000d0f, 0x0000D86C, 0x0000D83B, 0x00010000, 0x00010000

+};

+

+const M4VIFI_Int32  const_storage[8]

+= {

+0x00002568, 0x00003343, 0x1BF800, 0x00000649, 0x00000d0f, 0x110180, 0x40cf, 0x22BE00

+};

+

+

+const M4VIFI_UInt16  *M4VIFI_DivTable_zero

+ = &M4VIFI_DivTable[0];

+

+const M4VIFI_UInt8   *M4VIFI_ClipTable_zero

+ = &M4VIFI_ClipTable[500];

+
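The two "_zero" pointers above set up a classic offset-lookup idiom:
M4VIFI_ClipTable_zero points 500 entries into the 1256-entry clip table, so an
intermediate pixel value in roughly [-500, 755] can be clamped to [0, 255]
with a single indexed load instead of two compares, while M4VIFI_DivTable
stores 65535/x so that a division can be approximated by a multiply and a
16-bit shift. A short illustration (n, x and the variable names are ours):

    M4VIFI_UInt8  low  = M4VIFI_ClipTable_zero[-20];      /* underflow -> 0x00 */
    M4VIFI_UInt8  high = M4VIFI_ClipTable_zero[300];      /* overflow  -> 0xff */
    M4VIFI_UInt32 q    = (n * M4VIFI_DivTable[x]) >> 16;  /* ~= n / x, x >= 1  */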

+M4VIFI_UInt8 M4VIFI_YUV420PlanarToYUV420Semiplanar(void *user_data,

+    M4VIFI_ImagePlane *PlaneIn, M4VIFI_ImagePlane *PlaneOut ) {

+

+    M4VIFI_UInt32 i;

+    M4VIFI_UInt8 *p_buf_src, *p_buf_dest, *p_buf_src_u, *p_buf_src_v;

+    M4VIFI_UInt8    return_code = M4VIFI_OK;

+

+    /* the filter is implemented with the assumption that the width is equal to stride */

+    if(PlaneIn[0].u_width != PlaneIn[0].u_stride)

+        return M4VIFI_INVALID_PARAM;

+

+    /* The input Y Plane is the same as the output Y Plane */

+    p_buf_src = &(PlaneIn[0].pac_data[PlaneIn[0].u_topleft]);

+    p_buf_dest = &(PlaneOut[0].pac_data[PlaneOut[0].u_topleft]);

+    M4OSA_memcpy((M4OSA_Int8*)p_buf_dest,(M4OSA_Int8*)p_buf_src ,

+        PlaneOut[0].u_width * PlaneOut[0].u_height);

+

+    /* The U and V components are planar. They need to be interleaved */

+    p_buf_src_u = &(PlaneIn[1].pac_data[PlaneIn[1].u_topleft]);

+    p_buf_src_v = &(PlaneIn[2].pac_data[PlaneIn[2].u_topleft]);

+    p_buf_dest  = &(PlaneOut[1].pac_data[PlaneOut[1].u_topleft]);

+

+    for(i = 0; i < PlaneOut[1].u_width*PlaneOut[1].u_height; i++)

+    {

+        *p_buf_dest++ = *p_buf_src_u++;

+        *p_buf_dest++ = *p_buf_src_v++;

+    }

+    return return_code;

+}

+

+M4VIFI_UInt8 M4VIFI_SemiplanarYUV420toYUV420(void *user_data,

+    M4VIFI_ImagePlane *PlaneIn, M4VIFI_ImagePlane *PlaneOut ) {

+

+     M4VIFI_UInt32 i;

+     M4VIFI_UInt8 *p_buf_src, *p_buf_dest, *p_buf_src_u, *p_buf_src_v;

+     M4VIFI_UInt8 *p_buf_dest_u,*p_buf_dest_v,*p_buf_src_uv;

+     M4VIFI_UInt8     return_code = M4VIFI_OK;

+

+     /* the filter is implemented with the assumption that the width is equal to stride */

+     if(PlaneIn[0].u_width != PlaneIn[0].u_stride)

+        return M4VIFI_INVALID_PARAM;

+

+     /* The input Y Plane is the same as the output Y Plane */

+     p_buf_src = &(PlaneIn[0].pac_data[PlaneIn[0].u_topleft]);

+     p_buf_dest = &(PlaneOut[0].pac_data[PlaneOut[0].u_topleft]);

+     M4OSA_memcpy((M4OSA_Int8*)p_buf_dest,(M4OSA_Int8*)p_buf_src ,

+         PlaneOut[0].u_width * PlaneOut[0].u_height);

+

+     /* The input U and V components are interleaved. They need to be de-interleaved into separate planes */

+     p_buf_src_uv = &(PlaneIn[1].pac_data[PlaneIn[1].u_topleft]);

+     p_buf_dest_u  = &(PlaneOut[1].pac_data[PlaneOut[1].u_topleft]);

+     p_buf_dest_v  = &(PlaneOut[2].pac_data[PlaneOut[2].u_topleft]);

+

+     for(i = 0; i < PlaneOut[1].u_width*PlaneOut[1].u_height; i++)

+     {

+        *p_buf_dest_u++ = *p_buf_src_uv++;

+        *p_buf_dest_v++ = *p_buf_src_uv++;

+     }

+     return return_code;

+}

+
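For reference, the chroma layouts the two converters above translate between:
for a 4x4 frame, planar YUV420 stores 16 Y bytes followed by four U bytes and
four V bytes, while semiplanar stores the same 16 Y bytes followed by eight
interleaved chroma bytes:

    planar:      Y x16 | U U U U | V V V V
    semiplanar:  Y x16 | U V U V U V U V

The Y plane is byte-identical in both layouts, which is why each converter
simply memcpy's it and only reshuffles the chroma.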

+

+/**

+ ******************************************************************************

+ * prototype    M4VSS3GPP_externalVideoEffectColor(M4OSA_Void *pFunctionContext,

+ *                                                  M4VIFI_ImagePlane *PlaneIn,

+ *                                                  M4VIFI_ImagePlane *PlaneOut,

+ *                                                  M4VSS3GPP_ExternalProgress *pProgress,

+ *                                                  M4OSA_UInt32 uiEffectKind)

+ *

+ * @brief   This function applies a color effect on an input YUV420 planar frame

+ * @note

+ * @param   pFunctionContext(IN) Contains which color to apply (not very clean ...)

+ * @param   PlaneIn         (IN) Input YUV420 planar

+ * @param   PlaneOut        (IN/OUT) Output YUV420 planar

+ * @param   pProgress       (IN/OUT) Progress indication (0-100)

+ * @param   uiEffectKind    (IN) Unused

+ *

+ * @return  M4VIFI_OK:  No error

+ ******************************************************************************

+*/

+M4OSA_ERR M4VSS3GPP_externalVideoEffectColor(M4OSA_Void *pFunctionContext,

+            M4VIFI_ImagePlane *PlaneIn, M4VIFI_ImagePlane *PlaneOut,

+            M4VSS3GPP_ExternalProgress *pProgress, M4OSA_UInt32 uiEffectKind) {

+

+    M4VIFI_Int32 plane_number;

+    M4VIFI_UInt32 i,j;

+    M4VIFI_UInt8 *p_buf_src, *p_buf_dest;

+    M4xVSS_ColorStruct* ColorContext = (M4xVSS_ColorStruct*)pFunctionContext;

+

+    for (plane_number = 0; plane_number < 3; plane_number++)

+    {

+        p_buf_src =

+         &(PlaneIn[plane_number].pac_data[PlaneIn[plane_number].u_topleft]);

+

+        p_buf_dest =

+         &(PlaneOut[plane_number].pac_data[PlaneOut[plane_number].u_topleft]);

+        for (i = 0; i < PlaneOut[plane_number].u_height; i++)

+        {

+            /**

+             * Chrominance */

+            if(plane_number==1 || plane_number==2)

+            {

+                //switch ((M4OSA_UInt32)pFunctionContext) // commented out because a structure for the effects context exists

+                switch (ColorContext->colorEffectType)

+                {

+                case M4xVSS_kVideoEffectType_BlackAndWhite:

+                    M4OSA_memset((M4OSA_MemAddr8)p_buf_dest,

+                     PlaneIn[plane_number].u_width, 128);

+                    break;

+                case M4xVSS_kVideoEffectType_Pink:

+                    M4OSA_memset((M4OSA_MemAddr8)p_buf_dest,

+                     PlaneIn[plane_number].u_width, 255);

+                    break;

+                case M4xVSS_kVideoEffectType_Green:

+                    M4OSA_memset((M4OSA_MemAddr8)p_buf_dest,

+                     PlaneIn[plane_number].u_width, 0);

+                    break;

+                case M4xVSS_kVideoEffectType_Sepia:

+                    if(plane_number==1)

+                    {

+                        M4OSA_memset((M4OSA_MemAddr8)p_buf_dest,

+                         PlaneIn[plane_number].u_width, 117);

+                    }

+                    else

+                    {

+                        M4OSA_memset((M4OSA_MemAddr8)p_buf_dest,

+                         PlaneIn[plane_number].u_width, 139);

+                    }

+                    break;

+                case M4xVSS_kVideoEffectType_Negative:

+                    M4OSA_memcpy((M4OSA_MemAddr8)p_buf_dest,

+                     (M4OSA_MemAddr8)p_buf_src ,PlaneOut[plane_number].u_width);

+                    break;

+

+                case M4xVSS_kVideoEffectType_ColorRGB16:

+                    {

+                        M4OSA_UInt16 r = 0,g = 0,b = 0,y = 0,u = 0,v = 0;

+

+                        /*first get the r, g, b*/

+                        b = (ColorContext->rgb16ColorData &  0x001f);

+                        g = (ColorContext->rgb16ColorData &  0x07e0)>>5;

+                        r = (ColorContext->rgb16ColorData &  0xf800)>>11;

+

+                        /*keep y, but replace u and v*/

+                        if(plane_number==1)

+                        {

+                            /*then convert to u*/

+                            u = U16(r, g, b);

+                            M4OSA_memset((M4OSA_MemAddr8)p_buf_dest,

+                             PlaneIn[plane_number].u_width, (M4OSA_UInt8)u);

+                        }

+                        if(plane_number==2)

+                        {

+                            /*then convert to v*/

+                            v = V16(r, g, b);

+                            M4OSA_memset((M4OSA_MemAddr8)p_buf_dest,

+                             PlaneIn[plane_number].u_width, (M4OSA_UInt8)v);

+                        }

+                    }

+                    break;

+                case M4xVSS_kVideoEffectType_Gradient:

+                    {

+                        M4OSA_UInt16 r = 0,g = 0,b = 0,y = 0,u = 0,v = 0;

+

+                        /*first get the r, g, b*/

+                        b = (ColorContext->rgb16ColorData &  0x001f);

+                        g = (ColorContext->rgb16ColorData &  0x07e0)>>5;

+                        r = (ColorContext->rgb16ColorData &  0xf800)>>11;

+

+                        /*for color gradation*/

+                        b = (M4OSA_UInt16)(b - ((b*i)/PlaneIn[plane_number].u_height));

+                        g = (M4OSA_UInt16)(g - ((g*i)/PlaneIn[plane_number].u_height));

+                        r = (M4OSA_UInt16)(r - ((r*i)/PlaneIn[plane_number].u_height));

+

+                        /*keep y, but replace u and v*/

+                        if(plane_number==1)

+                        {

+                            /*then convert to u*/

+                            u = U16(r, g, b);

+                            M4OSA_memset((M4OSA_MemAddr8)p_buf_dest,

+                             PlaneIn[plane_number].u_width, (M4OSA_UInt8)u);

+                        }

+                        if(plane_number==2)

+                        {

+                            /*then convert to v*/

+                            v = V16(r, g, b);

+                            M4OSA_memset((M4OSA_MemAddr8)p_buf_dest,

+                             PlaneIn[plane_number].u_width, (M4OSA_UInt8)v);

+                        }

+                    }

+                    break;

+                }

+            }

+            /**

+             * Luminance */

+            else

+            {

+                //switch ((M4OSA_UInt32)pFunctionContext) // commented out because a structure for the effects context exists

+                switch (ColorContext->colorEffectType)

+                {

+                case M4xVSS_kVideoEffectType_Negative:

+                    for(j=0;j<PlaneOut[plane_number].u_width;j++)

+                    {

+                            p_buf_dest[j] = 255 - p_buf_src[j];

+                    }

+                    break;

+                default:

+                    M4OSA_memcpy((M4OSA_MemAddr8)p_buf_dest,

+                     (M4OSA_MemAddr8)p_buf_src ,PlaneOut[plane_number].u_width);

+                    break;

+                }

+            }

+            p_buf_src += PlaneIn[plane_number].u_stride;

+            p_buf_dest += PlaneOut[plane_number].u_stride;

+        }

+    }

+

+    return M4VIFI_OK;

+}

+
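The ColorRGB16 and Gradient branches above unpack RGB565 with the usual 5/6/5
masks. As a worked example, a pure-red color word 0xF800 gives
b = 0xF800 & 0x001f = 0, g = (0xF800 & 0x07e0) >> 5 = 0, and
r = (0xF800 & 0xf800) >> 11 = 31; the U16/V16 macros then map those 5- and
6-bit components to the constant chroma value written across the U or V plane,
while the Y plane is copied through untouched.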

+/**

+ ******************************************************************************

+ * prototype    M4VSS3GPP_externalVideoEffectFraming(M4OSA_Void *pFunctionContext,

+ *                                                  M4VIFI_ImagePlane *PlaneIn,

+ *                                                  M4VIFI_ImagePlane *PlaneOut,

+ *                                                  M4VSS3GPP_ExternalProgress *pProgress,

+ *                                                  M4OSA_UInt32 uiEffectKind)

+ *

+ * @brief   This function adds a fixed or animated image onto an input YUV420 planar frame

+ * @note

+ * @param   pFunctionContext(IN) Contains the framing (overlay) context

+ * @param   PlaneIn         (IN) Input YUV420 planar

+ * @param   PlaneOut        (IN/OUT) Output YUV420 planar

+ * @param   pProgress       (IN/OUT) Progress indication (0-100)

+ * @param   uiEffectKind    (IN) Unused

+ *

+ * @return  M4VIFI_OK:  No error

+ ******************************************************************************

+*/

+M4OSA_ERR M4VSS3GPP_externalVideoEffectFraming(

+            M4OSA_Void *userData, M4VIFI_ImagePlane PlaneIn[3],

+            M4VIFI_ImagePlane *PlaneOut, M4VSS3GPP_ExternalProgress *pProgress,

+            M4OSA_UInt32 uiEffectKind ) {

+

+    M4VIFI_UInt32 x,y;

+

+    M4VIFI_UInt8 *p_in_Y = PlaneIn[0].pac_data;

+    M4VIFI_UInt8 *p_in_U = PlaneIn[1].pac_data;

+    M4VIFI_UInt8 *p_in_V = PlaneIn[2].pac_data;

+

+    M4xVSS_FramingStruct* Framing = M4OSA_NULL;

+    M4xVSS_FramingStruct* currentFraming = M4OSA_NULL;

+    M4VIFI_UInt8 *FramingRGB = M4OSA_NULL;

+

+    M4VIFI_UInt8 *p_out0;

+    M4VIFI_UInt8 *p_out1;

+    M4VIFI_UInt8 *p_out2;

+

+    M4VIFI_UInt32 topleft[2];

+

+    M4OSA_UInt8 transparent1 =

+     (M4OSA_UInt8)((TRANSPARENT_COLOR & 0xFF00)>>8);

+    M4OSA_UInt8 transparent2 = (M4OSA_UInt8)TRANSPARENT_COLOR;

+

+#ifndef DECODE_GIF_ON_SAVING

+    Framing = (M4xVSS_FramingStruct *)userData;

+    currentFraming = (M4xVSS_FramingStruct *)Framing->pCurrent;

+    FramingRGB = Framing->FramingRgb->pac_data;

+#endif /*DECODE_GIF_ON_SAVING*/

+

+#ifdef DECODE_GIF_ON_SAVING

+    M4OSA_ERR err;

+    Framing =

+     (M4xVSS_FramingStruct *)((M4xVSS_FramingContext*)userData)->aFramingCtx;

+    if(Framing == M4OSA_NULL)

+    {

+        ((M4xVSS_FramingContext*)userData)->clipTime = pProgress->uiOutputTime;

+        err = M4xVSS_internalDecodeGIF(userData);

+        if(M4NO_ERROR != err)

+        {

+            M4OSA_TRACE1_1("M4VSS3GPP_externalVideoEffectFraming: \

+                Error in M4xVSS_internalDecodeGIF: 0x%x", err);

+            return err;

+        }

+        Framing =

+         (M4xVSS_FramingStruct *)((M4xVSS_FramingContext*)userData)->aFramingCtx;

+        /* Initializes first GIF time */

+        ((M4xVSS_FramingContext*)userData)->current_gif_time =

+          pProgress->uiOutputTime;

+    }

+    currentFraming = (M4xVSS_FramingStruct *)Framing;

+    FramingRGB = Framing->FramingRgb->pac_data;

+#endif /*DECODE_GIF_ON_SAVING*/

+

+    /**

+     * Initialize input / output plane pointers */

+    p_in_Y += PlaneIn[0].u_topleft;

+    p_in_U += PlaneIn[1].u_topleft;

+    p_in_V += PlaneIn[2].u_topleft;

+

+    p_out0 = PlaneOut[0].pac_data;

+    p_out1 = PlaneOut[1].pac_data;

+    p_out2 = PlaneOut[2].pac_data;

+

+    /**

+     * Depending on time, initialize Framing frame to use */

+    if(Framing->previousClipTime == -1)

+    {

+        Framing->previousClipTime = pProgress->uiOutputTime;

+    }

+

+    /**

+     * If the current clip time has reached the duration of one frame of the framing picture,

+     * we need to step to the next framing picture */

+#ifdef DECODE_GIF_ON_SAVING

+    if(((M4xVSS_FramingContext*)userData)->b_animated == M4OSA_TRUE)

+    {

+        while((((M4xVSS_FramingContext*)userData)->current_gif_time + currentFraming->duration) < pProgress->uiOutputTime)

+        {

+            ((M4xVSS_FramingContext*)userData)->clipTime =

+             pProgress->uiOutputTime;

+

+            err = M4xVSS_internalDecodeGIF(userData);

+            if(M4NO_ERROR != err)

+            {

+                M4OSA_TRACE1_1("M4VSS3GPP_externalVideoEffectFraming: Error in M4xVSS_internalDecodeGIF: 0x%x", err);

+                return err;

+            }

+            if(currentFraming->duration != 0)

+            {

+                ((M4xVSS_FramingContext*)userData)->current_gif_time += currentFraming->duration;

+            }

+            else

+            {

+                ((M4xVSS_FramingContext*)userData)->current_gif_time +=

+                 pProgress->uiOutputTime - Framing->previousClipTime;

+            }

+            Framing = (M4xVSS_FramingStruct *)((M4xVSS_FramingContext*)userData)->aFramingCtx;

+            currentFraming = (M4xVSS_FramingStruct *)Framing;

+            FramingRGB = Framing->FramingRgb->pac_data;

+        }

+    }

+#else

+            Framing->pCurrent = currentFraming->pNext;

+            currentFraming = (M4xVSS_FramingStruct*)Framing->pCurrent;

+#endif /*DECODE_GIF_ON_SAVING*/

+

+    Framing->previousClipTime = pProgress->uiOutputTime;

+    FramingRGB = currentFraming->FramingRgb->pac_data;

+    topleft[0] = currentFraming->topleft_x;

+    topleft[1] = currentFraming->topleft_y;

+

+    for( x=0 ;x < PlaneIn[0].u_height ; x++)

+    {

+        for( y=0 ;y < PlaneIn[0].u_width ; y++)

+        {

+            /**

+             * To handle framing with input size != output size

+             * Framing is applied if the coordinates match between framing/topleft and the input plane */

+            if( y < (topleft[0] + currentFraming->FramingYuv[0].u_width)  &&

+                y >= topleft[0] &&

+                x < (topleft[1] + currentFraming->FramingYuv[0].u_height) &&

+                x >= topleft[1])

+            {

+

+                /*Alpha blending support*/

+                M4OSA_Float alphaBlending = 1;

+#ifdef DECODE_GIF_ON_SAVING

+                M4xVSS_internalEffectsAlphaBlending* alphaBlendingStruct =

+                 (M4xVSS_internalEffectsAlphaBlending*)((M4xVSS_FramingContext*)userData)->alphaBlendingStruct;

+#else

+                M4xVSS_internalEffectsAlphaBlending* alphaBlendingStruct =

+                 (M4xVSS_internalEffectsAlphaBlending*)((M4xVSS_FramingStruct*)userData)->alphaBlendingStruct;

+#endif //#ifdef DECODE_GIF_ON_SAVING

+

+                if(alphaBlendingStruct != M4OSA_NULL)

+                {

+                    if(pProgress->uiProgress >= 0 && pProgress->uiProgress < (M4OSA_UInt32)(alphaBlendingStruct->m_fadeInTime*10))

+                    {

+                        alphaBlending = ((M4OSA_Float)(alphaBlendingStruct->m_middle - alphaBlendingStruct->m_start)*pProgress->uiProgress/(alphaBlendingStruct->m_fadeInTime*10));

+                        alphaBlending += alphaBlendingStruct->m_start;

+                        alphaBlending /= 100;

+                    }

+                    else if(pProgress->uiProgress >= (M4OSA_UInt32)(alphaBlendingStruct->m_fadeInTime*10) && pProgress->uiProgress < 1000 - (M4OSA_UInt32)(alphaBlendingStruct->m_fadeOutTime*10))

+                    {

+                        alphaBlending = (M4OSA_Float)((M4OSA_Float)alphaBlendingStruct->m_middle/100);

+                    }

+                    else if(pProgress->uiProgress >= 1000 - (M4OSA_UInt32)(alphaBlendingStruct->m_fadeOutTime*10))

+                    {

+                        alphaBlending = ((M4OSA_Float)(alphaBlendingStruct->m_middle - alphaBlendingStruct->m_end))*(1000 - pProgress->uiProgress)/(alphaBlendingStruct->m_fadeOutTime*10);

+                        alphaBlending += alphaBlendingStruct->m_end;

+                        alphaBlending /= 100;

+                    }

+                }

+

+                /**/

+

+                if((*(FramingRGB)==transparent1) && (*(FramingRGB+1)==transparent2))

+                {

+                    *( p_out0+y+x*PlaneOut[0].u_stride)=(*(p_in_Y+y+x*PlaneIn[0].u_stride));

+                    *( p_out1+(y>>1)+(x>>1)*PlaneOut[1].u_stride)=(*(p_in_U+(y>>1)+(x>>1)*PlaneIn[1].u_stride));

+                    *( p_out2+(y>>1)+(x>>1)*PlaneOut[2].u_stride)=(*(p_in_V+(y>>1)+(x>>1)*PlaneIn[2].u_stride));

+                }

+                else

+                {

+                    *( p_out0+y+x*PlaneOut[0].u_stride)=(*(currentFraming->FramingYuv[0].pac_data+(y-topleft[0])+(x-topleft[1])*currentFraming->FramingYuv[0].u_stride))*alphaBlending;

+                    *( p_out0+y+x*PlaneOut[0].u_stride)+=(*(p_in_Y+y+x*PlaneIn[0].u_stride))*(1-alphaBlending);

+                    *( p_out1+(y>>1)+(x>>1)*PlaneOut[1].u_stride)=(*(currentFraming->FramingYuv[1].pac_data+((y-topleft[0])>>1)+((x-topleft[1])>>1)*currentFraming->FramingYuv[1].u_stride))*alphaBlending;

+                    *( p_out1+(y>>1)+(x>>1)*PlaneOut[1].u_stride)+=(*(p_in_U+(y>>1)+(x>>1)*PlaneIn[1].u_stride))*(1-alphaBlending);

+                    *( p_out2+(y>>1)+(x>>1)*PlaneOut[2].u_stride)=(*(currentFraming->FramingYuv[2].pac_data+((y-topleft[0])>>1)+((x-topleft[1])>>1)*currentFraming->FramingYuv[2].u_stride))*alphaBlending;

+                    *( p_out2+(y>>1)+(x>>1)*PlaneOut[2].u_stride)+=(*(p_in_V+(y>>1)+(x>>1)*PlaneIn[2].u_stride))*(1-alphaBlending);

+                }

+                if( PlaneIn[0].u_width < (topleft[0] + currentFraming->FramingYuv[0].u_width) &&

+                    y == PlaneIn[0].u_width-1)

+                {

+                    FramingRGB = FramingRGB + 2 * (topleft[0] + currentFraming->FramingYuv[0].u_width - PlaneIn[0].u_width + 1);

+                }

+                else

+                {

+                    FramingRGB = FramingRGB + 2;

+                }

+            }

+            /**

+             * Just copy input plane to output plane */

+            else

+            {

+                *( p_out0+y+x*PlaneOut[0].u_stride)=*(p_in_Y+y+x*PlaneIn[0].u_stride);

+                *( p_out1+(y>>1)+(x>>1)*PlaneOut[1].u_stride)=*(p_in_U+(y>>1)+(x>>1)*PlaneIn[1].u_stride);

+                *( p_out2+(y>>1)+(x>>1)*PlaneOut[2].u_stride)=*(p_in_V+(y>>1)+(x>>1)*PlaneIn[2].u_stride);

+            }

+        }

+    }

+

+#ifdef DECODE_GIF_ON_SAVING

+    if(pProgress->bIsLast == M4OSA_TRUE

+        && (M4OSA_Bool)((M4xVSS_FramingContext*)userData)->b_IsFileGif == M4OSA_TRUE)

+    {

+        M4xVSS_internalDecodeGIF_Cleaning((M4xVSS_FramingContext*)userData);

+    }

+#endif /*DECODE_GIF_ON_SAVING*/

+    return M4VIFI_OK;

+}

+
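The alphaBlending computation inside the overlay loop is a three-segment ramp
over uiProgress (0..1000): it rises from m_start to m_middle during the first
m_fadeInTime*10 units, holds at m_middle, then falls to m_end over the final
m_fadeOutTime*10 units, all three levels expressed in percent. With
illustrative values m_start = 0, m_middle = 100, m_end = 0 and
m_fadeInTime = 10 (so the fade-in spans uiProgress 0..100), a frame at
uiProgress = 50 gets

    alphaBlending = ((100 - 0) * 50 / 100 + 0) / 100 = 0.5

i.e. the framing picture is blended in at half opacity.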

+

+/**

+ ******************************************************************************

+ * prototype    M4VSS3GPP_externalVideoEffectFifties(M4OSA_Void *pFunctionContext,

+ *                                                  M4VIFI_ImagePlane *PlaneIn,

+ *                                                  M4VIFI_ImagePlane *PlaneOut,

+ *                                                  M4VSS3GPP_ExternalProgress *pProgress,

+ *                                                  M4OSA_UInt32 uiEffectKind)

+ *

+ * @brief   This function makes a video look as if it had been shot in the fifties

+ * @note

+ * @param   pUserData       (IN) Context

+ * @param   pPlaneIn        (IN) Input YUV420 planar

+ * @param   pPlaneOut       (IN/OUT) Output YUV420 planar

+ * @param   pProgress       (IN/OUT) Progress indication (0-100)

+ * @param   uiEffectKind    (IN) Unused

+ *

+ * @return  M4VIFI_OK:          No error

+ * @return  M4ERR_PARAMETER:    pFiftiesData, pPlaneOut or pProgress are NULL (DEBUG only)

+ ******************************************************************************

+*/

+M4OSA_ERR M4VSS3GPP_externalVideoEffectFifties(

+    M4OSA_Void *pUserData, M4VIFI_ImagePlane *pPlaneIn,

+    M4VIFI_ImagePlane *pPlaneOut, M4VSS3GPP_ExternalProgress *pProgress,

+    M4OSA_UInt32 uiEffectKind )

+{

+    M4VIFI_UInt32 x, y, xShift;

+    M4VIFI_UInt8 *pInY = pPlaneIn[0].pac_data;

+    M4VIFI_UInt8 *pOutY, *pInYbegin;

+    M4VIFI_UInt8 *pInCr,* pOutCr;

+    M4VIFI_Int32 plane_number;

+

+    /* Internal context*/

+    M4xVSS_FiftiesStruct* p_FiftiesData = (M4xVSS_FiftiesStruct *)pUserData;

+

+    /* Initialize input / output plane pointers */

+    pInY += pPlaneIn[0].u_topleft;

+    pOutY = pPlaneOut[0].pac_data;

+    pInYbegin  = pInY;

+

+    /* Initialize the random */

+    if(p_FiftiesData->previousClipTime < 0)

+    {

+        M4OSA_randInit();

+        M4OSA_rand((M4OSA_Int32*)&(p_FiftiesData->shiftRandomValue), (pPlaneIn[0].u_height) >> 4);

+        M4OSA_rand((M4OSA_Int32*)&(p_FiftiesData->stripeRandomValue), (pPlaneIn[0].u_width)<< 2);

+        p_FiftiesData->previousClipTime = pProgress->uiOutputTime;

+    }

+

+    /* Choose random values if we have reached the duration of a partial effect */

+    else if( (pProgress->uiOutputTime - p_FiftiesData->previousClipTime) > p_FiftiesData->fiftiesEffectDuration)

+    {

+        M4OSA_rand((M4OSA_Int32*)&(p_FiftiesData->shiftRandomValue), (pPlaneIn[0].u_height) >> 4);

+        M4OSA_rand((M4OSA_Int32*)&(p_FiftiesData->stripeRandomValue), (pPlaneIn[0].u_width)<< 2);

+        p_FiftiesData->previousClipTime = pProgress->uiOutputTime;

+    }

+

+    /* Put in Sepia the chrominance */

+    for (plane_number = 1; plane_number < 3; plane_number++)

+    {

+        pInCr  = pPlaneIn[plane_number].pac_data  + pPlaneIn[plane_number].u_topleft;

+        pOutCr = pPlaneOut[plane_number].pac_data + pPlaneOut[plane_number].u_topleft;

+

+        for (x = 0; x < pPlaneOut[plane_number].u_height; x++)

+        {

+            if (1 == plane_number)

+                M4OSA_memset((M4OSA_MemAddr8)pOutCr, pPlaneIn[plane_number].u_width, 117); /* U value */

+            else

+                M4OSA_memset((M4OSA_MemAddr8)pOutCr, pPlaneIn[plane_number].u_width, 139); /* V value */

+

+            pInCr  += pPlaneIn[plane_number].u_stride;

+            pOutCr += pPlaneOut[plane_number].u_stride;

+        }

+    }

+

+    /* Compute the new pixels values */

+    for( x = 0 ; x < pPlaneIn[0].u_height ; x++)

+    {

+        M4VIFI_UInt8 *p_outYtmp, *p_inYtmp;

+

+        /* Compute the xShift (random value) */

+        if (0 == (p_FiftiesData->shiftRandomValue % 5 ))

+            xShift = (x + p_FiftiesData->shiftRandomValue ) % (pPlaneIn[0].u_height - 1);

+        else

+            xShift = (x + (pPlaneIn[0].u_height - p_FiftiesData->shiftRandomValue) ) % (pPlaneIn[0].u_height - 1);

+

+        /* Initialize the pointers */

+        p_outYtmp = pOutY + 1;                                    /* yShift of 1 pixel */

+        p_inYtmp  = pInYbegin + (xShift * pPlaneIn[0].u_stride);  /* Apply the xShift */

+

+        for( y = 0 ; y < pPlaneIn[0].u_width ; y++)

+        {

+            /* Set Y value */

+            if (xShift > (pPlaneIn[0].u_height - 4))

+                *p_outYtmp = 40;        /* Add some horizontal black lines between the two parts of the image */

+            else if ( y == p_FiftiesData->stripeRandomValue)

+                *p_outYtmp = 90;        /* Add a random vertical line for the bulk */

+            else

+                *p_outYtmp = *p_inYtmp;

+

+

+            /* Go to the next pixel */

+            p_outYtmp++;

+            p_inYtmp++;

+

+            /* Restart at the beginning of the line for the last pixel*/

+            if (y == (pPlaneIn[0].u_width - 2))

+                p_outYtmp = pOutY;

+        }

+

+        /* Go to the next line */

+        pOutY += pPlaneOut[0].u_stride;

+    }

+

+    return M4VIFI_OK;

+}

+
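In summary, the fifties effect above sepia-tones the chrominance (U = 117,
V = 139, the same constants as the Sepia branch of the color effect) and
scrambles the luminance rows: output row x is copied from source row
xShift = (x + shiftRandomValue) or (x - shiftRandomValue), mod (u_height - 1);
rows whose shifted index lands within the last few source rows are blacked out
(Y = 40) to mark the seam between the two shifted parts of the image, and one
random column gets a bright stripe (Y = 90). New random values are drawn
whenever fiftiesEffectDuration elapses, so the artifacts jump around over time.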

+unsigned char M4VFL_modifyLumaWithScale(M4ViComImagePlane *plane_in,

+                                        M4ViComImagePlane *plane_out,

+                                        unsigned long lum_factor,

+                                        void *user_data)

+{

+    unsigned short *p_src, *p_dest, *p_src_line, *p_dest_line;

+    unsigned char *p_csrc, *p_cdest, *p_csrc_line, *p_cdest_line;

+    unsigned long pix_src;

+    unsigned long u_outpx, u_outpx2;

+    unsigned long u_width, u_stride, u_stride_out,u_height, pix;

+    long i, j;

+

+    /* copy or filter chroma */

+    u_width = plane_in[1].u_width;

+    u_height = plane_in[1].u_height;

+    u_stride = plane_in[1].u_stride;

+    u_stride_out = plane_out[1].u_stride;

+    p_cdest_line = (unsigned char *) &plane_out[1].pac_data[plane_out[1].u_topleft];

+    p_csrc_line = (unsigned char *) &plane_in[1].pac_data[plane_in[1].u_topleft];

+

+    if (lum_factor > 256)

+    {

+        p_cdest = (unsigned char *) &plane_out[2].pac_data[plane_out[2].u_topleft];

+        p_csrc = (unsigned char *) &plane_in[2].pac_data[plane_in[2].u_topleft];

+        /* copy chroma */

+        for (j = u_height; j != 0; j--)

+        {

+            /* copy one full U row and one full V row per pass */

+            M4OSA_memcpy((M4OSA_MemAddr8)p_cdest_line, (M4OSA_MemAddr8)p_csrc_line, u_width);

+            M4OSA_memcpy((M4OSA_MemAddr8)p_cdest, (M4OSA_MemAddr8)p_csrc, u_width);

+            p_cdest_line += u_stride_out;

+            p_cdest += u_stride_out;

+            p_csrc_line += u_stride;

+            p_csrc += u_stride;

+        }

+    }

+    else

+    {

+        /* filter chroma */

+        pix = (1024 - lum_factor) << 7;

+        for (j = u_height; j != 0; j--)

+        {

+            p_cdest = p_cdest_line;

+            p_csrc = p_csrc_line;

+            for (i = u_width; i != 0; i--)

+            {

+                *p_cdest++ = ((pix + (*p_csrc++ & 0xFF) * lum_factor) >> LUM_FACTOR_MAX);

+            }

+            p_cdest_line += u_stride_out;

+            p_csrc_line += u_stride;

+        }

+        p_cdest_line = (unsigned char *) &plane_out[2].pac_data[plane_out[2].u_topleft];

+        p_csrc_line = (unsigned char *) &plane_in[2].pac_data[plane_in[2].u_topleft];

+        for (j = u_height; j != 0; j--)

+        {

+            p_cdest = p_cdest_line;

+            p_csrc = p_csrc_line;

+            for (i = u_width; i != 0; i--)

+            {

+                *p_cdest++ = ((pix + (*p_csrc++ & 0xFF) * lum_factor) >> LUM_FACTOR_MAX);

+            }

+            p_cdest_line += u_stride_out;

+            p_csrc_line += u_stride;

+        }

+    }

+    /* apply luma factor */

+    u_width = plane_in[0].u_width;

+    u_height = plane_in[0].u_height;

+    u_stride = (plane_in[0].u_stride >> 1);

+    u_stride_out = (plane_out[0].u_stride >> 1);

+    p_dest = (unsigned short *) &plane_out[0].pac_data[plane_out[0].u_topleft];

+    p_src = (unsigned short *) &plane_in[0].pac_data[plane_in[0].u_topleft];

+    p_dest_line = p_dest;

+    p_src_line = p_src;

+

+    for (j = u_height; j != 0; j--)

+    {

+        p_dest = p_dest_line;

+        p_src = p_src_line;

+        for (i = (u_width >> 1); i != 0; i--)

+        {

+            pix_src = (unsigned long) *p_src++;

+            pix = pix_src & 0xFF;

+            u_outpx = ((pix * lum_factor) >> LUM_FACTOR_MAX);

+            pix = ((pix_src & 0xFF00) >> 8);

+            u_outpx2 = (((pix * lum_factor) >> LUM_FACTOR_MAX)<< 8) ;

+            *p_dest++ = (unsigned short) (u_outpx2 | u_outpx);

+        }

+        p_dest_line += u_stride_out;

+        p_src_line += u_stride;

+    }

+

+    return 0;

+}

+
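In the function above, lum_factor is a fixed-point gain; judging from the 1024
constant, LUM_FACTOR_MAX is 10, making 1024 unity gain (an assumption, the
macro is defined elsewhere). Each Y sample becomes (Y * lum_factor) >> 10, so
lum_factor = 512 halves the luminance. The chroma path is a linear blend
toward neutral gray: since pix = (1024 - lum_factor) << 7 equals
(1024 - lum_factor) * 128, each chroma sample becomes

    C' = ((1024 - lum_factor) * 128 + C * lum_factor) >> 10

a weighted average of C and 128 that desaturates the image as it darkens.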

+/**

+ ******************************************************************************

+ * unsigned char M4VFL_applyCurtain(M4ViComImagePlane *plane_in, M4ViComImagePlane *plane_out, M4VFL_CurtainParam *curtain_factor, void *user_data)

+ * @author  Beatrice Nezot (PHILIPS Software Vision)

+ * @brief   This function applies a black curtain onto a YUV420 image.

+ * @note    This function writes black lines either at the top of the image or at

+ *          the bottom of the image. The other lines are copied from the source image.

+ *          First, the number of black lines is computed and rounded to an even integer.

+ * @param   plane_in: (IN) pointer to the 3 image planes of the source image

+ * @param   plane_out: (OUT) pointer to the 3 image planes of the destination image

+ * @param   user_data: (IN) pointer to some user_data

+ * @param   curtain_factor: (IN) structure with the parameters of the curtain (nb of black lines and if at the top/bottom of the image)

+ * @return  0: there is no error

+ ******************************************************************************

+*/

+unsigned char M4VFL_applyCurtain(M4ViComImagePlane *plane_in, M4ViComImagePlane *plane_out, M4VFL_CurtainParam *curtain_factor, void *user_data)

+{

+    unsigned char *p_src, *p_srcu, *p_srcv,*p_dest, *p_destu, *p_destv;

+    unsigned long u_width, u_widthuv, u_stride_out, u_stride_out_uv,u_stride, u_stride_uv,u_height;

+    long j;

+    unsigned long nb_black_lines;

+

+    u_width = plane_in[0].u_width;

+    u_height = plane_in[0].u_height;

+    u_stride_out = plane_out[0].u_stride ;

+    u_stride_out_uv = plane_out[1].u_stride;

+    p_dest = (unsigned char *) &plane_out[0].pac_data[plane_out[0].u_topleft];

+    p_destu = (unsigned char *) &plane_out[1].pac_data[plane_out[1].u_topleft];

+    p_destv = (unsigned char *) &plane_out[2].pac_data[plane_out[2].u_topleft];

+    u_widthuv = u_width >> 1;

+    u_stride = plane_in[0].u_stride ;

+    u_stride_uv = plane_in[1].u_stride;

+

+    /* nb_black_lines is even */

+    nb_black_lines = (unsigned long) ((curtain_factor->nb_black_lines >> 1) << 1);

+

+    if (curtain_factor->top_is_black)

+    {

+        /* black lines first */

+        /* compute index of of first source pixels (Y, U and V) to copy after the black lines */

+        p_src = (unsigned char *) &plane_in[0].pac_data[plane_in[0].u_topleft + ((nb_black_lines) * plane_in[0].u_stride)];

+        p_srcu = (unsigned char *) &plane_in[1].pac_data[plane_in[1].u_topleft + (((nb_black_lines) * plane_in[1].u_stride) >> 1)];

+        p_srcv = (unsigned char *) &plane_in[2].pac_data[plane_in[2].u_topleft+ (((nb_black_lines) * plane_in[2].u_stride) >> 1)];

+

+        /* write black lines */

+        for (j = (nb_black_lines >> 1); j != 0; j--)

+        {

+            M4OSA_memset((M4OSA_MemAddr8)p_dest, u_width, 0);

+            p_dest += u_stride_out;

+            M4OSA_memset((M4OSA_MemAddr8)p_dest, u_width, 0);

+            p_dest += u_stride_out;

+            M4OSA_memset((M4OSA_MemAddr8)p_destu, u_widthuv, 128);

+            M4OSA_memset((M4OSA_MemAddr8)p_destv, u_widthuv, 128);

+            p_destu += u_stride_out_uv;

+            p_destv += u_stride_out_uv;

+        }

+

+        /* copy from source image */

+        for (j = (u_height - nb_black_lines) >> 1; j != 0; j--)

+        {

+            M4OSA_memcpy((M4OSA_MemAddr8)p_dest, (M4OSA_MemAddr8)p_src, u_width);

+            p_dest += u_stride_out;

+            p_src += u_stride;

+            M4OSA_memcpy((M4OSA_MemAddr8)p_dest, (M4OSA_MemAddr8)p_src, u_width);

+            p_dest += u_stride_out;

+            p_src += u_stride;

+            M4OSA_memcpy((M4OSA_MemAddr8)p_destu, (M4OSA_MemAddr8)p_srcu, u_widthuv);

+            M4OSA_memcpy((M4OSA_MemAddr8)p_destv, (M4OSA_MemAddr8)p_srcv, u_widthuv);

+            p_destu += u_stride_out_uv;

+            p_destv += u_stride_out_uv;

+            p_srcu += u_stride_uv;

+            p_srcv += u_stride_uv;

+        }

+    }

+    else

+    {

+        /* black lines at the bottom of the image */

+        p_src = (unsigned char *) &plane_in[0].pac_data[plane_in[0].u_topleft];

+        p_srcu = (unsigned char *) &plane_in[1].pac_data[plane_in[1].u_topleft];

+        p_srcv = (unsigned char *) &plane_in[2].pac_data[plane_in[2].u_topleft];

+

+        /* copy from the source image */

+        for (j = (nb_black_lines >> 1); j != 0; j--)

+        {

+            M4OSA_memcpy((M4OSA_MemAddr8)p_dest, (M4OSA_MemAddr8)p_src, u_width);

+            p_dest += u_stride_out;

+            p_src += u_stride;

+            M4OSA_memcpy((M4OSA_MemAddr8)p_dest, (M4OSA_MemAddr8)p_src, u_width);

+            p_dest += u_stride_out;

+            p_src += u_stride;

+            M4OSA_memcpy((M4OSA_MemAddr8)p_destu, (M4OSA_MemAddr8)p_srcu, u_widthuv);

+            M4OSA_memcpy((M4OSA_MemAddr8)p_destv, (M4OSA_MemAddr8)p_srcv, u_widthuv);

+            p_destu += u_stride_out_uv;

+            p_destv += u_stride_out_uv;

+            p_srcu += u_stride_uv;

+            p_srcv += u_stride_uv;

+        }

+

+        /* write black lines*/

+        /* p_dest, p_destu and p_destv carry over from the copy loop above */

+        for (j = (u_height - nb_black_lines) >> 1; j != 0; j--)

+        {

+            M4OSA_memset((M4OSA_MemAddr8)p_dest, u_width, 0);

+            p_dest += u_stride_out;

+            M4OSA_memset((M4OSA_MemAddr8)p_dest, u_width, 0);

+            p_dest += u_stride_out;

+            M4OSA_memset((M4OSA_MemAddr8)p_destu, u_widthuv, 128);

+            M4OSA_memset((M4OSA_MemAddr8)p_destv, u_widthuv, 128);

+            p_destu += u_stride_out_uv;

+            p_destv += u_stride_out_uv;

+        }

+    }

+

+    return 0;

+}

+
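Two details worth noting in the curtain code above: nb_black_lines is forced
even with (n >> 1) << 1 (a requested 7 becomes 6), presumably to keep the
black band aligned with the 2x2 chroma subsampling, and the "black" rows are
written as Y = 0 with U = V = 128, since 128 is the neutral chroma value in
YUV420.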

+

+/******************************************************************************

+ * prototype    M4OSA_ERR M4xVSS_internalConvertRGBtoYUV(M4xVSS_FramingStruct* framingCtx)

+ * @brief   This function converts an RGB565 plane to YUV420 planar

+ * @note    It is used only for framing effect

+ *          It allocates output YUV planes

+ * @param   framingCtx  (IN) The framing struct containing input RGB565 plane

+ *

+ * @return  M4NO_ERROR: No error

+ * @return  M4ERR_PARAMETER: At least one of the function parameters is null

+ * @return  M4ERR_ALLOC: Allocation error (no more memory)

+ ******************************************************************************

+*/

+M4OSA_ERR M4xVSS_internalConvertRGBtoYUV(M4xVSS_FramingStruct* framingCtx)

+{

+    M4OSA_ERR err;

+

+    /**

+     * Allocate output YUV planes */

+    framingCtx->FramingYuv = (M4VIFI_ImagePlane*)M4OSA_malloc(3*sizeof(M4VIFI_ImagePlane), M4VS, (M4OSA_Char*)"M4xVSS_internalConvertRGBtoYUV: Output plane YUV");

+    if(framingCtx->FramingYuv == M4OSA_NULL)

+    {

+        M4OSA_TRACE1_0("Allocation error in M4xVSS_internalConvertRGBtoYUV");

+        return M4ERR_ALLOC;

+    }

+    framingCtx->FramingYuv[0].u_width = framingCtx->FramingRgb->u_width;

+    framingCtx->FramingYuv[0].u_height = framingCtx->FramingRgb->u_height;

+    framingCtx->FramingYuv[0].u_topleft = 0;

+    framingCtx->FramingYuv[0].u_stride = framingCtx->FramingRgb->u_width;

+    framingCtx->FramingYuv[0].pac_data = (M4VIFI_UInt8*)M4OSA_malloc((framingCtx->FramingYuv[0].u_width*framingCtx->FramingYuv[0].u_height*3)>>1, M4VS, (M4OSA_Char*)"Alloc for the Conversion output YUV");

+    if(framingCtx->FramingYuv[0].pac_data == M4OSA_NULL)

+    {

+        M4OSA_TRACE1_0("Allocation error in M4xVSS_internalConvertRGBtoYUV");

+        return M4ERR_ALLOC;

+    }

+    framingCtx->FramingYuv[1].u_width = (framingCtx->FramingRgb->u_width)>>1;

+    framingCtx->FramingYuv[1].u_height = (framingCtx->FramingRgb->u_height)>>1;

+    framingCtx->FramingYuv[1].u_topleft = 0;

+    framingCtx->FramingYuv[1].u_stride = (framingCtx->FramingRgb->u_width)>>1;

+    framingCtx->FramingYuv[1].pac_data = framingCtx->FramingYuv[0].pac_data + framingCtx->FramingYuv[0].u_width * framingCtx->FramingYuv[0].u_height;

+    framingCtx->FramingYuv[2].u_width = (framingCtx->FramingRgb->u_width)>>1;

+    framingCtx->FramingYuv[2].u_height = (framingCtx->FramingRgb->u_height)>>1;

+    framingCtx->FramingYuv[2].u_topleft = 0;

+    framingCtx->FramingYuv[2].u_stride = (framingCtx->FramingRgb->u_width)>>1;

+    framingCtx->FramingYuv[2].pac_data = framingCtx->FramingYuv[1].pac_data + framingCtx->FramingYuv[1].u_width * framingCtx->FramingYuv[1].u_height;

+

+    /**

+     * Convert input RGB 565 to YUV 420 to be able to merge it with output video in framing effect */

+    err = M4VIFI_xVSS_RGB565toYUV420(M4OSA_NULL, framingCtx->FramingRgb, framingCtx->FramingYuv);

+    if(err != M4NO_ERROR)

+    {

+        M4OSA_TRACE1_1("M4xVSS_internalConvertRGBtoYUV: error when converting from RGB to YUV: 0x%x\n", err);

+    }

+

+    framingCtx->duration = 0;

+    framingCtx->previousClipTime = -1;

+    framingCtx->previewOffsetClipTime = -1;

+

+    /**

+     * Only one element in the chained list (no animated image with RGB buffer...) */

+    framingCtx->pCurrent = framingCtx;

+    framingCtx->pNext = framingCtx;

+

+    return M4NO_ERROR;

+}

+
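Note the allocation pattern above: a single buffer of
u_width * u_height * 3 / 2 bytes backs all three planes, with U starting at
offset u_width * u_height and V following after the next
(u_width/2) * (u_height/2) bytes. For a 320x240 frame that is 76800 bytes of
Y, then 19200 of U and 19200 of V, 115200 bytes in total, so freeing
FramingYuv[0].pac_data presumably releases all three planes at once.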

+/******************************************************************************

+ * prototype    M4OSA_ERR M4xVSS_internalConvertRGB888toYUV(M4xVSS_FramingStruct* framingCtx)

+ * @brief   This function converts an RGB888 plane to YUV420 planar

+ * @note    It is used only for framing effect

+ *          It allocates output YUV planes

+ * @param   framingCtx  (IN) The framing struct containing input RGB888 plane

+ *

+ * @return  M4NO_ERROR: No error

+ * @return  M4ERR_PARAMETER: At least one of the function parameters is null

+ * @return  M4ERR_ALLOC: Allocation error (no more memory)

+ ******************************************************************************

+*/

+M4OSA_ERR M4xVSS_internalConvertRGB888toYUV(M4xVSS_FramingStruct* framingCtx)

+{

+    M4OSA_ERR err;

+

+    /**

+     * Allocate output YUV planes */

+    framingCtx->FramingYuv = (M4VIFI_ImagePlane*)M4OSA_malloc(3*sizeof(M4VIFI_ImagePlane), M4VS, (M4OSA_Char*)"M4xVSS_internalConvertRGB888toYUV: Output plane YUV");

+    if(framingCtx->FramingYuv == M4OSA_NULL)

+    {

+        M4OSA_TRACE1_0("Allocation error in M4xVSS_internalConvertRGBtoYUV");

+        return M4ERR_ALLOC;

+    }

+    framingCtx->FramingYuv[0].u_width = framingCtx->FramingRgb->u_width;

+    framingCtx->FramingYuv[0].u_height = framingCtx->FramingRgb->u_height;

+    framingCtx->FramingYuv[0].u_topleft = 0;

+    framingCtx->FramingYuv[0].u_stride = framingCtx->FramingRgb->u_width;

+    framingCtx->FramingYuv[0].pac_data = (M4VIFI_UInt8*)M4OSA_malloc((framingCtx->FramingYuv[0].u_width*framingCtx->FramingYuv[0].u_height*3)>>1, M4VS, (M4OSA_Char*)"Alloc for the Conversion output YUV");

+    if(framingCtx->FramingYuv[0].pac_data == M4OSA_NULL)

+    {

+        M4OSA_TRACE1_0("Allocation error in M4xVSS_internalConvertRGBtoYUV");

+        return M4ERR_ALLOC;

+    }

+    framingCtx->FramingYuv[1].u_width = (framingCtx->FramingRgb->u_width)>>1;

+    framingCtx->FramingYuv[1].u_height = (framingCtx->FramingRgb->u_height)>>1;

+    framingCtx->FramingYuv[1].u_topleft = 0;

+    framingCtx->FramingYuv[1].u_stride = (framingCtx->FramingRgb->u_width)>>1;

+    framingCtx->FramingYuv[1].pac_data = framingCtx->FramingYuv[0].pac_data + framingCtx->FramingYuv[0].u_width * framingCtx->FramingYuv[0].u_height;

+    framingCtx->FramingYuv[2].u_width = (framingCtx->FramingRgb->u_width)>>1;

+    framingCtx->FramingYuv[2].u_height = (framingCtx->FramingRgb->u_height)>>1;

+    framingCtx->FramingYuv[2].u_topleft = 0;

+    framingCtx->FramingYuv[2].u_stride = (framingCtx->FramingRgb->u_width)>>1;

+    framingCtx->FramingYuv[2].pac_data = framingCtx->FramingYuv[1].pac_data + framingCtx->FramingYuv[1].u_width * framingCtx->FramingYuv[1].u_height;

+

+    /**

+     * Convert input RGB888 to YUV 420 to be able to merge it with output video in framing effect */

+    err = M4VIFI_RGB888toYUV420(M4OSA_NULL, framingCtx->FramingRgb, framingCtx->FramingYuv);

+    if(err != M4NO_ERROR)

+    {

+        M4OSA_TRACE1_1("M4xVSS_internalConvertRGBtoYUV: error when converting from RGB to YUV: 0x%x\n", err);

+    }

+

+    framingCtx->duration = 0;

+    framingCtx->previousClipTime = -1;

+    framingCtx->previewOffsetClipTime = -1;

+

+    /**

+     * Only one element in the chained list (no animated image with RGB buffer...) */

+    framingCtx->pCurrent = framingCtx;

+    framingCtx->pNext = framingCtx;

+

+    return M4NO_ERROR;

+}

+

+/**

+ ******************************************************************************

+ * M4VIFI_UInt8 M4VIFI_xVSS_RGB565toYUV420 (void *pUserData,

+ *                                   M4VIFI_ImagePlane *pPlaneIn,

+ *                                   M4VIFI_ImagePlane *pPlaneOut)

+ * @author  Patrice Martinez / Philips Digital Networks - MP4Net

+ * @brief   transforms an RGB565 image into a YUV420 image.

+ * @note    Convert RGB565 to YUV420,

+ *          Loop on each row ( 2 rows by 2 rows )

+ *              Loop on each column ( 2 col by 2 col )

+ *                  Get 4 RGB samples from input data and build 4 output Y samples

+ *                  and each single U & V data

+ *              end loop on col

+ *          end loop on row

+ * @param   pUserData: (IN) User Specific Data

+ * @param   pPlaneIn: (IN) Pointer to RGB565 Plane

+ * @param   pPlaneOut: (OUT) Pointer to  YUV420 buffer Plane

+ * @return  M4VIFI_OK: there is no error

+ * @return  M4VIFI_ILLEGAL_FRAME_HEIGHT: YUV Plane height is ODD

+ * @return  M4VIFI_ILLEGAL_FRAME_WIDTH:  YUV Plane width is ODD

+ ******************************************************************************

+*/

+M4VIFI_UInt8    M4VIFI_xVSS_RGB565toYUV420(void *pUserData, M4VIFI_ImagePlane *pPlaneIn,

+                                                      M4VIFI_ImagePlane *pPlaneOut)

+{

+    M4VIFI_UInt32   u32_width, u32_height;

+    M4VIFI_UInt32   u32_stride_Y, u32_stride2_Y, u32_stride_U, u32_stride_V;

+    M4VIFI_UInt32   u32_stride_rgb, u32_stride_2rgb;

+    M4VIFI_UInt32   u32_col, u32_row;

+

+    M4VIFI_Int32    i32_r00, i32_r01, i32_r10, i32_r11;

+    M4VIFI_Int32    i32_g00, i32_g01, i32_g10, i32_g11;

+    M4VIFI_Int32    i32_b00, i32_b01, i32_b10, i32_b11;

+    M4VIFI_Int32    i32_y00, i32_y01, i32_y10, i32_y11;

+    M4VIFI_Int32    i32_u00, i32_u01, i32_u10, i32_u11;

+    M4VIFI_Int32    i32_v00, i32_v01, i32_v10, i32_v11;

+    M4VIFI_UInt8    *pu8_yn, *pu8_ys, *pu8_u, *pu8_v;

+    M4VIFI_UInt8    *pu8_y_data, *pu8_u_data, *pu8_v_data;

+    M4VIFI_UInt8    *pu8_rgbn_data, *pu8_rgbn;

+    M4VIFI_UInt16   u16_pix1, u16_pix2, u16_pix3, u16_pix4;

+    M4VIFI_UInt8 count_null=0;

+

+    /* Check that the plane heights are appropriate */

+    if( (pPlaneIn->u_height != pPlaneOut[0].u_height)           ||

+        (pPlaneOut[0].u_height != (pPlaneOut[1].u_height<<1))   ||

+        (pPlaneOut[0].u_height != (pPlaneOut[2].u_height<<1)))

+    {

+        return M4VIFI_ILLEGAL_FRAME_HEIGHT;

+    }

+

+    /* Check that the plane widths are appropriate */

+    if( (pPlaneIn->u_width != pPlaneOut[0].u_width)         ||

+        (pPlaneOut[0].u_width != (pPlaneOut[1].u_width<<1)) ||

+        (pPlaneOut[0].u_width != (pPlaneOut[2].u_width<<1)))

+    {

+        return M4VIFI_ILLEGAL_FRAME_WIDTH;

+    }

+

+    /* Set the pointer to the beginning of the output data buffers */

+    pu8_y_data = pPlaneOut[0].pac_data + pPlaneOut[0].u_topleft;

+    pu8_u_data = pPlaneOut[1].pac_data + pPlaneOut[1].u_topleft;

+    pu8_v_data = pPlaneOut[2].pac_data + pPlaneOut[2].u_topleft;

+

+    /* Set the pointer to the beginning of the input data buffers */

+    pu8_rgbn_data   = pPlaneIn->pac_data + pPlaneIn->u_topleft;

+

+    /* Get the size of the output image */

+    u32_width = pPlaneOut[0].u_width;

+    u32_height = pPlaneOut[0].u_height;

+

+    /* Set the size of the memory jumps corresponding to row jump in each output plane */

+    u32_stride_Y = pPlaneOut[0].u_stride;

+    u32_stride2_Y = u32_stride_Y << 1;

+    u32_stride_U = pPlaneOut[1].u_stride;

+    u32_stride_V = pPlaneOut[2].u_stride;

+

+    /* Set the size of the memory jumps corresponding to row jump in input plane */

+    u32_stride_rgb = pPlaneIn->u_stride;

+    u32_stride_2rgb = u32_stride_rgb << 1;

+

+

+    /* Loop on each row of the output image, input coordinates are estimated from output ones */

+    /* Two YUV rows are computed at each pass */

+    for (u32_row = u32_height ;u32_row != 0; u32_row -=2)

+    {

+        /* Current Y plane row pointers */

+        pu8_yn = pu8_y_data;

+        /* Next Y plane row pointers */

+        pu8_ys = pu8_yn + u32_stride_Y;

+        /* Current U plane row pointer */

+        pu8_u = pu8_u_data;

+        /* Current V plane row pointer */

+        pu8_v = pu8_v_data;

+

+        pu8_rgbn = pu8_rgbn_data;

+

+        /* Loop on each column of the output image */

+        for (u32_col = u32_width; u32_col != 0 ; u32_col -=2)

+        {

+            /* Get four RGB 565 samples from input data */

+            u16_pix1 = *( (M4VIFI_UInt16 *) pu8_rgbn);

+            u16_pix2 = *( (M4VIFI_UInt16 *) (pu8_rgbn + CST_RGB_16_SIZE));

+            u16_pix3 = *( (M4VIFI_UInt16 *) (pu8_rgbn + u32_stride_rgb));

+            u16_pix4 = *( (M4VIFI_UInt16 *) (pu8_rgbn + u32_stride_rgb + CST_RGB_16_SIZE));

+

+            /* Unpack RGB565 to 8bit R, G, B */

+#if 0

+            /* (x,y) */

+            GET_RGB565(i32_r00,i32_g00,i32_b00,u16_pix1);

+            /* (x+1,y) */

+            GET_RGB565(i32_r10,i32_g10,i32_b10,u16_pix2);

+            /* (x,y+1) */

+            GET_RGB565(i32_r01,i32_g01,i32_b01,u16_pix3);

+            /* (x+1,y+1) */

+            GET_RGB565(i32_r11,i32_g11,i32_b11,u16_pix4);

+#else

+            /* (x,y) */

+            GET_RGB565(i32_b00,i32_g00,i32_r00,u16_pix1);

+            /* (x+1,y) */

+            GET_RGB565(i32_b10,i32_g10,i32_r10,u16_pix2);

+            /* (x,y+1) */

+            GET_RGB565(i32_b01,i32_g01,i32_r01,u16_pix3);

+            /* (x+1,y+1) */

+            GET_RGB565(i32_b11,i32_g11,i32_r11,u16_pix4);

+#endif

+#if 1 /* Solution to avoid green effects due to transparency */

+            /* If RGB is transparent color (0, 63, 0), we transform it to white (31,63,31) */

+            if(i32_b00 == 0 && i32_g00 == 63 && i32_r00 == 0)

+            {

+                i32_b00 = 31;

+                i32_r00 = 31;

+            }

+            if(i32_b10 == 0 && i32_g10 == 63 && i32_r10 == 0)

+            {

+                i32_b10 = 31;

+                i32_r10 = 31;

+            }

+            if(i32_b01 == 0 && i32_g01 == 63 && i32_r01 == 0)

+            {

+                i32_b01 = 31;

+                i32_r01 = 31;

+            }

+            if(i32_b11 == 0 && i32_g11 == 63 && i32_r11 == 0)

+            {

+                i32_b11 = 31;

+                i32_r11 = 31;

+            }

+#endif

+            /* Convert RGB value to YUV */

+            i32_u00 = U16(i32_r00, i32_g00, i32_b00);

+            i32_v00 = V16(i32_r00, i32_g00, i32_b00);

+            /* luminance value */

+            i32_y00 = Y16(i32_r00, i32_g00, i32_b00);

+

+            i32_u10 = U16(i32_r10, i32_g10, i32_b10);

+            i32_v10 = V16(i32_r10, i32_g10, i32_b10);

+            /* luminance value */

+            i32_y10 = Y16(i32_r10, i32_g10, i32_b10);

+

+            i32_u01 = U16(i32_r01, i32_g01, i32_b01);

+            i32_v01 = V16(i32_r01, i32_g01, i32_b01);

+            /* luminance value */

+            i32_y01 = Y16(i32_r01, i32_g01, i32_b01);

+

+            i32_u11 = U16(i32_r11, i32_g11, i32_b11);

+            i32_v11 = V16(i32_r11, i32_g11, i32_b11);

+            /* luminance value */

+            i32_y11 = Y16(i32_r11, i32_g11, i32_b11);

+

+            /* Store luminance data */

+            pu8_yn[0] = (M4VIFI_UInt8)i32_y00;

+            pu8_yn[1] = (M4VIFI_UInt8)i32_y10;

+            pu8_ys[0] = (M4VIFI_UInt8)i32_y01;

+            pu8_ys[1] = (M4VIFI_UInt8)i32_y11;

+#if 0 /* Temporary solution to avoid green effects due to transparency -> To be removed */

+            count_null = 4;

+            /* Store chroma data */

+            if(i32_b00 == 0 && i32_g00 == 63 && i32_r00 == 0)

+            {

+                i32_u00 = 0;

+                i32_v00 = 0;

+                count_null --;

+            }

+            if(i32_b10 == 0 && i32_g10 == 63 && i32_r10 == 0)

+            {

+                i32_u10 = 0;

+                i32_v10 = 0;

+                count_null --;

+            }

+            if(i32_b01 == 0 && i32_g01 == 63 && i32_r01 == 0)

+            {

+                i32_u01 = 0;

+                i32_v01 = 0;

+                count_null --;

+            }

+            if(i32_b11 == 0 && i32_g11 == 63 && i32_r11 == 0)

+            {

+                i32_u11 = 0;

+                i32_v11 = 0;

+                count_null --;

+            }

+

+            if(count_null == 0)

+            {

+#endif

+            *pu8_u = (M4VIFI_UInt8)((i32_u00 + i32_u01 + i32_u10 + i32_u11 + 2) >> 2);

+            *pu8_v = (M4VIFI_UInt8)((i32_v00 + i32_v01 + i32_v10 + i32_v11 + 2) >> 2);

+#if 0 /* Temporary solution to avoid green effects due to transparency -> To be removed */

+            }

+            else

+            {

+                *pu8_u = (M4VIFI_UInt8)((i32_u00 + i32_u01 + i32_u10 + i32_u11 + 2) / count_null);

+                *pu8_v = (M4VIFI_UInt8)((i32_v00 + i32_v01 + i32_v10 + i32_v11 + 2) / count_null);

+            }

+#endif

+            /* Prepare for next column */

+            pu8_rgbn += (CST_RGB_16_SIZE<<1);

+            /* Update current Y plane line pointer*/

+            pu8_yn += 2;

+            /* Update next Y plane line pointer*/

+            pu8_ys += 2;

+            /* Update U plane line pointer*/

+            pu8_u ++;

+            /* Update V plane line pointer*/

+            pu8_v ++;

+        } /* End of horizontal scanning */

+

+        /* Prepare pointers for the next row */

+        pu8_y_data += u32_stride2_Y;

+        pu8_u_data += u32_stride_U;

+        pu8_v_data += u32_stride_V;

+        pu8_rgbn_data += u32_stride_2rgb;

+

+

+    } /* End of vertical scanning */

+

+    return M4VIFI_OK;

+}
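
The loop above does this work four pixels at a time; as a standalone sketch, here is the single-sample path for one Y value. The unpacking shifts and BT.601 coefficients below are plausible stand-ins for the GET_RGB565 and Y16 macros (defined in M4VIFI headers not shown in this patch), not their exact definitions:

```c
#include <stdint.h>

/* Sketch: one RGB565 pixel to one 8-bit luma sample (assumed
 * coefficients; the library's Y16 macro may differ in scaling). */
static uint8_t rgb565_to_luma(uint16_t pix)
{
    /* Unpack 5:6:5 and expand each channel to 8 bits. */
    int r = ((pix >> 11) & 0x1F) << 3;
    int g = ((pix >> 5)  & 0x3F) << 2;
    int b = ( pix        & 0x1F) << 3;

    /* BT.601 luma in 8-bit fixed point: Y ~ 0.299R + 0.587G + 0.114B */
    return (uint8_t)((77 * r + 150 * g + 29 * b) >> 8);
}
```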

+

+/***************************************************************************

+Proto:

+M4VIFI_UInt8    M4VIFI_RGB888toYUV420(void *pUserData, M4VIFI_ImagePlane *PlaneIn, M4VIFI_ImagePlane PlaneOut[3]);

+Author:     Patrice Martinez / Philips Digital Networks - MP4Net

+Purpose:    filling of the YUV420 plane from a BGR24 plane

+Abstract:   Loop on each row ( 2 rows by 2 rows )

+                Loop on each column ( 2 col by 2 col )

+                    Get 4 BGR samples from input data and build 4 output Y samples and each single U & V data

+                end loop on col

+            end loop on row

+

+In:         RGB24 plane

+InOut:      none

+Out:        array of 3 M4VIFI_ImagePlane structures

+Modified:   ML: RGB function modified to BGR.

+***************************************************************************/

+M4VIFI_UInt8 M4VIFI_RGB888toYUV420(void *pUserData, M4VIFI_ImagePlane *PlaneIn, M4VIFI_ImagePlane PlaneOut[3])

+{

+

+    M4VIFI_UInt32   u32_width, u32_height;

+    M4VIFI_UInt32   u32_stride_Y, u32_stride2_Y, u32_stride_U, u32_stride_V, u32_stride_rgb, u32_stride_2rgb;

+    M4VIFI_UInt32   u32_col, u32_row;

+

+    M4VIFI_Int32    i32_r00, i32_r01, i32_r10, i32_r11;

+    M4VIFI_Int32    i32_g00, i32_g01, i32_g10, i32_g11;

+    M4VIFI_Int32    i32_b00, i32_b01, i32_b10, i32_b11;

+    M4VIFI_Int32    i32_y00, i32_y01, i32_y10, i32_y11;

+    M4VIFI_Int32    i32_u00, i32_u01, i32_u10, i32_u11;

+    M4VIFI_Int32    i32_v00, i32_v01, i32_v10, i32_v11;

+    M4VIFI_UInt8    *pu8_yn, *pu8_ys, *pu8_u, *pu8_v;

+    M4VIFI_UInt8    *pu8_y_data, *pu8_u_data, *pu8_v_data;

+    M4VIFI_UInt8    *pu8_rgbn_data, *pu8_rgbn;

+

+    /* check sizes */

+    if( (PlaneIn->u_height != PlaneOut[0].u_height)         ||

+        (PlaneOut[0].u_height != (PlaneOut[1].u_height<<1)) ||

+        (PlaneOut[0].u_height != (PlaneOut[2].u_height<<1)))

+        return M4VIFI_ILLEGAL_FRAME_HEIGHT;

+

+    if( (PlaneIn->u_width != PlaneOut[0].u_width)       ||

+        (PlaneOut[0].u_width != (PlaneOut[1].u_width<<1))   ||

+        (PlaneOut[0].u_width != (PlaneOut[2].u_width<<1)))

+        return M4VIFI_ILLEGAL_FRAME_WIDTH;

+

+

+    /* set the pointer to the beginning of the output data buffers */

+    pu8_y_data  = PlaneOut[0].pac_data + PlaneOut[0].u_topleft;

+    pu8_u_data  = PlaneOut[1].pac_data + PlaneOut[1].u_topleft;

+    pu8_v_data  = PlaneOut[2].pac_data + PlaneOut[2].u_topleft;

+

+    /* idem for input buffer */

+    pu8_rgbn_data   = PlaneIn->pac_data + PlaneIn->u_topleft;

+

+    /* get the size of the output image */

+    u32_width   = PlaneOut[0].u_width;

+    u32_height  = PlaneOut[0].u_height;

+

+    /* set the size of the memory jumps corresponding to row jump in each output plane */

+    u32_stride_Y = PlaneOut[0].u_stride;

+    u32_stride2_Y= u32_stride_Y << 1;

+    u32_stride_U = PlaneOut[1].u_stride;

+    u32_stride_V = PlaneOut[2].u_stride;

+

+    /* idem for input plane */

+    u32_stride_rgb = PlaneIn->u_stride;

+    u32_stride_2rgb = u32_stride_rgb << 1;

+

+    /* loop on each row of the output image, input coordinates are estimated from output ones */

+    /* two YUV rows are computed at each pass */

+    for (u32_row = u32_height ;u32_row != 0; u32_row -=2)

+    {

+        /* update working pointers */

+        pu8_yn  = pu8_y_data;

+        pu8_ys  = pu8_yn + u32_stride_Y;

+

+        pu8_u   = pu8_u_data;

+        pu8_v   = pu8_v_data;

+

+        pu8_rgbn= pu8_rgbn_data;

+

+        /* loop on each column of the output image*/

+        for (u32_col = u32_width; u32_col != 0 ; u32_col -=2)

+        {

+            /* get RGB samples of 4 pixels */

+            GET_RGB24(i32_r00, i32_g00, i32_b00, pu8_rgbn, 0);

+            GET_RGB24(i32_r10, i32_g10, i32_b10, pu8_rgbn, CST_RGB_24_SIZE);

+            GET_RGB24(i32_r01, i32_g01, i32_b01, pu8_rgbn, u32_stride_rgb);

+            GET_RGB24(i32_r11, i32_g11, i32_b11, pu8_rgbn, u32_stride_rgb + CST_RGB_24_SIZE);

+

+            i32_u00 = U24(i32_r00, i32_g00, i32_b00);

+            i32_v00 = V24(i32_r00, i32_g00, i32_b00);

+            i32_y00 = Y24(i32_r00, i32_g00, i32_b00);       /* matrix luminance */

+            pu8_yn[0]= (M4VIFI_UInt8)i32_y00;

+

+            i32_u10 = U24(i32_r10, i32_g10, i32_b10);

+            i32_v10 = V24(i32_r10, i32_g10, i32_b10);

+            i32_y10 = Y24(i32_r10, i32_g10, i32_b10);

+            pu8_yn[1]= (M4VIFI_UInt8)i32_y10;

+

+            i32_u01 = U24(i32_r01, i32_g01, i32_b01);

+            i32_v01 = V24(i32_r01, i32_g01, i32_b01);

+            i32_y01 = Y24(i32_r01, i32_g01, i32_b01);

+            pu8_ys[0]= (M4VIFI_UInt8)i32_y01;

+

+            i32_u11 = U24(i32_r11, i32_g11, i32_b11);

+            i32_v11 = V24(i32_r11, i32_g11, i32_b11);

+            i32_y11 = Y24(i32_r11, i32_g11, i32_b11);

+            pu8_ys[1] = (M4VIFI_UInt8)i32_y11;

+

+            *pu8_u  = (M4VIFI_UInt8)((i32_u00 + i32_u01 + i32_u10 + i32_u11 + 2) >> 2);

+            *pu8_v  = (M4VIFI_UInt8)((i32_v00 + i32_v01 + i32_v10 + i32_v11 + 2) >> 2);

+

+            pu8_rgbn    +=  (CST_RGB_24_SIZE<<1);

+            pu8_yn      += 2;

+            pu8_ys      += 2;

+

+            pu8_u ++;

+            pu8_v ++;

+        } /* end of horizontal scanning */

+

+        pu8_y_data      += u32_stride2_Y;

+        pu8_u_data      += u32_stride_U;

+        pu8_v_data      += u32_stride_V;

+        pu8_rgbn_data   += u32_stride_2rgb;

+

+

+    } /* End of vertical scanning */

+

+    return M4VIFI_OK;

+}
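
Both converters derive one U and one V output sample per 2x2 block of input pixels with the same rounded average; isolated as a sketch:

```c
#include <stdint.h>

/* 4:2:0 chroma subsampling as used above: the +2 rounds to nearest
 * instead of truncating before the divide-by-4 shift. */
static uint8_t chroma_avg(int c00, int c01, int c10, int c11)
{
    return (uint8_t)((c00 + c01 + c10 + c11 + 2) >> 2);
}
```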

+

+/** YUV420 to YUV420 */

+/**

+ *******************************************************************************************

+ * M4VIFI_UInt8 M4VIFI_YUV420toYUV420 (void *pUserData,

+ *                                     M4VIFI_ImagePlane *pPlaneIn,

+ *                                     M4VIFI_ImagePlane *pPlaneOut)

+ * @brief   Transform YUV420 image to a YUV420 image.

+ * @param   pUserData: (IN) User Specific Data (Unused - could be NULL)

+ * @param   pPlaneIn: (IN) Pointer to YUV plane buffer

+ * @param   pPlaneOut: (OUT) Pointer to YUV Plane

+ * @return  M4VIFI_OK: there is no error

+ * @return  M4VIFI_ILLEGAL_FRAME_HEIGHT: Error in plane height

+ * @return  M4VIFI_ILLEGAL_FRAME_WIDTH:  Error in plane width

+ *******************************************************************************************

+ */

+

+M4VIFI_UInt8 M4VIFI_YUV420toYUV420(void *user_data, M4VIFI_ImagePlane PlaneIn[3], M4VIFI_ImagePlane *PlaneOut )

+{

+    M4VIFI_Int32 plane_number;

+    M4VIFI_UInt32 i;

+    M4VIFI_UInt8 *p_buf_src, *p_buf_dest;

+

+    for (plane_number = 0; plane_number < 3; plane_number++)

+    {

+        p_buf_src = &(PlaneIn[plane_number].pac_data[PlaneIn[plane_number].u_topleft]);

+        p_buf_dest = &(PlaneOut[plane_number].pac_data[PlaneOut[plane_number].u_topleft]);

+        for (i = 0; i < PlaneOut[plane_number].u_height; i++)

+        {

+            M4OSA_memcpy((M4OSA_MemAddr8)p_buf_dest, (M4OSA_MemAddr8)p_buf_src ,PlaneOut[plane_number].u_width);

+            p_buf_src += PlaneIn[plane_number].u_stride;

+            p_buf_dest += PlaneOut[plane_number].u_stride;

+        }

+    }

+    return M4VIFI_OK;

+}
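
The per-plane loop above is the standard stride-aware copy pattern; a generic standalone version (hypothetical helper, plain C types in place of the M4OSA ones) makes the intent explicit:

```c
#include <stdint.h>
#include <string.h>

/* Copy 'width' bytes per row, then advance source and destination by
 * their own strides, which may exceed the width when rows are padded. */
static void copy_plane(uint8_t *dst, size_t dst_stride,
                       const uint8_t *src, size_t src_stride,
                       size_t width, size_t height)
{
    for (size_t row = 0; row < height; row++) {
        memcpy(dst, src, width);
        src += src_stride;
        dst += dst_stride;
    }
}
```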

+

+/**

+ ***********************************************************************************************

+ * M4VIFI_UInt8 M4VIFI_ResizeBilinearYUV420toYUV420(void *pUserData, M4VIFI_ImagePlane *pPlaneIn,

+ *                                                                  M4VIFI_ImagePlane *pPlaneOut)

+ * @author  David Dana (PHILIPS Software)

+ * @brief   Resizes YUV420 Planar plane.

+ * @note    Basic structure of the function

+ *          Loop on each row (step 2)

+ *              Loop on each column (step 2)

+ *                  Get four Y samples and 1 U & V sample

+ *                  Resize the Y with corresponding U and V samples

+ *                  Place the YUV in the output plane

+ *              end loop column

+ *          end loop row

+ *          For resizing, bilinear interpolation linearly interpolates along

+ *          each row, and then uses that result in a linear interpolation down each column.

+ *          Each estimated pixel in the output image is a weighted

+ *          combination of its four neighbours. The ratio of compression

+ *          or dilatation is estimated using input and output sizes.

+ * @param   pUserData: (IN) User Data

+ * @param   pPlaneIn: (IN) Pointer to YUV420 (Planar) plane buffer

+ * @param   pPlaneOut: (OUT) Pointer to YUV420 (Planar) plane

+ * @return  M4VIFI_OK: there is no error

+ * @return  M4VIFI_ILLEGAL_FRAME_HEIGHT: Error in height

+ * @return  M4VIFI_ILLEGAL_FRAME_WIDTH:  Error in width

+ ***********************************************************************************************

+*/

+M4VIFI_UInt8    M4VIFI_ResizeBilinearYUV420toYUV420(void *pUserData,

+                                                                M4VIFI_ImagePlane *pPlaneIn,

+                                                                M4VIFI_ImagePlane *pPlaneOut)

+{

+    M4VIFI_UInt8    *pu8_data_in, *pu8_data_out;

+    M4VIFI_UInt32   u32_plane;

+    M4VIFI_UInt32   u32_width_in, u32_width_out, u32_height_in, u32_height_out;

+    M4VIFI_UInt32   u32_stride_in, u32_stride_out;

+    M4VIFI_UInt32   u32_x_inc, u32_y_inc;

+    M4VIFI_UInt32   u32_x_accum, u32_y_accum, u32_x_accum_start;

+    M4VIFI_UInt32   u32_width, u32_height;

+    M4VIFI_UInt32   u32_y_frac;

+    M4VIFI_UInt32   u32_x_frac;

+    M4VIFI_UInt32   u32_temp_value;

+    M4VIFI_UInt8    *pu8_src_top;

+    M4VIFI_UInt8    *pu8_src_bottom;

+

+    if ( (pPlaneIn[0].u_height == pPlaneOut[0].u_height) && (pPlaneIn[0].u_width == pPlaneOut[0].u_width))

+    {

+        return M4VIFI_YUV420toYUV420(pUserData, pPlaneIn, pPlaneOut);

+    }

+

+    /* Check that the YUV width and height are even */

+    if( (IS_EVEN(pPlaneIn[0].u_height) == FALSE)    ||

+        (IS_EVEN(pPlaneOut[0].u_height) == FALSE))

+    {

+        return M4VIFI_ILLEGAL_FRAME_HEIGHT;

+    }

+

+    if( (IS_EVEN(pPlaneIn[0].u_width) == FALSE) ||

+        (IS_EVEN(pPlaneOut[0].u_width) == FALSE))

+    {

+        return M4VIFI_ILLEGAL_FRAME_WIDTH;

+    }

+

+    /* Loop on planes */

+    for(u32_plane = 0;u32_plane < PLANES;u32_plane++)

+    {

+        /* Set the working pointers at the beginning of the input/output data field */

+        pu8_data_in     = pPlaneIn[u32_plane].pac_data + pPlaneIn[u32_plane].u_topleft;

+        pu8_data_out    = pPlaneOut[u32_plane].pac_data + pPlaneOut[u32_plane].u_topleft;

+

+        /* Get the memory jump corresponding to a row jump */

+        u32_stride_in   = pPlaneIn[u32_plane].u_stride;

+        u32_stride_out  = pPlaneOut[u32_plane].u_stride;

+

+        /* Set the bounds of the active image */

+        u32_width_in    = pPlaneIn[u32_plane].u_width;

+        u32_height_in   = pPlaneIn[u32_plane].u_height;

+

+        u32_width_out   = pPlaneOut[u32_plane].u_width;

+        u32_height_out  = pPlaneOut[u32_plane].u_height;

+

+        /* Compute horizontal ratio between src and destination width.*/

+        if (u32_width_out >= u32_width_in)

+        {

+            u32_x_inc   = ((u32_width_in-1) * MAX_SHORT) / (u32_width_out-1);

+        }

+        else

+        {

+            u32_x_inc   = (u32_width_in * MAX_SHORT) / (u32_width_out);

+        }

+

+        /* Compute vertical ratio between src and destination height.*/

+        if (u32_height_out >= u32_height_in)

+        {

+            u32_y_inc   = ((u32_height_in - 1) * MAX_SHORT) / (u32_height_out-1);

+        }

+        else

+        {

+            u32_y_inc = (u32_height_in * MAX_SHORT) / (u32_height_out);

+        }

+

+        /*

+        Calculate the initial accumulator value : u32_y_accum.

+        u32_y_accum is coded on 15 bits, and represents a value between 0 and 0.5

+        */

+        if (u32_y_inc >= MAX_SHORT)

+        {

+            /*

+                Keep the fractional part, assuming that the integer part is coded

+                on the 16 high bits and the fractional part on the 15 low bits

+            */

+            u32_y_accum = u32_y_inc & 0xffff;

+

+            if (!u32_y_accum)

+            {

+                u32_y_accum = MAX_SHORT;

+            }

+

+            u32_y_accum >>= 1;

+        }

+        else

+        {

+            u32_y_accum = 0;

+        }

+

+

+        /*

+            Calculate initial accumulator value : u32_x_accum_start.

+            u32_x_accum_start is coded on 15 bits, and represents a value between 0 and 0.5

+        */

+        if (u32_x_inc >= MAX_SHORT)

+        {

+            u32_x_accum_start = u32_x_inc & 0xffff;

+

+            if (!u32_x_accum_start)

+            {

+                u32_x_accum_start = MAX_SHORT;

+            }

+

+            u32_x_accum_start >>= 1;

+        }

+        else

+        {

+            u32_x_accum_start = 0;

+        }

+

+        u32_height = u32_height_out;

+

+        /*

+        Bilinear interpolation linearly interpolates along each row, and then uses that

+        result in a linear interpolation down each column. Each estimated pixel in the

+        output image is a weighted combination of its four neighbours according to the formula:

+        F(p',q') = f(p,q)R(-a)R(b) + f(p,q+1)R(-a)R(b-1) + f(p+1,q)R(1-a)R(b) + f(p+1,q+1)R(1-a)R(b-1)

+        with R(x) = x+1 for -1 <= x <= 0, R(x) = 1-x for 0 <= x <= 1, where the weighting

+        coefficient a (resp. b) is the distance from the nearest neighbour in the p (resp. q) direction

+        */

+

+        do { /* Scan all the row */

+

+            /* Vertical weight factor */

+            u32_y_frac = (u32_y_accum>>12)&15;

+

+            /* Reinit accumulator */

+            u32_x_accum = u32_x_accum_start;

+

+            u32_width = u32_width_out;

+

+            do { /* Scan along each row */

+                pu8_src_top = pu8_data_in + (u32_x_accum >> 16);

+                pu8_src_bottom = pu8_src_top + u32_stride_in;

+                u32_x_frac = (u32_x_accum >> 12)&15; /* Horizontal weight factor */

+

+                /* Weighted combination */

+                u32_temp_value = (M4VIFI_UInt8)(((pu8_src_top[0]*(16-u32_x_frac) +

+                                                 pu8_src_top[1]*u32_x_frac)*(16-u32_y_frac) +

+                                                (pu8_src_bottom[0]*(16-u32_x_frac) +

+                                                 pu8_src_bottom[1]*u32_x_frac)*u32_y_frac )>>8);

+

+                *pu8_data_out++ = (M4VIFI_UInt8)u32_temp_value;

+

+                /* Update horizontal accumulator */

+                u32_x_accum += u32_x_inc;

+            } while(--u32_width);

+

+            pu8_data_out = pu8_data_out + u32_stride_out - u32_width_out;

+

+            /* Update vertical accumulator */

+            u32_y_accum += u32_y_inc;

+            if (u32_y_accum>>16) {

+                pu8_data_in = pu8_data_in + (u32_y_accum >> 16) * u32_stride_in;

+                u32_y_accum &= 0xffff;

+            }

+        } while(--u32_height);

+    }

+

+    return M4VIFI_OK;

+}
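
The resizer walks source coordinates in 16.16 fixed point: the high 16 bits of the accumulator index the source pixel, and bits 12-15 form a 4-bit blend weight (MAX_SHORT evidently stands for 0x10000, i.e. 1.0). A one-dimensional sketch of the same walk, horizontal only and assuming widths greater than 1 (the real code above also interpolates vertically, hence its >>8):

```c
#include <stddef.h>
#include <stdint.h>

/* Resize one row of 8-bit samples with the same fixed-point scheme. */
static void resize_row(const uint8_t *src, size_t src_w,
                       uint8_t *dst, size_t dst_w)
{
    uint32_t inc = (dst_w >= src_w)
        ? (uint32_t)(((src_w - 1) << 16) / (dst_w - 1))  /* upscale   */
        : (uint32_t)((src_w << 16) / dst_w);             /* downscale */
    uint32_t accum = 0;

    for (size_t x = 0; x < dst_w; x++) {
        size_t   idx  = accum >> 16;         /* integer source index */
        uint32_t frac = (accum >> 12) & 15;  /* 4-bit blend weight   */
        uint32_t a = src[idx];
        uint32_t b = src[idx + 1 < src_w ? idx + 1 : idx];
        dst[x] = (uint8_t)((a * (16 - frac) + b * frac) >> 4);
        accum += inc;
    }
}
```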

+

+M4OSA_ERR applyRenderingMode(M4VIFI_ImagePlane* pPlaneIn, M4VIFI_ImagePlane* pPlaneOut, M4xVSS_MediaRendering mediaRendering)

+{

+    M4OSA_ERR err = M4NO_ERROR;

+

+    if(mediaRendering == M4xVSS_kResizing)

+    {

+        /**

+         * Call the resize filter. From the intermediate frame to the encoder image plane */

+        err = M4VIFI_ResizeBilinearYUV420toYUV420(M4OSA_NULL, pPlaneIn, pPlaneOut);

+        if (M4NO_ERROR != err)

+        {

+            M4OSA_TRACE1_1("applyRenderingMode: M4ViFilResizeBilinearYUV420toYUV420 returns 0x%x!", err);

+            return err;

+        }

+    }

+    else

+    {

+        M4AIR_Params Params;

+        M4OSA_Context m_air_context;

+        M4VIFI_ImagePlane pImagePlanesTemp[3];

+        M4VIFI_ImagePlane* pPlaneTemp;

+        M4OSA_UInt8* pOutPlaneY = pPlaneOut[0].pac_data + pPlaneOut[0].u_topleft;

+        M4OSA_UInt8* pOutPlaneU = pPlaneOut[1].pac_data + pPlaneOut[1].u_topleft;

+        M4OSA_UInt8* pOutPlaneV = pPlaneOut[2].pac_data + pPlaneOut[2].u_topleft;

+        M4OSA_UInt8* pInPlaneY;

+        M4OSA_UInt8* pInPlaneU;

+        M4OSA_UInt8* pInPlaneV;

+        M4OSA_UInt32 i;

+

+        /*to keep media aspect ratio*/

+        /*Initialize AIR Params*/

+        Params.m_inputCoord.m_x = 0;

+        Params.m_inputCoord.m_y = 0;

+        Params.m_inputSize.m_height = pPlaneIn->u_height;

+        Params.m_inputSize.m_width = pPlaneIn->u_width;

+        Params.m_outputSize.m_width = pPlaneOut->u_width;

+        Params.m_outputSize.m_height = pPlaneOut->u_height;

+        Params.m_bOutputStripe = M4OSA_FALSE;

+        Params.m_outputOrientation = M4COMMON_kOrientationTopLeft;

+

+        /**

+        Media rendering: Black borders*/

+        if(mediaRendering == M4xVSS_kBlackBorders)

+        {

+            M4OSA_memset((M4OSA_MemAddr8)pPlaneOut[0].pac_data,(pPlaneOut[0].u_height*pPlaneOut[0].u_stride),Y_PLANE_BORDER_VALUE);

+            M4OSA_memset((M4OSA_MemAddr8)pPlaneOut[1].pac_data,(pPlaneOut[1].u_height*pPlaneOut[1].u_stride),U_PLANE_BORDER_VALUE);

+            M4OSA_memset((M4OSA_MemAddr8)pPlaneOut[2].pac_data,(pPlaneOut[2].u_height*pPlaneOut[2].u_stride),V_PLANE_BORDER_VALUE);

+

+            pImagePlanesTemp[0].u_width = pPlaneOut[0].u_width;

+            pImagePlanesTemp[0].u_height = pPlaneOut[0].u_height;

+            pImagePlanesTemp[0].u_stride = pPlaneOut[0].u_width;

+            pImagePlanesTemp[0].u_topleft = 0;

+            pImagePlanesTemp[0].pac_data = M4OSA_NULL;

+

+            pImagePlanesTemp[1].u_width = pPlaneOut[1].u_width;

+            pImagePlanesTemp[1].u_height = pPlaneOut[1].u_height;

+            pImagePlanesTemp[1].u_stride = pPlaneOut[1].u_width;

+            pImagePlanesTemp[1].u_topleft = 0;

+            pImagePlanesTemp[1].pac_data = M4OSA_NULL;

+

+            pImagePlanesTemp[2].u_width = pPlaneOut[2].u_width;

+            pImagePlanesTemp[2].u_height = pPlaneOut[2].u_height;

+            pImagePlanesTemp[2].u_stride = pPlaneOut[2].u_width;

+            pImagePlanesTemp[2].u_topleft = 0;

+            pImagePlanesTemp[2].pac_data = M4OSA_NULL;

+

+            /* Allocates plan in local image plane structure */

+            pImagePlanesTemp[0].pac_data = (M4OSA_UInt8*)M4OSA_malloc(pImagePlanesTemp[0].u_width * pImagePlanesTemp[0].u_height, M4VS, (M4OSA_Char*)"applyRenderingMode: temporary plane bufferY") ;

+            if(pImagePlanesTemp[0].pac_data == M4OSA_NULL)

+            {

+                M4OSA_TRACE1_0("Error alloc in applyRenderingMode");

+                return M4ERR_ALLOC;

+            }

+            pImagePlanesTemp[1].pac_data = (M4OSA_UInt8*)M4OSA_malloc(pImagePlanesTemp[1].u_width * pImagePlanesTemp[1].u_height, M4VS, (M4OSA_Char*)"applyRenderingMode: temporary plane bufferU") ;

+            if(pImagePlanesTemp[1].pac_data == M4OSA_NULL)

+            {

+

+                M4OSA_TRACE1_0("Error alloc in applyRenderingMode");

+                return M4ERR_ALLOC;

+            }

+            pImagePlanesTemp[2].pac_data = (M4OSA_UInt8*)M4OSA_malloc(pImagePlanesTemp[2].u_width * pImagePlanesTemp[2].u_height, M4VS, (M4OSA_Char*)"applyRenderingMode: temporary plane bufferV") ;

+            if(pImagePlanesTemp[2].pac_data == M4OSA_NULL)

+            {

+

+                M4OSA_TRACE1_0("Error alloc in applyRenderingMode");

+                return M4ERR_ALLOC;

+            }

+

+            pInPlaneY = pImagePlanesTemp[0].pac_data ;

+            pInPlaneU = pImagePlanesTemp[1].pac_data ;

+            pInPlaneV = pImagePlanesTemp[2].pac_data ;

+

+            M4OSA_memset((M4OSA_MemAddr8)pImagePlanesTemp[0].pac_data,(pImagePlanesTemp[0].u_height*pImagePlanesTemp[0].u_stride),Y_PLANE_BORDER_VALUE);

+            M4OSA_memset((M4OSA_MemAddr8)pImagePlanesTemp[1].pac_data,(pImagePlanesTemp[1].u_height*pImagePlanesTemp[1].u_stride),U_PLANE_BORDER_VALUE);

+            M4OSA_memset((M4OSA_MemAddr8)pImagePlanesTemp[2].pac_data,(pImagePlanesTemp[2].u_height*pImagePlanesTemp[2].u_stride),V_PLANE_BORDER_VALUE);

+

+            if((M4OSA_UInt32)((pPlaneIn->u_height * pPlaneOut->u_width) / pPlaneIn->u_width) <= pPlaneOut->u_height)

+            {

+                /* Height is the limiting dimension, so black borders will be on the top and bottom */

+                Params.m_outputSize.m_width = pPlaneOut->u_width;

+                Params.m_outputSize.m_height = (M4OSA_UInt32)((pPlaneIn->u_height * pPlaneOut->u_width) /pPlaneIn->u_width);

+                /*number of lines at the top*/

+                pImagePlanesTemp[0].u_topleft = (M4xVSS_ABS((M4OSA_Int32)(pImagePlanesTemp[0].u_height-Params.m_outputSize.m_height)>>1))*pImagePlanesTemp[0].u_stride;

+                pImagePlanesTemp[0].u_height = Params.m_outputSize.m_height;

+                pImagePlanesTemp[1].u_topleft = (M4xVSS_ABS((M4OSA_Int32)(pImagePlanesTemp[1].u_height-(Params.m_outputSize.m_height>>1)))>>1)*pImagePlanesTemp[1].u_stride;

+                pImagePlanesTemp[1].u_height = Params.m_outputSize.m_height>>1;

+                pImagePlanesTemp[2].u_topleft = (M4xVSS_ABS((M4OSA_Int32)(pImagePlanesTemp[2].u_height-(Params.m_outputSize.m_height>>1)))>>1)*pImagePlanesTemp[2].u_stride;

+                pImagePlanesTemp[2].u_height = Params.m_outputSize.m_height>>1;

+            }

+            else

+            {

+                /* Width is the limiting dimension, so black borders will be on the left and right */

+                Params.m_outputSize.m_height = pPlaneOut->u_height;

+                Params.m_outputSize.m_width = (M4OSA_UInt32)((pPlaneIn->u_width * pPlaneOut->u_height) /pPlaneIn->u_height);

+

+                pImagePlanesTemp[0].u_topleft = (M4xVSS_ABS((M4OSA_Int32)(pImagePlanesTemp[0].u_width-Params.m_outputSize.m_width)>>1));

+                pImagePlanesTemp[0].u_width = Params.m_outputSize.m_width;

+                pImagePlanesTemp[1].u_topleft = (M4xVSS_ABS((M4OSA_Int32)(pImagePlanesTemp[1].u_width-(Params.m_outputSize.m_width>>1)))>>1);

+                pImagePlanesTemp[1].u_width = Params.m_outputSize.m_width>>1;

+                pImagePlanesTemp[2].u_topleft = (M4xVSS_ABS((M4OSA_Int32)(pImagePlanesTemp[2].u_width-(Params.m_outputSize.m_width>>1)))>>1);

+                pImagePlanesTemp[2].u_width = Params.m_outputSize.m_width>>1;

+            }

+

+            /*Width and height have to be even*/

+            Params.m_outputSize.m_width = (Params.m_outputSize.m_width>>1)<<1;

+            Params.m_outputSize.m_height = (Params.m_outputSize.m_height>>1)<<1;

+            Params.m_inputSize.m_width = (Params.m_inputSize.m_width>>1)<<1;

+            Params.m_inputSize.m_height = (Params.m_inputSize.m_height>>1)<<1;

+            pImagePlanesTemp[0].u_width = (pImagePlanesTemp[0].u_width>>1)<<1;

+            pImagePlanesTemp[1].u_width = (pImagePlanesTemp[1].u_width>>1)<<1;

+            pImagePlanesTemp[2].u_width = (pImagePlanesTemp[2].u_width>>1)<<1;

+            pImagePlanesTemp[0].u_height = (pImagePlanesTemp[0].u_height>>1)<<1;

+            pImagePlanesTemp[1].u_height = (pImagePlanesTemp[1].u_height>>1)<<1;

+            pImagePlanesTemp[2].u_height = (pImagePlanesTemp[2].u_height>>1)<<1;

+

+            /*Check that values are coherent*/

+            if(Params.m_inputSize.m_height == Params.m_outputSize.m_height)

+            {

+                Params.m_inputSize.m_width = Params.m_outputSize.m_width;

+            }

+            else if(Params.m_inputSize.m_width == Params.m_outputSize.m_width)

+            {

+                Params.m_inputSize.m_height = Params.m_outputSize.m_height;

+            }

+            pPlaneTemp = pImagePlanesTemp;

+

+

+        }

+

+        /**

+        Media rendering: Cropping*/

+        if(mediaRendering == M4xVSS_kCropping)

+        {

+            Params.m_outputSize.m_height = pPlaneOut->u_height;

+            Params.m_outputSize.m_width = pPlaneOut->u_width;

+            if((Params.m_outputSize.m_height * Params.m_inputSize.m_width) /Params.m_outputSize.m_width<Params.m_inputSize.m_height)

+            {

+                /*height will be cropped*/

+                Params.m_inputSize.m_height = (M4OSA_UInt32)((Params.m_outputSize.m_height * Params.m_inputSize.m_width) /Params.m_outputSize.m_width);

+                Params.m_inputSize.m_height = (Params.m_inputSize.m_height>>1)<<1;

+                Params.m_inputCoord.m_y = (M4OSA_Int32)((M4OSA_Int32)((pPlaneIn->u_height - Params.m_inputSize.m_height))>>1);

+            }

+            else

+            {

+                /*width will be cropped*/

+                Params.m_inputSize.m_width = (M4OSA_UInt32)((Params.m_outputSize.m_width * Params.m_inputSize.m_height) /Params.m_outputSize.m_height);

+                Params.m_inputSize.m_width = (Params.m_inputSize.m_width>>1)<<1;

+                Params.m_inputCoord.m_x = (M4OSA_Int32)((M4OSA_Int32)((pPlaneIn->u_width - Params.m_inputSize.m_width))>>1);

+            }

+            pPlaneTemp = pPlaneOut;

+        }

+

+        /**

+         * Call AIR functions */

+        err = M4AIR_create(&m_air_context, M4AIR_kYUV420P);

+        if(err != M4NO_ERROR)

+        {

+

+            M4OSA_TRACE1_1("applyRenderingMode: Error when initializing AIR: 0x%x", err);

+            for(i=0; i<3; i++)

+            {

+                if(pImagePlanesTemp[i].pac_data != M4OSA_NULL)

+                {

+                    M4OSA_free((M4OSA_MemAddr32)pImagePlanesTemp[i].pac_data);

+                    pImagePlanesTemp[i].pac_data = M4OSA_NULL;

+                }

+            }

+            return err;

+        }

+

+

+        err = M4AIR_configure(m_air_context, &Params);

+        if(err != M4NO_ERROR)

+        {

+

+            M4OSA_TRACE1_1("applyRenderingMode: Error when configuring AIR: 0x%x", err);

+            M4AIR_cleanUp(m_air_context);

+            for(i=0; i<3; i++)

+            {

+                if(pImagePlanesTemp[i].pac_data != M4OSA_NULL)

+                {

+                    M4OSA_free((M4OSA_MemAddr32)pImagePlanesTemp[i].pac_data);

+                    pImagePlanesTemp[i].pac_data = M4OSA_NULL;

+                }

+            }

+            return err;

+        }

+

+        err = M4AIR_get(m_air_context, pPlaneIn, pPlaneTemp);

+        if(err != M4NO_ERROR)

+        {

+            M4OSA_TRACE1_1("applyRenderingMode: Error when getting AIR plane: 0x%x", err);

+            M4AIR_cleanUp(m_air_context);

+            for(i=0; i<3; i++)

+            {

+                if(pImagePlanesTemp[i].pac_data != M4OSA_NULL)

+                {

+                    M4OSA_free((M4OSA_MemAddr32)pImagePlanesTemp[i].pac_data);

+                    pImagePlanesTemp[i].pac_data = M4OSA_NULL;

+                }

+            }

+            return err;

+        }

+

+        if(mediaRendering == M4xVSS_kBlackBorders)

+        {

+            for(i=0; i<pPlaneOut[0].u_height; i++)

+            {

+                M4OSA_memcpy((M4OSA_MemAddr8)pOutPlaneY, (M4OSA_MemAddr8)pInPlaneY, pPlaneOut[0].u_width);

+                pInPlaneY += pPlaneOut[0].u_width;

+                pOutPlaneY += pPlaneOut[0].u_stride;

+            }

+            for(i=0; i<pPlaneOut[1].u_height; i++)

+            {

+                M4OSA_memcpy((M4OSA_MemAddr8)pOutPlaneU, (M4OSA_MemAddr8)pInPlaneU, pPlaneOut[1].u_width);

+                pInPlaneU += pPlaneOut[1].u_width;

+                pOutPlaneU += pPlaneOut[1].u_stride;

+            }

+            for(i=0; i<pPlaneOut[2].u_height; i++)

+            {

+                M4OSA_memcpy((M4OSA_MemAddr8)pOutPlaneV, (M4OSA_MemAddr8)pInPlaneV, pPlaneOut[2].u_width);

+                pInPlaneV += pPlaneOut[2].u_width;

+                pOutPlaneV += pPlaneOut[2].u_stride;

+            }

+

+            for(i=0; i<3; i++)

+            {

+                if(pImagePlanesTemp[i].pac_data != M4OSA_NULL)

+                {

+                    M4OSA_free((M4OSA_MemAddr32)pImagePlanesTemp[i].pac_data);

+                    pImagePlanesTemp[i].pac_data = M4OSA_NULL;

+                }

+            }

+        }

+    }

+

+    return err;

+}
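
The black-border branch above decides between letterbox and pillarbox with the comparison (in_h * out_w) / in_w <= out_h, then forces even dimensions for YUV420. A standalone sketch of that geometry, as a hypothetical helper with uint32_t in place of the M4OSA types:

```c
#include <stdint.h>

typedef struct { uint32_t w, h, off_x, off_y; } FitRect;

/* Scale the input to fit entirely inside the output, keeping aspect
 * ratio, then centre it; the leftover area keeps the border colour. */
static FitRect fit_inside(uint32_t in_w, uint32_t in_h,
                          uint32_t out_w, uint32_t out_h)
{
    FitRect r;
    if ((in_h * out_w) / in_w <= out_h) {
        r.w = out_w;                    /* borders top/bottom (letterbox) */
        r.h = (in_h * out_w) / in_w;
    } else {
        r.h = out_h;                    /* borders left/right (pillarbox) */
        r.w = (in_w * out_h) / in_h;
    }
    r.w &= ~1u;                         /* YUV420 needs even dimensions  */
    r.h &= ~1u;
    r.off_x = (out_w - r.w) / 2;
    r.off_y = (out_h - r.h) / 2;
    return r;
}
```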

+

+//TODO: remove this code after link with videoartist lib

+/* M4AIR code*/

+#define M4AIR_YUV420_FORMAT_SUPPORTED

+#define M4AIR_YUV420A_FORMAT_SUPPORTED

+

+/************************* COMPILATION CHECKS ***************************/

+#ifndef M4AIR_YUV420_FORMAT_SUPPORTED

+#ifndef M4AIR_BGR565_FORMAT_SUPPORTED

+#ifndef M4AIR_RGB565_FORMAT_SUPPORTED

+#ifndef M4AIR_BGR888_FORMAT_SUPPORTED

+#ifndef M4AIR_RGB888_FORMAT_SUPPORTED

+#ifndef M4AIR_JPG_FORMAT_SUPPORTED

+

+#error "Please define at least one input format for the AIR component"

+

+#endif

+#endif

+#endif

+#endif

+#endif

+#endif

+

+/************************ M4AIR INTERNAL TYPES DEFINITIONS ***********************/

+

+/**

+ ******************************************************************************

+ * enum         M4AIR_States

+ * @brief       The following enumeration defines the internal states of the AIR.

+ ******************************************************************************

+*/

+typedef enum

+{

+    M4AIR_kCreated,             /**< State after M4AIR_create has been called */

+    M4AIR_kConfigured           /**< State after M4AIR_configure has been called */

+}M4AIR_States;

+

+

+/**

+ ******************************************************************************

+ * struct       M4AIR_InternalContext

+ * @brief       The following structure is the internal context of the AIR.

+ ******************************************************************************

+*/

+typedef struct

+{

+    M4AIR_States                m_state;            /**< Internal state */

+    M4AIR_InputFormatType   m_inputFormat;      /**< Input format like YUV420Planar, RGB565, JPG, etc ... */

+    M4AIR_Params            m_params;           /**< Current input Parameter of  the processing */

+    M4OSA_UInt32            u32_x_inc[4];       /**< ratio between input and output width for YUV */

+    M4OSA_UInt32            u32_y_inc[4];       /**< ratio between input and output height for YUV */

+    M4OSA_UInt32            u32_x_accum_start[4];   /**< horizontal initial accumulator value */

+    M4OSA_UInt32            u32_y_accum_start[4];   /**< Vertical initial accumulator value */

+    M4OSA_UInt32            u32_x_accum[4];     /**< save of horizontal accumulator value */

+    M4OSA_UInt32            u32_y_accum[4];     /**< save of vertical accumulator value */

+    M4OSA_UInt8*            pu8_data_in[4];         /**< Save of input plane pointers in case of stripe mode */

+    M4OSA_UInt32            m_procRows;         /**< Number of processed rows, used in stripe mode only */

+    M4OSA_Bool              m_bOnlyCopy;            /**< Flag to know if we just perform a copy or a bilinear interpolation */

+    M4OSA_Bool              m_bFlipX;               /**< Depends on output orientation, used during processing to revert processing order in X coordinates */

+    M4OSA_Bool              m_bFlipY;               /**< Depends on output orientation, used during processing to revert processing order in Y coordinates */

+    M4OSA_Bool              m_bRevertXY;            /**< Depends on output orientation, used during processing to revert X and Y processing order (+-90° rotation) */

+}M4AIR_InternalContext;
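
For reference, the three booleans together encode the eight output orientations handled by M4AIR_configure below as follows:

    orientation     m_bFlipX  m_bFlipY  m_bRevertXY
    TopLeft         false     false     false
    TopRight        true      false     false
    BottomRight     true      true      false
    BottomLeft      false     true      false
    LeftTop         false     false     true
    RightTop        false     true      true
    RightBottom     true      true      true
    LeftBottom      true      false     true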

+

+/********************************* MACROS *******************************/

+#define M4ERR_CHECK_NULL_RETURN_VALUE(retval, pointer) if ((pointer) == M4OSA_NULL) return ((M4OSA_ERR)(retval));

+

+

+/********************** M4AIR PUBLIC API IMPLEMENTATION ********************/

+/**

+ ******************************************************************************

+ * M4OSA_ERR M4AIR_create(M4OSA_Context* pContext,M4AIR_InputFormatType inputFormat)

+ * @author  Arnaud Collard

+ * @brief       This function initializes an instance of the AIR.

+ * @param   pContext:   (IN/OUT) Address of the context to create

+ * @param   inputFormat:    (IN) input format type.

+ * @return  M4NO_ERROR: there is no error

+ * @return  M4ERR_PARAMETER: pContext is M4OSA_NULL (debug only). Invalid formatType

+ * @return  M4ERR_ALLOC: No more memory is available

+ ******************************************************************************

+*/

+M4OSA_ERR M4AIR_create(M4OSA_Context* pContext,M4AIR_InputFormatType inputFormat)

+{

+    M4OSA_ERR err = M4NO_ERROR ;

+    M4AIR_InternalContext* pC = M4OSA_NULL ;

+    /* Check that the address on the context is not NULL */

+    M4ERR_CHECK_NULL_RETURN_VALUE(M4ERR_PARAMETER, pContext) ;

+

+    *pContext = M4OSA_NULL ;

+

+    /* Internal Context creation */

+    pC = (M4AIR_InternalContext*)M4OSA_malloc(sizeof(M4AIR_InternalContext), M4AIR, (M4OSA_Char*)"AIR internal context") ;

+    M4ERR_CHECK_NULL_RETURN_VALUE(M4ERR_ALLOC, pC) ;

+

+

+    /* Check if the input format is supported */

+    switch(inputFormat)

+    {

+#ifdef M4AIR_YUV420_FORMAT_SUPPORTED

+        case M4AIR_kYUV420P:

+        break ;

+#endif

+#ifdef M4AIR_YUV420A_FORMAT_SUPPORTED

+        case M4AIR_kYUV420AP:

+        break ;

+#endif

+        default:

+            err = M4ERR_AIR_FORMAT_NOT_SUPPORTED;

+            goto M4AIR_create_cleanup ;

+    }

+

+    /**< Save input format and update state */

+    pC->m_inputFormat = inputFormat;

+    pC->m_state = M4AIR_kCreated;

+

+    /* Return the context to the caller */

+    *pContext = pC ;

+

+    return M4NO_ERROR ;

+

+M4AIR_create_cleanup:

+    /* Error management : we destroy the context if needed */

+    if(M4OSA_NULL != pC)

+    {

+        M4OSA_free((M4OSA_MemAddr32)pC) ;

+    }

+

+    *pContext = M4OSA_NULL ;

+    

+    return err ;

+}
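
Taken together with M4AIR_configure, M4AIR_get and M4AIR_cleanUp below, the intended calling sequence looks like this. A sketch only, using just the calls visible in this file, with plane preparation and full error handling elided; in stripe mode (m_bOutputStripe true) the single M4AIR_get would instead be repeated once per stripe until the full output height has been produced:

```c
/* Assumes the same headers as this file (M4AIR/M4OSA/M4VIFI types). */
static M4OSA_ERR resize_once(M4VIFI_ImagePlane *planesIn,
                             M4VIFI_ImagePlane *planesOut)
{
    M4OSA_Context ctx;
    M4AIR_Params  params;
    M4OSA_ERR     err;

    err = M4AIR_create(&ctx, M4AIR_kYUV420P);
    if (err != M4NO_ERROR) return err;

    params.m_inputCoord.m_x = 0;
    params.m_inputCoord.m_y = 0;
    params.m_inputSize.m_width   = planesIn[0].u_width;
    params.m_inputSize.m_height  = planesIn[0].u_height;
    params.m_outputSize.m_width  = planesOut[0].u_width;
    params.m_outputSize.m_height = planesOut[0].u_height;
    params.m_bOutputStripe       = M4OSA_FALSE;  /* whole frame at once */
    params.m_outputOrientation   = M4COMMON_kOrientationTopLeft;

    err = M4AIR_configure(ctx, &params);
    if (err == M4NO_ERROR)
        err = M4AIR_get(ctx, planesIn, planesOut);  /* one call in normal mode */

    M4AIR_cleanUp(ctx);
    return err;
}
```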

+

+

+

+/**

+ ******************************************************************************

+ * M4OSA_ERR M4AIR_cleanUp(M4OSA_Context pContext)

+ * @author  Arnaud Collard

+ * @brief       This function destroys an instance of the AIR component

+ * @param   pContext:   (IN) Context identifying the instance to destroy

+ * @return  M4NO_ERROR: there is no error

+ * @return  M4ERR_PARAMETER: pContext is M4OSA_NULL (debug only).

+ * @return  M4ERR_STATE: Internal state is incompatible with this function call.

+******************************************************************************

+*/

+M4OSA_ERR M4AIR_cleanUp(M4OSA_Context pContext)

+{

+    M4AIR_InternalContext* pC = (M4AIR_InternalContext*)pContext ;

+

+    M4ERR_CHECK_NULL_RETURN_VALUE(M4ERR_PARAMETER, pContext) ;

+

+    /**< Check state */

+    if((M4AIR_kCreated != pC->m_state)&&(M4AIR_kConfigured != pC->m_state))

+    {

+        return M4ERR_STATE;

+    }

+    M4OSA_free((M4OSA_MemAddr32)pC) ;

+

+    return M4NO_ERROR ;

+

+}

+

+

+/**

+ ******************************************************************************

+ * M4OSA_ERR M4AIR_configure(M4OSA_Context pContext, M4AIR_Params* pParams)

+ * @brief       This function will configure the AIR.

+ * @note    It will set the input and output coordinates and sizes,

+ *          and indicate whether processing will proceed in stripe mode or not.

+ *          In case an M4AIR_get in stripe mode was ongoing, it will cancel this previous processing

+ *          and reset the get process.

+ * @param   pContext:               (IN) Context identifying the instance

+ * @param   pParams->m_bOutputStripe:(IN) Stripe mode.

+ * @param   pParams->m_inputCoord:  (IN) X,Y coordinates of the first valid pixel in input.

+ * @param   pParams->m_inputSize:   (IN) input ROI size.

+ * @param   pParams->m_outputSize:  (IN) output size.

+ * @return  M4NO_ERROR: there is no error

+ * @return  M4ERR_ALLOC: No more memory space to add a new effect.

+ * @return  M4ERR_PARAMETER: pContext is M4OSA_NULL (debug only).

+ * @return  M4ERR_AIR_FORMAT_NOT_SUPPORTED: the requested input format is not supported.

+ ******************************************************************************

+*/

+M4OSA_ERR M4AIR_configure(M4OSA_Context pContext, M4AIR_Params* pParams)

+{

+    M4AIR_InternalContext* pC = (M4AIR_InternalContext*)pContext ;

+    M4OSA_UInt32    i,u32_width_in, u32_width_out, u32_height_in, u32_height_out;

+    M4OSA_UInt32    nb_planes;

+

+    M4ERR_CHECK_NULL_RETURN_VALUE(M4ERR_PARAMETER, pContext) ;

+    

+    if(M4AIR_kYUV420AP == pC->m_inputFormat)

+    {

+        nb_planes = 4;

+    }

+    else

+    {

+        nb_planes = 3;

+    }

+

+    /**< Check state */

+    if((M4AIR_kCreated != pC->m_state)&&(M4AIR_kConfigured != pC->m_state))

+    {

+        return M4ERR_STATE;

+    }

+

+    /** Save parameters */

+    pC->m_params = *pParams;

+

+    /* Check that the input & output width and height are even */

+    if( ((pC->m_params.m_inputSize.m_height)&0x1)    ||

+        ((pC->m_params.m_outputSize.m_height)&0x1))

+    {

+        return M4ERR_AIR_ILLEGAL_FRAME_SIZE;

+    }

+

+    if( ((pC->m_params.m_inputSize.m_width)&0x1)    ||

+        ((pC->m_params.m_outputSize.m_width)&0x1))

+    {

+        return M4ERR_AIR_ILLEGAL_FRAME_SIZE;

+    }

+    if(((pC->m_params.m_inputSize.m_width) == (pC->m_params.m_outputSize.m_width))

+        &&((pC->m_params.m_inputSize.m_height) == (pC->m_params.m_outputSize.m_height)))

+    {

+        /**< No resize in this case, we will just copy input in output */

+        pC->m_bOnlyCopy = M4OSA_TRUE;

+    }

+    else

+    {

+        pC->m_bOnlyCopy = M4OSA_FALSE;

+

+        /**< Initialize internal variables used for resize filter */

+        for(i=0;i<nb_planes;i++)

+        {

+

+            u32_width_in = ((i==0)||(i==3))?pC->m_params.m_inputSize.m_width:(pC->m_params.m_inputSize.m_width+1)>>1;

+            u32_height_in = ((i==0)||(i==3))?pC->m_params.m_inputSize.m_height:(pC->m_params.m_inputSize.m_height+1)>>1;

+            u32_width_out = ((i==0)||(i==3))?pC->m_params.m_outputSize.m_width:(pC->m_params.m_outputSize.m_width+1)>>1;

+            u32_height_out = ((i==0)||(i==3))?pC->m_params.m_outputSize.m_height:(pC->m_params.m_outputSize.m_height+1)>>1;

+

+                /* Compute horizontal ratio between src and destination width.*/

+                if (u32_width_out >= u32_width_in)

+                {

+                    pC->u32_x_inc[i]   = ((u32_width_in-1) * 0x10000) / (u32_width_out-1);

+                }

+                else

+                {

+                    pC->u32_x_inc[i]   = (u32_width_in * 0x10000) / (u32_width_out);

+                }

+

+                /* Compute vertical ratio between src and destination height.*/

+                if (u32_height_out >= u32_height_in)

+                {

+                    pC->u32_y_inc[i]   = ((u32_height_in - 1) * 0x10000) / (u32_height_out-1);

+                }

+                else

+                {

+                    pC->u32_y_inc[i] = (u32_height_in * 0x10000) / (u32_height_out);

+                }

+

+                /*

+                Calculate initial accumulator value : u32_y_accum_start.

+                u32_y_accum_start is coded on 15 bits, and represents a value between 0 and 0.5

+                */

+                if (pC->u32_y_inc[i] >= 0x10000)

+                {

+                    /*

+                        Keep the fractional part, assuming that the integer part is coded

+                        on the 16 high bits and the fractional part on the 15 low bits

+                    */

+                    pC->u32_y_accum_start[i] = pC->u32_y_inc[i] & 0xffff;

+

+                    if (!pC->u32_y_accum_start[i])

+                    {

+                        pC->u32_y_accum_start[i] = 0x10000;

+                    }

+

+                    pC->u32_y_accum_start[i] >>= 1;

+                }

+                else

+                {

+                    pC->u32_y_accum_start[i] = 0;

+                }

+                /**< Take into account that the Y coordinate can be odd;

+                    in this case we have to apply a 0.5 offset

+                    for the U and V planes, as they are 2 times sub-sampled vs Y */

+                if((pC->m_params.m_inputCoord.m_y&0x1)&&((i==1)||(i==2)))

+                {

+                    pC->u32_y_accum_start[i] += 0x8000;

+                }

+

+                /*

+                    Calculate initial accumulator value : u32_x_accum_start.

+                    u32_x_accum_start is coded on 15 bits, and represents a value between 0 and 0.5

+                */

+

+                if (pC->u32_x_inc[i] >= 0x10000)

+                {

+                    pC->u32_x_accum_start[i] = pC->u32_x_inc[i] & 0xffff;

+

+                    if (!pC->u32_x_accum_start[i])

+                    {

+                        pC->u32_x_accum_start[i] = 0x10000;

+                    }

+

+                    pC->u32_x_accum_start[i] >>= 1;

+                }

+                else

+                {

+                    pC->u32_x_accum_start[i] = 0;

+                }

+                /**< Take into account that the X coordinate can be odd;

+                    in this case we have to apply a 0.5 offset

+                    for the U and V planes, as they are 2 times sub-sampled vs Y */

+                if((pC->m_params.m_inputCoord.m_x&0x1)&&((i==1)||(i==2)))

+                {

+                    pC->u32_x_accum_start[i] += 0x8000;

+                }

+        }

+    }

+

+    /**< Reset variable used for stripe mode */

+    pC->m_procRows = 0;

+

+    /**< Initialize var for X/Y processing order according to orientation */

+    pC->m_bFlipX = M4OSA_FALSE;

+    pC->m_bFlipY = M4OSA_FALSE;

+    pC->m_bRevertXY = M4OSA_FALSE;

+    switch(pParams->m_outputOrientation)

+    {

+        case M4COMMON_kOrientationTopLeft:

+            break;

+        case M4COMMON_kOrientationTopRight:

+            pC->m_bFlipX = M4OSA_TRUE;

+            break;

+        case M4COMMON_kOrientationBottomRight:

+            pC->m_bFlipX = M4OSA_TRUE;

+            pC->m_bFlipY = M4OSA_TRUE;

+            break;

+        case M4COMMON_kOrientationBottomLeft:

+            pC->m_bFlipY = M4OSA_TRUE;

+            break;

+        case M4COMMON_kOrientationLeftTop:

+            pC->m_bRevertXY = M4OSA_TRUE;

+            break;

+        case M4COMMON_kOrientationRightTop:

+            pC->m_bRevertXY = M4OSA_TRUE;

+            pC->m_bFlipY = M4OSA_TRUE;

+        break;

+        case M4COMMON_kOrientationRightBottom:

+            pC->m_bRevertXY = M4OSA_TRUE;

+            pC->m_bFlipX = M4OSA_TRUE;

+            pC->m_bFlipY = M4OSA_TRUE;

+            break;

+        case M4COMMON_kOrientationLeftBottom:

+            pC->m_bRevertXY = M4OSA_TRUE;

+            pC->m_bFlipX = M4OSA_TRUE;

+            break;

+        default:

+        return M4ERR_PARAMETER;

+    }

+    /**< Update state */

+    pC->m_state = M4AIR_kConfigured;

+    

+    return M4NO_ERROR ;

+}
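
The 0x8000 additions above implement a half-sample phase shift: the accumulator's fractional part is 16 bits wide, so 0x8000 is 0.5, and an odd luma ROI origin lands halfway between two chroma samples. Isolated as a sketch:

```c
#include <stdint.h>

/* Starting accumulator for a chroma (U/V) plane: add 0.5 in 16.16
 * fixed point when the luma-space ROI coordinate is odd. */
static uint32_t chroma_start_accum(uint32_t base_accum, int32_t luma_coord)
{
    return (luma_coord & 1) ? base_accum + 0x8000u : base_accum;
}
```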

+

+

+/**

+ ******************************************************************************

+ * M4OSA_ERR M4AIR_get(M4OSA_Context pContext, M4VIFI_ImagePlane* pIn, M4VIFI_ImagePlane* pOut)

+ * @brief   This function will provide the requested resized area of interest according to settings

+ *          provided in M4AIR_configure.

+ * @note    In case the input format type is JPEG, the input plane(s)

+ *          in pIn are not used. In normal mode, the dimensions specified in the output plane(s) structure must be the

+ *          same as the ones specified in M4AIR_configure. In stripe mode, only the width will be the same;

+ *          the height will be taken as the stripe height (typically 16).

+ *          In normal mode, this function is called once to get the full output picture. In stripe mode, it is called

+ *          for each stripe until the whole picture has been retrieved, and the position of the output stripe in the output picture

+ *          is internally incremented at each step.

+ *          Any call to M4AIR_configure during the stripe process will reset it to the beginning of the output picture.

+ * @param   pContext:   (IN) Context identifying the instance

+ * @param   pIn:            (IN) Plane structure containing input Plane(s).

+ * @param   pOut:       (IN/OUT)  Plane structure containing output Plane(s).

+ * @return  M4NO_ERROR: there is no error

+ * @return  M4ERR_ALLOC: No more memory space to add a new effect.

+ * @return  M4ERR_PARAMETER: pContext is M4OSA_NULL (debug only).

+ ******************************************************************************

+*/

+M4OSA_ERR M4AIR_get(M4OSA_Context pContext, M4VIFI_ImagePlane* pIn, M4VIFI_ImagePlane* pOut)

+{

+    M4AIR_InternalContext* pC = (M4AIR_InternalContext*)pContext ;

+    M4OSA_UInt32 i,j,k,u32_x_frac,u32_y_frac,u32_x_accum,u32_y_accum,u32_shift;

+    M4OSA_UInt8    *pu8_data_in, *pu8_data_in_org, *pu8_data_in_tmp, *pu8_data_out;

+    M4OSA_UInt8    *pu8_src_top;

+    M4OSA_UInt8    *pu8_src_bottom;

+    M4OSA_UInt32    u32_temp_value;

+    M4OSA_Int32 i32_tmp_offset;

+    M4OSA_UInt32    nb_planes;

+

+

+

+    M4ERR_CHECK_NULL_RETURN_VALUE(M4ERR_PARAMETER, pContext) ;

+

+    /**< Check state */

+    if(M4AIR_kConfigured != pC->m_state)

+    {

+        return M4ERR_STATE;

+    }

+

+    if(M4AIR_kYUV420AP == pC->m_inputFormat)

+    {

+        nb_planes = 4;

+    }

+    else

+    {

+        nb_planes = 3;

+    }

+

+    /**< Loop on each Plane */

+    for(i=0;i<nb_planes;i++)

+    {

+

+         /* Set the working pointers at the beginning of the input/output data field */

+

+        u32_shift = ((i==0)||(i==3))?0:1; /**< Depend on Luma or Chroma */

+

+        if((M4OSA_FALSE == pC->m_params.m_bOutputStripe)||((M4OSA_TRUE == pC->m_params.m_bOutputStripe)&&(0 == pC->m_procRows)))

+        {

+            /**< For input, take care about ROI */

+            pu8_data_in     = pIn[i].pac_data + pIn[i].u_topleft + (pC->m_params.m_inputCoord.m_x>>u32_shift)

+                        + (pC->m_params.m_inputCoord.m_y >> u32_shift) * pIn[i].u_stride;

+

+            /** Go at end of line/column in case X/Y scanning is flipped */

+            if(M4OSA_TRUE == pC->m_bFlipX)

+            {

+                pu8_data_in += ((pC->m_params.m_inputSize.m_width)>>u32_shift) -1 ;

+            }

+            if(M4OSA_TRUE == pC->m_bFlipY)

+            {

+                pu8_data_in += ((pC->m_params.m_inputSize.m_height>>u32_shift) -1) * pIn[i].u_stride;

+            }

+

+            /**< Initialize accumulators in case we are using it (bilinear interpolation) */

+            if( M4OSA_FALSE == pC->m_bOnlyCopy)

+            {

+                pC->u32_x_accum[i] = pC->u32_x_accum_start[i];

+                pC->u32_y_accum[i] = pC->u32_y_accum_start[i];

+            }

+

+        }

+        else

+        {

+            /**< In case of stripe mode for other than first stripe, we need to recover input pointer from internal context */

+            pu8_data_in = pC->pu8_data_in[i];

+        }

+

+        /**< In every mode, output data are at the beginning of the output plane */

+        pu8_data_out    = pOut[i].pac_data + pOut[i].u_topleft;

+

+        /**< Initialize input offset applied after each pixel */

+        if(M4OSA_FALSE == pC->m_bFlipY)

+        {

+            i32_tmp_offset = pIn[i].u_stride;

+        }

+        else

+        {

+            i32_tmp_offset = -pIn[i].u_stride;

+        }

+

+        /**< In this case, no bilinear interpolation is needed as input and output dimensions are the same */

+        if( M4OSA_TRUE == pC->m_bOnlyCopy)

+        {

+            /**< No +-90° rotation */

+            if(M4OSA_FALSE == pC->m_bRevertXY)

+            {

+                /**< No flip on X abscissa */

+                if(M4OSA_FALSE == pC->m_bFlipX)

+                {

+                    /**< Loop on each row */

+                    for(j=0;j<pOut[i].u_height;j++)

+                    {

+                        /**< Copy one whole line */

+                        M4OSA_memcpy((M4OSA_MemAddr8)pu8_data_out, (M4OSA_MemAddr8)pu8_data_in, pOut[i].u_width);

+

+                        /**< Update pointers */

+                        pu8_data_out += pOut[i].u_stride;

+                        if(M4OSA_FALSE == pC->m_bFlipY)

+                        {

+                            pu8_data_in += pIn[i].u_stride;

+                        }

+                        else

+                        {

+                            pu8_data_in -= pIn[i].u_stride;

+                        }

+                    }

+                }

+                else

+                {

+                    /**< Loop on each row */

+                    for(j=0;j<pOut[i].u_height;j++)

+                    {

+                        /**< Loop on each pixel of 1 row */

+                        for(k=0;k<pOut[i].u_width;k++)

+                        {

+                            *pu8_data_out++ = *pu8_data_in--;

+                        }

+

+                        /**< Update pointers */

+                        pu8_data_out += (pOut[i].u_stride - pOut[i].u_width);

+

+                        pu8_data_in += pOut[i].u_width + i32_tmp_offset;

+

+                    }

+                }

+            }

+            /**< Here we have a +-90° rotation */

+            else

+            {

+

+                /**< Loop on each row */

+                for(j=0;j<pOut[i].u_height;j++)

+                {

+                    pu8_data_in_tmp = pu8_data_in;

+

+                    /**< Loop on each pixel of 1 row */

+                    for(k=0;k<pOut[i].u_width;k++)

+                    {

+                        *pu8_data_out++ = *pu8_data_in_tmp;

+

+                        /**< Advance the input pointer to the next/previous line */

+                        pu8_data_in_tmp += i32_tmp_offset;

+                    }

+

+                    /**< Update pointers */

+                    pu8_data_out += (pOut[i].u_stride - pOut[i].u_width);

+                    if(M4OSA_FALSE == pC->m_bFlipX)

+                    {

+                        pu8_data_in ++;

+                    }

+                    else

+                    {

+                        pu8_data_in --;

+                    }

+                }

+            }

+        }

+        /**< Bilinear interpolation */

+        else

+        {

+

+        if(3 != i)  /**< other than alpha plane */

+        {

+            /**< No +-90° rotation */

+            if(M4OSA_FALSE == pC->m_bRevertXY)

+            {

+

+                /**< Loop on each row */

+                for(j=0;j<pOut[i].u_height;j++)

+                {

+                    /* Vertical weight factor */

+                    u32_y_frac = (pC->u32_y_accum[i]>>12)&15;

+

+                    /* Reinit horizontal weight factor */

+                    u32_x_accum = pC->u32_x_accum_start[i];

+

+

+

+                        if(M4OSA_TRUE ==  pC->m_bFlipX)

+                        {

+

+                            /**< Loop on each output pixel in a row */

+                            for(k=0;k<pOut[i].u_width;k++)

+                            {

+

+                                u32_x_frac = (u32_x_accum >> 12)&15; /* Fraction of Horizontal weight factor */

+

+                                pu8_src_top = (pu8_data_in - (u32_x_accum >> 16)) -1 ;

+

+                                pu8_src_bottom = pu8_src_top + i32_tmp_offset;

+

+                                /* Weighted combination */
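
+                                /* The accumulators are 16.16 fixed point: each 4-bit

+                                   fraction is a weight in 0..15, and the four weights

+                                   sum to 16*16 = 256, hence the >>8 normalization below. */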

+                                u32_temp_value = (M4VIFI_UInt8)(((pu8_src_top[1]*(16-u32_x_frac) +

+                                                                 pu8_src_top[0]*u32_x_frac)*(16-u32_y_frac) +

+                                                                (pu8_src_bottom[1]*(16-u32_x_frac) +

+                                                                 pu8_src_bottom[0]*u32_x_frac)*u32_y_frac )>>8);

+

+                                *pu8_data_out++ = (M4VIFI_UInt8)u32_temp_value;

+

+                                /* Update horizontal accumulator */

+                                u32_x_accum += pC->u32_x_inc[i];

+                            }

+                        }

+

+                        else

+                        {

+                            /**< Loop on each output pixel in a row */

+                            for(k=0;k<pOut[i].u_width;k++)

+                            {

+                                u32_x_frac = (u32_x_accum >> 12)&15; /* Fraction of Horizontal weight factor */

+

+                                pu8_src_top = pu8_data_in + (u32_x_accum >> 16);

+

+                                pu8_src_bottom = pu8_src_top + i32_tmp_offset;

+

+                                /* Weighted combination */

+                                u32_temp_value = (M4VIFI_UInt8)(((pu8_src_top[0]*(16-u32_x_frac) +

+                                                                 pu8_src_top[1]*u32_x_frac)*(16-u32_y_frac) +

+                                                                (pu8_src_bottom[0]*(16-u32_x_frac) +

+                                                                 pu8_src_bottom[1]*u32_x_frac)*u32_y_frac )>>8);

+

+                                    *pu8_data_out++ = (M4VIFI_UInt8)u32_temp_value;

+

+                                /* Update horizontal accumulator */

+                                u32_x_accum += pC->u32_x_inc[i];

+                            }

+

+                        }

+

+                    pu8_data_out += pOut[i].u_stride - pOut[i].u_width;

+

+                    /* Update vertical accumulator */
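
+                    /* Carry the integer part of the accumulator into the input

+                       row pointer and keep only the 16-bit fraction. */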

+                    pC->u32_y_accum[i] += pC->u32_y_inc[i];

+                    if (pC->u32_y_accum[i]>>16)

+                    {

+                        pu8_data_in = pu8_data_in + (pC->u32_y_accum[i] >> 16) * i32_tmp_offset;

+                        pC->u32_y_accum[i] &= 0xffff;

+                    }

+                }

+        }

+            /** +-90° rotation */

+            else

+            {

+                pu8_data_in_org = pu8_data_in;

+

+                /**< Loop on each output row */

+                for(j=0;j<pOut[i].u_height;j++)

+                {

+                    /* horizontal weight factor */

+                    u32_x_frac = (pC->u32_x_accum[i]>>12)&15;

+

+                    /* Reinit accumulator */

+                    u32_y_accum = pC->u32_y_accum_start[i];

+

+                    if(M4OSA_TRUE ==  pC->m_bFlipX)

+                    {

+

+                        /**< Loop on each output pixel in a row */

+                        for(k=0;k<pOut[i].u_width;k++)

+                        {

+

+                            u32_y_frac = (u32_y_accum >> 12)&15; /* Vertical weight factor */

+

+

+                            pu8_src_top = (pu8_data_in - (pC->u32_x_accum[i] >> 16)) - 1;

+

+                            pu8_src_bottom = pu8_src_top + i32_tmp_offset;

+

+                            /* Weighted combination */

+                            u32_temp_value = (M4VIFI_UInt8)(((pu8_src_top[1]*(16-u32_x_frac) +

+                                                                 pu8_src_top[0]*u32_x_frac)*(16-u32_y_frac) +

+                                                                (pu8_src_bottom[1]*(16-u32_x_frac) +

+                                                                 pu8_src_bottom[0]*u32_x_frac)*u32_y_frac )>>8);

+

+                            *pu8_data_out++ = (M4VIFI_UInt8)u32_temp_value;

+

+                            /* Update vertical accumulator */

+                            u32_y_accum += pC->u32_y_inc[i];

+                            if (u32_y_accum>>16)

+                            {

+                                pu8_data_in = pu8_data_in + (u32_y_accum >> 16) * i32_tmp_offset;

+                                u32_y_accum &= 0xffff;

+                            }

+

+                        }

+                    }

+                    else

+                    {

+                        /**< Loop on each output pixel in a row */

+                        for(k=0;k<pOut[i].u_width;k++)

+                        {

+

+                            u32_y_frac = (u32_y_accum >> 12)&15; /* Vertical weight factor */

+

+                            pu8_src_top = pu8_data_in + (pC->u32_x_accum[i] >> 16);

+

+                            pu8_src_bottom = pu8_src_top + i32_tmp_offset;

+

+                            /* Weighted combination */

+                            u32_temp_value = (M4VIFI_UInt8)(((pu8_src_top[0]*(16-u32_x_frac) +

+                                                                 pu8_src_top[1]*u32_x_frac)*(16-u32_y_frac) +

+                                                                (pu8_src_bottom[0]*(16-u32_x_frac) +

+                                                                 pu8_src_bottom[1]*u32_x_frac)*u32_y_frac )>>8);

+

+                            *pu8_data_out++ = (M4VIFI_UInt8)u32_temp_value;

+

+                            /* Update vertical accumulator */

+                            u32_y_accum += pC->u32_y_inc[i];

+                            if (u32_y_accum>>16)

+                            {

+                                pu8_data_in = pu8_data_in + (u32_y_accum >> 16) * i32_tmp_offset;

+                                u32_y_accum &= 0xffff;

+                            }

+                        }

+                    }

+                    pu8_data_out += pOut[i].u_stride - pOut[i].u_width;

+

+                    /* Update horizontal accumulator */

+                    pC->u32_x_accum[i] += pC->u32_x_inc[i];

+

+                    pu8_data_in = pu8_data_in_org;

+                }

+

+            }

+            } /**< 3 != i */

+            else

+            {

+            /**< No +-90° rotation */

+            if(M4OSA_FALSE == pC->m_bRevertXY)

+            {

+

+                /**< Loop on each row */

+                for(j=0;j<pOut[i].u_height;j++)

+                {

+                    /* Vertical weight factor */

+                    u32_y_frac = (pC->u32_y_accum[i]>>12)&15;

+

+                    /* Reinit horizontal weight factor */

+                    u32_x_accum = pC->u32_x_accum_start[i];

+

+

+

+                        if(M4OSA_TRUE ==  pC->m_bFlipX)

+                        {

+

+                            /**< Loop on each output pixel in a row */

+                            for(k=0;k<pOut[i].u_width;k++)

+                            {

+

+                                u32_x_frac = (u32_x_accum >> 12)&15; /* Fraction of Horizontal weight factor */

+

+                                pu8_src_top = (pu8_data_in - (u32_x_accum >> 16)) -1 ;

+

+                                pu8_src_bottom = pu8_src_top + i32_tmp_offset;

+

+                                /* Weighted combination */

+                                u32_temp_value = (M4VIFI_UInt8)(((pu8_src_top[1]*(16-u32_x_frac) +

+                                                                 pu8_src_top[0]*u32_x_frac)*(16-u32_y_frac) +

+                                                                (pu8_src_bottom[1]*(16-u32_x_frac) +

+                                                                 pu8_src_bottom[0]*u32_x_frac)*u32_y_frac )>>8);

+
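
+                                /* Binarize the interpolated alpha: values >= 128

+                                   become 0xFF, values below 128 become 0. */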

+                                u32_temp_value= (u32_temp_value >> 7)*0xff;

+

+                                *pu8_data_out++ = (M4VIFI_UInt8)u32_temp_value;

+

+                                /* Update horizontal accumulator */

+                                u32_x_accum += pC->u32_x_inc[i];

+                            }

+                        }

+

+                        else

+                        {

+                            /**< Loop on each output pixel in a row */

+                            for(k=0;k<pOut[i].u_width;k++)

+                            {

+                                u32_x_frac = (u32_x_accum >> 12)&15; /* Fraction of Horizontal weight factor */

+

+                                pu8_src_top = pu8_data_in + (u32_x_accum >> 16);

+

+                                pu8_src_bottom = pu8_src_top + i32_tmp_offset;

+

+                                /* Weighted combination */

+                                u32_temp_value = (M4VIFI_UInt8)(((pu8_src_top[0]*(16-u32_x_frac) +

+                                                                 pu8_src_top[1]*u32_x_frac)*(16-u32_y_frac) +

+                                                                (pu8_src_bottom[0]*(16-u32_x_frac) +

+                                                                 pu8_src_bottom[1]*u32_x_frac)*u32_y_frac )>>8);

+

+                                u32_temp_value= (u32_temp_value >> 7)*0xff;

+

+                                *pu8_data_out++ = (M4VIFI_UInt8)u32_temp_value;

+

+                                /* Update horizontal accumulator */

+                                u32_x_accum += pC->u32_x_inc[i];

+                            }

+

+                        }

+

+                    pu8_data_out += pOut[i].u_stride - pOut[i].u_width;

+

+                    /* Update vertical accumulator */

+                    pC->u32_y_accum[i] += pC->u32_y_inc[i];

+                    if (pC->u32_y_accum[i]>>16)

+                    {

+                        pu8_data_in = pu8_data_in + (pC->u32_y_accum[i] >> 16) * i32_tmp_offset;

+                        pC->u32_y_accum[i] &= 0xffff;

+                    }

+                }

+

+            } /**< M4OSA_FALSE == pC->m_bRevertXY */

+            /** +-90° rotation */

+            else

+            {

+                pu8_data_in_org = pu8_data_in;

+

+                /**< Loop on each output row */

+                for(j=0;j<pOut[i].u_height;j++)

+                {

+                    /* horizontal weight factor */

+                    u32_x_frac = (pC->u32_x_accum[i]>>12)&15;

+

+                    /* Reinit accumulator */

+                    u32_y_accum = pC->u32_y_accum_start[i];

+

+                    if(M4OSA_TRUE ==  pC->m_bFlipX)

+                    {

+

+                        /**< Loop on each output pixel in a row */

+                        for(k=0;k<pOut[i].u_width;k++)

+                        {

+

+                            u32_y_frac = (u32_y_accum >> 12)&15; /* Vertical weight factor */

+

+

+                            pu8_src_top = (pu8_data_in - (pC->u32_x_accum[i] >> 16)) - 1;

+

+                            pu8_src_bottom = pu8_src_top + i32_tmp_offset;

+

+                            /* Weighted combination */

+                            u32_temp_value = (M4VIFI_UInt8)(((pu8_src_top[1]*(16-u32_x_frac) +

+                                                                 pu8_src_top[0]*u32_x_frac)*(16-u32_y_frac) +

+                                                                (pu8_src_bottom[1]*(16-u32_x_frac) +

+                                                                 pu8_src_bottom[0]*u32_x_frac)*u32_y_frac )>>8);

+

+                            u32_temp_value= (u32_temp_value >> 7)*0xff;

+

+                            *pu8_data_out++ = (M4VIFI_UInt8)u32_temp_value;

+

+                            /* Update vertical accumulator */

+                            u32_y_accum += pC->u32_y_inc[i];

+                            if (u32_y_accum>>16)

+                            {

+                                pu8_data_in = pu8_data_in + (u32_y_accum >> 16) * i32_tmp_offset;

+                                u32_y_accum &= 0xffff;

+                            }

+

+                        }

+                    }

+                    else

+                    {

+                        /**< Loop on each output pixel in a row */

+                        for(k=0;k<pOut[i].u_width;k++)

+                        {

+

+                            u32_y_frac = (u32_y_accum >> 12)&15; /* Vertical weight factor */

+

+                            pu8_src_top = pu8_data_in + (pC->u32_x_accum[i] >> 16);

+

+                            pu8_src_bottom = pu8_src_top + i32_tmp_offset;

+

+                            /* Weighted combination */

+                            u32_temp_value = (M4VIFI_UInt8)(((pu8_src_top[0]*(16-u32_x_frac) +

+                                                                 pu8_src_top[1]*u32_x_frac)*(16-u32_y_frac) +

+                                                                (pu8_src_bottom[0]*(16-u32_x_frac) +

+                                                                 pu8_src_bottom[1]*u32_x_frac)*u32_y_frac )>>8);

+

+                            u32_temp_value= (u32_temp_value >> 7)*0xff;

+

+                            *pu8_data_out++ = (M4VIFI_UInt8)u32_temp_value;

+

+                            /* Update vertical accumulator */

+                            u32_y_accum += pC->u32_y_inc[i];

+                            if (u32_y_accum>>16)

+                            {

+                                pu8_data_in = pu8_data_in + (u32_y_accum >> 16) * i32_tmp_offset;

+                                u32_y_accum &= 0xffff;

+                            }

+                        }

+                    }

+                    pu8_data_out += pOut[i].u_stride - pOut[i].u_width;

+

+                    /* Update horizontal accumulator */

+                    pC->u32_x_accum[i] += pC->u32_x_inc[i];

+

+                    pu8_data_in = pu8_data_in_org;

+

+                }

+                } /**< M4OSA_TRUE == pC->m_bRevertXY */

+        } /**< 3 == i */

+            }

+        /**< In case of stripe mode, save current input pointer */

+        if(M4OSA_TRUE == pC->m_params.m_bOutputStripe)

+        {

+            pC->pu8_data_in[i] = pu8_data_in;

+        }

+    }

+

+    /**< Update number of processed rows, reset it if we have finished with the whole processing */

+    pC->m_procRows += pOut[0].u_height;

+    if(M4OSA_FALSE == pC->m_bRevertXY)

+    {

+        if(pC->m_params.m_outputSize.m_height <= pC->m_procRows)    pC->m_procRows = 0;

+    }

+    else

+    {

+        if(pC->m_params.m_outputSize.m_width <= pC->m_procRows) pC->m_procRows = 0;

+    }

+

+    return M4NO_ERROR;

+

+}

+/*+ Handle the image files here */

+

+/**

+ ******************************************************************************

+ * M4OSA_ERR LvGetImageThumbNail(const char *fileName, M4OSA_UInt32 height, M4OSA_UInt32 width, M4OSA_Void **pBuffer)

+ * @brief   This function returns the YUV420 buffer of a given image file (in ARGB8888 format)

+ * @note    The caller of the function is responsible for freeing the allocated YUV buffer

+ * @param   fileName:   (IN) Path to the file holding the image ARGB data

+ * @param   height:     (IN) Height of the image

+ * @param   width:      (IN) Width of the image

+ * @param   pBuffer:    (OUT) Address at which the pointer to the YUV data is returned

+ * @return  M4NO_ERROR: there is no error

+ * @return  M4ERR_ALLOC: memory allocation failed

+ * @return  M4ERR_FILE_NOT_FOUND: the given file does not exist

+ ******************************************************************************

+*/

+M4OSA_ERR LvGetImageThumbNail(const char *fileName, M4OSA_UInt32 height, M4OSA_UInt32 width, M4OSA_Void **pBuffer) {

+

+    M4VIFI_ImagePlane rgbPlane, *yuvPlane;

+    M4OSA_UInt32 frameSize_argb = (width * height * 4); // argb data

+    M4OSA_Context lImageFileFp  = M4OSA_NULL;

+    M4OSA_ERR err = M4NO_ERROR;

+

+    M4OSA_UInt8 *pTmpData = (M4OSA_UInt8*) M4OSA_malloc(frameSize_argb, M4VS, (M4OSA_Char*)"Image argb data");

+    if(pTmpData == M4OSA_NULL) {

+        LOGE("Failed to allocate memory for Image clip");

+        return M4ERR_ALLOC;

+    }

+

+    /** Read the ARGB data from the given file. */

+    M4OSA_ERR lerr = M4OSA_fileReadOpen(&lImageFileFp, (M4OSA_Void *) fileName, M4OSA_kFileRead);

+

+    if((lerr != M4NO_ERROR) || (lImageFileFp == M4OSA_NULL))

+    {

+        LOGE("LVPreviewController: Cannot open the file");

+        M4OSA_free((M4OSA_MemAddr32)pTmpData);

+        return M4ERR_FILE_NOT_FOUND;

+    }

+    lerr = M4OSA_fileReadData(lImageFileFp, (M4OSA_MemAddr8)pTmpData, &frameSize_argb);

+    if(lerr != M4NO_ERROR)

+    {

+        LOGE("LVPreviewController: Cannot read the data");

+        M4OSA_fileReadClose(lImageFileFp);

+        M4OSA_free((M4OSA_MemAddr32)pTmpData);

+        return lerr;

+    }

+    M4OSA_fileReadClose(lImageFileFp);

+

+    M4OSA_UInt32 frameSize = (width * height * 3); // Size of the RGB888 data.

+    rgbPlane.pac_data = (M4VIFI_UInt8*)M4OSA_malloc(frameSize, M4VS, (M4OSA_Char*)"Image clip RGB888 data");

+    if(rgbPlane.pac_data == M4OSA_NULL)

+    {

+        LOGE("Failed to allocate memory for Image clip");

+        M4OSA_free((M4OSA_MemAddr32)pTmpData);

+        return M4ERR_ALLOC;

+    }

+

+    /** Remove the alpha channel (the A byte comes first in each ARGB8888 pixel) */

+    for (M4OSA_UInt32 i = 0, j = 0; i < frameSize_argb; i++) {

+        if ((i % 4) == 0) continue;

+        rgbPlane.pac_data[j] = pTmpData[i];

+        j++;

+    }

+    M4OSA_free((M4OSA_MemAddr32)pTmpData);

+

+#ifdef FILE_DUMP

+    FILE *fp = fopen("/sdcard/Input/test_rgb.raw", "wb");

+    if(fp == NULL)

+        LOGE("Error: file cannot be created");

+    else {

+        fwrite(rgbPlane.pac_data, frameSize, 1, fp);

+        fclose(fp);

+    }

+#endif

+        rgbPlane.u_height = height;

+        rgbPlane.u_width = width;

+        rgbPlane.u_stride = width*3;

+        rgbPlane.u_topleft = 0;

+

+        yuvPlane = (M4VIFI_ImagePlane*)M4OSA_malloc(3*sizeof(M4VIFI_ImagePlane),

+                M4VS, (M4OSA_Char*)"M4xVSS_internalConvertRGBtoYUV: Output plane YUV");

+        if(yuvPlane == M4OSA_NULL) {

+            M4OSA_free((M4OSA_MemAddr32)rgbPlane.pac_data);

+            return M4ERR_ALLOC;

+        }

+        yuvPlane[0].u_height = height;

+        yuvPlane[0].u_width = width;

+        yuvPlane[0].u_stride = width;

+        yuvPlane[0].u_topleft = 0;

+        yuvPlane[0].pac_data = (M4VIFI_UInt8*)M4OSA_malloc(yuvPlane[0].u_height * yuvPlane[0].u_width * 1.5, M4VS, (M4OSA_Char*)"imageClip YUV data");

+        if(yuvPlane[0].pac_data == M4OSA_NULL) {

+            M4OSA_free((M4OSA_MemAddr32)yuvPlane);

+            M4OSA_free((M4OSA_MemAddr32)rgbPlane.pac_data);

+            return M4ERR_ALLOC;

+        }

+

+        yuvPlane[1].u_height = yuvPlane[0].u_height >>1;

+        yuvPlane[1].u_width = yuvPlane[0].u_width >> 1;

+        yuvPlane[1].u_stride = yuvPlane[1].u_width;

+        yuvPlane[1].u_topleft = 0;

+        yuvPlane[1].pac_data = (M4VIFI_UInt8*)(yuvPlane[0].pac_data + yuvPlane[0].u_height * yuvPlane[0].u_width);

+

+        yuvPlane[2].u_height = yuvPlane[0].u_height >>1;

+        yuvPlane[2].u_width = yuvPlane[0].u_width >> 1;

+        yuvPlane[2].u_stride = yuvPlane[2].u_width;

+        yuvPlane[2].u_topleft = 0;

+        yuvPlane[2].pac_data = (M4VIFI_UInt8*)(yuvPlane[1].pac_data + yuvPlane[1].u_height * yuvPlane[1].u_width);

+

+

+        err = M4VIFI_RGB888toYUV420(M4OSA_NULL, &rgbPlane, yuvPlane);

+        //err = M4VIFI_BGR888toYUV420(M4OSA_NULL, &rgbPlane, yuvPlane);

+        if(err != M4NO_ERROR)

+        {

+            LOGE("error when converting from RGB to YUV: 0x%x\n", err);

+        }

+        M4OSA_free((M4OSA_MemAddr32)rgbPlane.pac_data);

+

+        //LOGE("RGB to YUV done");

+#ifdef FILE_DUMP

+        FILE *fp1 = fopen("/sdcard/Input/test_yuv.raw", "wb");

+        if(fp1 == NULL)

+            LOGE("Error: file cannot be created");

+        else {

+            fwrite(yuvPlane[0].pac_data, yuvPlane[0].u_height * yuvPlane[0].u_width * 1.5, 1, fp1);

+            fclose(fp1);

+        }

+#endif

+        *pBuffer = yuvPlane[0].pac_data;

+        M4OSA_free((M4OSA_MemAddr32)yuvPlane);

+        return M4NO_ERROR;

+

+}

+M4OSA_Void prepareYUV420ImagePlane(M4VIFI_ImagePlane *plane,

+    M4OSA_UInt32 width, M4OSA_UInt32 height, M4VIFI_UInt8 *buffer) {
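
+    // Contiguous planar YUV420: the Y plane (width*height bytes) is followed

+    // by the U and V planes of (width/2)*(height/2) bytes each.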

+

+    //Y plane

+    plane[0].u_width = width;

+    plane[0].u_height = height;

+    plane[0].u_stride = plane[0].u_width;

+    plane[0].u_topleft = 0;

+    plane[0].pac_data = buffer;

+

+    // U plane

+    plane[1].u_width = width/2;

+    plane[1].u_height = height/2;

+    plane[1].u_stride = plane[1].u_width;

+    plane[1].u_topleft = 0;

+    plane[1].pac_data = buffer+(width*height);

+

+    // V Plane

+    plane[2].u_width = width/2;

+    plane[2].u_height = height/2;

+    plane[2].u_stride = plane[2].u_width;

+    plane[2].u_topleft = 0;

+    plane[2].pac_data = buffer+(width*height+(width/2)*(height/2));

+

+}

+

+M4OSA_Void prepareYV12ImagePlane(M4VIFI_ImagePlane *plane,

+    M4OSA_UInt32 width, M4OSA_UInt32 height, M4OSA_UInt32 stride,

+    M4VIFI_UInt8 *buffer) {
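
+    // YV12 layout: Y plane first, then V (Cr), then U (Cb); chroma strides are

+    // aligned to 16 bytes, assuming the incoming Y stride is 16-byte aligned.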

+

+    //Y plane

+    plane[0].u_width = width;

+    plane[0].u_height = height;

+    plane[0].u_stride = stride;

+    plane[0].u_topleft = 0;

+    plane[0].pac_data = buffer;

+

+    // U plane

+    plane[1].u_width = width/2;

+    plane[1].u_height = height/2;

+    plane[1].u_stride = android::PreviewRenderer::ALIGN(plane[0].u_stride/2, 16);

+    plane[1].u_topleft = 0;

+    plane[1].pac_data = (buffer

+                + plane[0].u_height * plane[0].u_stride

+                + (plane[0].u_height/2) * android::PreviewRenderer::ALIGN((

+                 plane[0].u_stride / 2), 16));

+

+    // V Plane

+    plane[2].u_width = width/2;

+    plane[2].u_height = height/2;

+    plane[2].u_stride = android::PreviewRenderer::ALIGN(plane[0].u_stride/2, 16);

+    plane[2].u_topleft = 0;

+    plane[2].pac_data = (buffer +

+     plane[0].u_height * android::PreviewRenderer::ALIGN(plane[0].u_stride, 16));

+

+

+}

+

+M4OSA_Void swapImagePlanes(

+    M4VIFI_ImagePlane *planeIn, M4VIFI_ImagePlane *planeOut,

+    M4VIFI_UInt8 *buffer1, M4VIFI_UInt8 *buffer2) {
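
+    // Ping-pong the planes: the previous output becomes the new input, and the

+    // output is redirected to whichever of the two scratch buffers is now free.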

+

+    planeIn[0].u_height = planeOut[0].u_height;

+    planeIn[0].u_width = planeOut[0].u_width;

+    planeIn[0].u_stride = planeOut[0].u_stride;

+    planeIn[0].u_topleft = planeOut[0].u_topleft;

+    planeIn[0].pac_data = planeOut[0].pac_data;

+

+    /**

+     * U plane */

+    planeIn[1].u_width = planeOut[1].u_width;

+    planeIn[1].u_height = planeOut[1].u_height;

+    planeIn[1].u_stride = planeOut[1].u_stride;

+    planeIn[1].u_topleft = planeOut[1].u_topleft;

+    planeIn[1].pac_data = planeOut[1].pac_data;

+    /**

+     * V Plane */

+    planeIn[2].u_width = planeOut[2].u_width;

+    planeIn[2].u_height = planeOut[2].u_height;

+    planeIn[2].u_stride = planeOut[2].u_stride;

+    planeIn[2].u_topleft = planeOut[2].u_topleft;

+    planeIn[2].pac_data = planeOut[2].pac_data;

+

+    if(planeOut[0].pac_data == (M4VIFI_UInt8*)buffer1)

+    {

+        planeOut[0].pac_data = (M4VIFI_UInt8*)buffer2;

+        planeOut[1].pac_data = (M4VIFI_UInt8*)(buffer2 +

+         planeOut[0].u_width*planeOut[0].u_height);

+

+        planeOut[2].pac_data = (M4VIFI_UInt8*)(buffer2 +

+         planeOut[0].u_width*planeOut[0].u_height +

+         planeOut[1].u_width*planeOut[1].u_height);

+    }

+    else

+    {

+        planeOut[0].pac_data = (M4VIFI_UInt8*)buffer1;

+        planeOut[1].pac_data = (M4VIFI_UInt8*)(buffer1 +

+         planeOut[0].u_width*planeOut[0].u_height);

+

+        planeOut[2].pac_data = (M4VIFI_UInt8*)(buffer1 +

+         planeOut[0].u_width*planeOut[0].u_height +

+         planeOut[1].u_width*planeOut[1].u_height);

+    }

+

+}

+

+M4OSA_Void computePercentageDone(

+    M4OSA_UInt32 ctsMs, M4OSA_UInt32 effectStartTimeMs,

+    M4OSA_UInt32 effectDuration, M4OSA_Double *percentageDone) {

+

+    M4OSA_Double videoEffectTime = 0;

+

+    // Compute how far from the beginning of the effect we are, in clip-based time.

+    videoEffectTime =

+     (M4OSA_Int32)(ctsMs + 0.5) - effectStartTimeMs;

+

+    // To calculate %, subtract timeIncrement,

+    // because the effect should finish on the last frame,

+    // which runs from CTS = (eof - timeIncrement) to CTS = eof

+    *percentageDone =

+     videoEffectTime / ((M4OSA_Float)effectDuration);

+

+    if(*percentageDone < 0.0) *percentageDone = 0.0;

+    if(*percentageDone > 1.0) *percentageDone = 1.0;

+

+}

+

+

+M4OSA_Void computeProgressForVideoEffect(

+    M4OSA_UInt32 ctsMs, M4OSA_UInt32 effectStartTimeMs,

+    M4OSA_UInt32 effectDuration, M4VSS3GPP_ExternalProgress* extProgress) {

+

+    M4OSA_Double percentageDone = 0;

+

+    computePercentageDone(ctsMs, effectStartTimeMs, effectDuration, &percentageDone);

+
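
+    // uiProgress is expressed in tenths of a percent (0..1000).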

+    extProgress->uiProgress = (M4OSA_UInt32)( percentageDone * 1000 );

+    extProgress->uiOutputTime = (M4OSA_UInt32)(ctsMs + 0.5);

+    extProgress->uiClipTime = extProgress->uiOutputTime;

+    extProgress->bIsLast = M4OSA_FALSE;

+}

+

+M4OSA_ERR prepareFramingStructure(

+    M4xVSS_FramingStruct* framingCtx,

+    M4VSS3GPP_EffectSettings* effectsSettings, M4OSA_UInt32 index,

+    M4VIFI_UInt8* overlayRGB, M4VIFI_UInt8* overlayYUV) {

+

+    M4OSA_ERR err = M4NO_ERROR;

+

+    // Force input RGB buffer to even size to avoid errors in YUV conversion

+    framingCtx->FramingRgb = effectsSettings[index].xVSS.pFramingBuffer;

+    framingCtx->FramingRgb->u_width = framingCtx->FramingRgb->u_width & ~1;

+    framingCtx->FramingRgb->u_height = framingCtx->FramingRgb->u_height & ~1;

+    framingCtx->FramingYuv = NULL;

+

+    framingCtx->duration = effectsSettings[index].uiDuration;

+    framingCtx->topleft_x = effectsSettings[index].xVSS.topleft_x;

+    framingCtx->topleft_y = effectsSettings[index].xVSS.topleft_y;

+    framingCtx->pCurrent = framingCtx;

+    framingCtx->pNext = framingCtx;

+    framingCtx->previousClipTime = -1;

+

+    framingCtx->alphaBlendingStruct =

+     (M4xVSS_internalEffectsAlphaBlending*)M4OSA_malloc(

+      sizeof(M4xVSS_internalEffectsAlphaBlending), M4VS,

+      (M4OSA_Char*)"alpha blending struct");

+    if(framingCtx->alphaBlendingStruct == M4OSA_NULL) {

+        return M4ERR_ALLOC;

+    }

+

+    framingCtx->alphaBlendingStruct->m_fadeInTime =

+     effectsSettings[index].xVSS.uialphaBlendingFadeInTime;

+

+    framingCtx->alphaBlendingStruct->m_fadeOutTime =

+     effectsSettings[index].xVSS.uialphaBlendingFadeOutTime;

+

+    framingCtx->alphaBlendingStruct->m_end =

+     effectsSettings[index].xVSS.uialphaBlendingEnd;

+

+    framingCtx->alphaBlendingStruct->m_middle =

+     effectsSettings[index].xVSS.uialphaBlendingMiddle;

+

+    framingCtx->alphaBlendingStruct->m_start =

+     effectsSettings[index].xVSS.uialphaBlendingStart;

+

+    // If new Overlay buffer, convert from RGB to YUV

+    if((overlayRGB != framingCtx->FramingRgb->pac_data) || (overlayYUV == NULL) ) {

+

+        // If YUV buffer exists, delete it

+        if(overlayYUV != NULL) {

+           M4OSA_free((M4OSA_MemAddr32)overlayYUV);

+           overlayYUV = NULL;

+        }

+        if(effectsSettings[index].xVSS.rgbType == M4VSS3GPP_kRGB565) {

+            // An input RGB565 plane is provided,

+            // so convert it to YUV420 and update the framing structure.

+            err = M4xVSS_internalConvertRGBtoYUV(framingCtx);

+        }

+        else if(effectsSettings[index].xVSS.rgbType == M4VSS3GPP_kRGB888) {

+            // An input RGB888 plane is provided,

+            // so convert it to YUV420 and update the framing structure.

+            err = M4xVSS_internalConvertRGB888toYUV(framingCtx);

+        }

+        else {

+            err = M4ERR_PARAMETER;

+        }

+        overlayYUV = framingCtx->FramingYuv[0].pac_data;

+        overlayRGB = framingCtx->FramingRgb->pac_data;
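
+        // Note: overlayRGB and overlayYUV are pointer parameters passed by

+        // value, so these assignments update only the local copies.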

+

+    }

+    else {

+        LOGV(" YUV buffer reuse");

+        framingCtx->FramingYuv = (M4VIFI_ImagePlane*)M4OSA_malloc(

+            3*sizeof(M4VIFI_ImagePlane), M4VS, (M4OSA_Char*)"YUV");

+

+        if(framingCtx->FramingYuv == M4OSA_NULL) {

+            return M4ERR_ALLOC;

+        }

+

+        framingCtx->FramingYuv[0].u_width = framingCtx->FramingRgb->u_width;

+        framingCtx->FramingYuv[0].u_height = framingCtx->FramingRgb->u_height;

+        framingCtx->FramingYuv[0].u_topleft = 0;

+        framingCtx->FramingYuv[0].u_stride = framingCtx->FramingRgb->u_width;

+        framingCtx->FramingYuv[0].pac_data = (M4VIFI_UInt8*)overlayYUV;

+

+        framingCtx->FramingYuv[1].u_width = (framingCtx->FramingRgb->u_width)>>1;

+        framingCtx->FramingYuv[1].u_height = (framingCtx->FramingRgb->u_height)>>1;

+        framingCtx->FramingYuv[1].u_topleft = 0;

+        framingCtx->FramingYuv[1].u_stride = (framingCtx->FramingRgb->u_width)>>1;

+        framingCtx->FramingYuv[1].pac_data = framingCtx->FramingYuv[0].pac_data +

+            framingCtx->FramingYuv[0].u_width * framingCtx->FramingYuv[0].u_height;

+

+        framingCtx->FramingYuv[2].u_width = (framingCtx->FramingRgb->u_width)>>1;

+        framingCtx->FramingYuv[2].u_height = (framingCtx->FramingRgb->u_height)>>1;

+        framingCtx->FramingYuv[2].u_topleft = 0;

+        framingCtx->FramingYuv[2].u_stride = (framingCtx->FramingRgb->u_width)>>1;

+        framingCtx->FramingYuv[2].pac_data = framingCtx->FramingYuv[1].pac_data +

+            framingCtx->FramingYuv[1].u_width * framingCtx->FramingYuv[1].u_height;

+

+        framingCtx->duration = 0;

+        framingCtx->previousClipTime = -1;

+        framingCtx->previewOffsetClipTime = -1;

+

+    }

+    return err;

+}

+

+M4OSA_ERR applyColorEffect(M4xVSS_VideoEffectType colorEffect,

+    M4VIFI_ImagePlane *planeIn, M4VIFI_ImagePlane *planeOut,

+    M4VIFI_UInt8 *buffer1, M4VIFI_UInt8 *buffer2, M4OSA_UInt16 rgbColorData) {

+

+    M4xVSS_ColorStruct colorContext;

+    M4OSA_ERR err = M4NO_ERROR;

+

+    colorContext.colorEffectType = colorEffect;

+    colorContext.rgb16ColorData = rgbColorData;

+

+    err = M4VSS3GPP_externalVideoEffectColor(

+     (M4OSA_Void *)&colorContext, planeIn, planeOut, NULL,

+     colorEffect);

+

+    if(err != M4NO_ERROR) {

+        LOGV("M4VSS3GPP_externalVideoEffectColor(%d) error %d",

+            colorEffect, err);
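
+        // buffer1 and buffer2 are freed for the caller on error; the NULL

+        // assignments below clear only the local pointer copies.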

+

+        if(NULL != buffer1) {

+            M4OSA_free((M4OSA_MemAddr32)buffer1);

+            buffer1 = NULL;

+        }

+        if(NULL != buffer2) {

+            M4OSA_free((M4OSA_MemAddr32)buffer2);

+            buffer2 = NULL;

+        }

+        return err;

+    }

+

+    // The out plane now becomes the in plane for adding other effects

+    swapImagePlanes(planeIn, planeOut, buffer1, buffer2);

+

+    return err;

+}

+

+M4OSA_ERR applyLumaEffect(M4VSS3GPP_VideoEffectType videoEffect,

+    M4VIFI_ImagePlane *planeIn, M4VIFI_ImagePlane *planeOut,

+    M4VIFI_UInt8 *buffer1, M4VIFI_UInt8 *buffer2, M4OSA_Int32 lum_factor) {

+

+    M4OSA_ERR err = M4NO_ERROR;

+

+    err = M4VFL_modifyLumaWithScale(

+         (M4ViComImagePlane*)planeIn,(M4ViComImagePlane*)planeOut,

+         lum_factor, NULL);

+

+    if(err != M4NO_ERROR) {

+        LOGE("M4VFL_modifyLumaWithScale(%d) error %d", videoEffect, err);

+

+        if(NULL != buffer1) {

+            M4OSA_free((M4OSA_MemAddr32)buffer1);

+            buffer1= NULL;

+        }

+        if(NULL != buffer2) {

+            M4OSA_free((M4OSA_MemAddr32)buffer2);

+            buffer2= NULL;

+        }

+        return err;

+    }

+

+    // The out plane now becomes the in plane for adding other effects

+    swapImagePlanes(planeIn, planeOut,(M4VIFI_UInt8 *)buffer1,

+     (M4VIFI_UInt8 *)buffer2);

+

+    return err;

+}

+

+M4OSA_ERR applyCurtainEffect(M4VSS3GPP_VideoEffectType videoEffect,

+    M4VIFI_ImagePlane *planeIn, M4VIFI_ImagePlane *planeOut,

+    M4VIFI_UInt8 *buffer1, M4VIFI_UInt8 *buffer2,

+    M4VFL_CurtainParam* curtainParams) {

+

+    M4OSA_ERR err = M4NO_ERROR;

+

+    // Apply the curtain effect

+    err = M4VFL_applyCurtain( (M4ViComImagePlane*)planeIn,

+          (M4ViComImagePlane*)planeOut, curtainParams, NULL);

+    if(err != M4NO_ERROR) {

+        LOGE("M4VFL_applyCurtain(%d) error %d", videoEffect, err);

+

+        if(NULL != buffer1) {

+            M4OSA_free((M4OSA_MemAddr32)buffer1);

+            buffer1= NULL;

+        }

+        if(NULL != buffer2) {

+            M4OSA_free((M4OSA_MemAddr32)buffer2);

+            buffer2 = NULL;

+        }

+        return err;

+    }

+

+    // The out plane now becomes the in plane for adding other effects

+    swapImagePlanes(planeIn, planeOut,(M4VIFI_UInt8 *)buffer1,

+    (M4VIFI_UInt8 *)buffer2);

+

+    return err;

+}

+

+M4OSA_ERR applyEffectsAndRenderingMode(vePostProcessParams *params) {

+

+    M4OSA_ERR err = M4NO_ERROR;

+    M4VIFI_ImagePlane planeIn[3], planeOut[3];

+    M4VIFI_UInt8 *finalOutputBuffer = NULL, *tempOutputBuffer = NULL;

+    M4OSA_Double percentageDone = 0;

+    M4OSA_Int32 lum_factor;

+    M4VFL_CurtainParam curtainParams;

+    M4VSS3GPP_ExternalProgress extProgress;

+    M4xVSS_FiftiesStruct fiftiesCtx;

+    M4OSA_UInt32 frameSize = 0, i=0;

+
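
+    // YUV420 frame size: width*height luma bytes plus half as much chroma.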

+    frameSize = (params->videoWidth*params->videoHeight*3) >> 1;

+

+    finalOutputBuffer = (M4VIFI_UInt8*)M4OSA_malloc(frameSize, M4VS,

+     (M4OSA_Char*)("lvpp finalOutputBuffer"));

+

+    if(finalOutputBuffer == NULL) {

+        LOGE("applyEffectsAndRenderingMode: malloc error");

+        return M4ERR_ALLOC;

+    }

+

+    // allocate the tempOutputBuffer

+    tempOutputBuffer = (M4VIFI_UInt8*)M4OSA_malloc(

+     ((params->videoHeight*params->videoWidth*3)>>1), M4VS, (M4OSA_Char*)("lvpp colorBuffer"));

+

+    if(tempOutputBuffer == NULL) {

+        LOGE("applyEffectsAndRenderingMode: malloc error tempOutputBuffer");

+        if(NULL != finalOutputBuffer) {

+            M4OSA_free((M4OSA_MemAddr32)finalOutputBuffer);

+            finalOutputBuffer = NULL;

+        }

+        return M4ERR_ALLOC;

+    }

+

+    // Initialize the In plane

+    prepareYUV420ImagePlane(planeIn, params->videoWidth,

+      params->videoHeight, params->vidBuffer);

+

+    // Initialize the Out plane

+    prepareYUV420ImagePlane(planeOut, params->videoWidth,

+      params->videoHeight, (M4VIFI_UInt8 *)tempOutputBuffer);

+

+    // planeIn contains the YUV420 input data of the postprocessing node,

+    // and planeOut will receive the YUV420 data with the effect applied.

+    // Each successive if-block filters the output of the previous one, so

+    // that concurrent effects are applied on top of each other.

+

+    if(params->currentVideoEffect & VIDEO_EFFECT_BLACKANDWHITE) {

+        err = applyColorEffect(M4xVSS_kVideoEffectType_BlackAndWhite,

+              planeIn, planeOut, (M4VIFI_UInt8 *)finalOutputBuffer,

+              (M4VIFI_UInt8 *)tempOutputBuffer, 0);

+        if(err != M4NO_ERROR) {

+            return err;

+        }

+    }

+

+    if(params->currentVideoEffect & VIDEO_EFFECT_PINK) {

+        err = applyColorEffect(M4xVSS_kVideoEffectType_Pink,

+              planeIn, planeOut, (M4VIFI_UInt8 *)finalOutputBuffer,

+              (M4VIFI_UInt8 *)tempOutputBuffer, 0);

+        if(err != M4NO_ERROR) {

+            return err;

+        }

+    }

+

+    if(params->currentVideoEffect & VIDEO_EFFECT_GREEN) {

+        err = applyColorEffect(M4xVSS_kVideoEffectType_Green,

+              planeIn, planeOut, (M4VIFI_UInt8 *)finalOutputBuffer,

+              (M4VIFI_UInt8 *)tempOutputBuffer, 0);

+        if(err != M4NO_ERROR) {

+            return err;

+        }

+    }

+

+    if(params->currentVideoEffect & VIDEO_EFFECT_SEPIA) {

+        err = applyColorEffect(M4xVSS_kVideoEffectType_Sepia,

+              planeIn, planeOut, (M4VIFI_UInt8 *)finalOutputBuffer,

+              (M4VIFI_UInt8 *)tempOutputBuffer, 0);

+        if(err != M4NO_ERROR) {

+            return err;

+        }

+    }

+

+    if(params->currentVideoEffect & VIDEO_EFFECT_NEGATIVE) {

+        err = applyColorEffect(M4xVSS_kVideoEffectType_Negative,

+              planeIn, planeOut, (M4VIFI_UInt8 *)finalOutputBuffer,

+              (M4VIFI_UInt8 *)tempOutputBuffer, 0);

+        if(err != M4NO_ERROR) {

+            return err;

+        }

+    }

+

+    if(params->currentVideoEffect & VIDEO_EFFECT_GRADIENT) {

+        // find the effect in effectSettings array

+        for(i=0;i<params->numberEffects;i++) {

+            if(params->effectsSettings[i].VideoEffectType ==

+             M4xVSS_kVideoEffectType_Gradient)

+                break;

+        }
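
+        // Assumes the Gradient effect is present in effectsSettings whenever

+        // VIDEO_EFFECT_GRADIENT is set; i is not range-checked here.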

+        err = applyColorEffect(M4xVSS_kVideoEffectType_Gradient,

+              planeIn, planeOut, (M4VIFI_UInt8 *)finalOutputBuffer,

+              (M4VIFI_UInt8 *)tempOutputBuffer,

+              params->effectsSettings[i].xVSS.uiRgb16InputColor);

+        if(err != M4NO_ERROR) {

+            return err;

+        }

+    }

+

+    if(params->currentVideoEffect & VIDEO_EFFECT_COLOR_RGB16) {

+        // Find the effect in effectSettings array

+        for(i=0;i<params->numberEffects;i++) {

+            if(params->effectsSettings[i].VideoEffectType ==

+             M4xVSS_kVideoEffectType_ColorRGB16)

+                break;

+        }

+        err = applyColorEffect(M4xVSS_kVideoEffectType_ColorRGB16,

+              planeIn, planeOut, (M4VIFI_UInt8 *)finalOutputBuffer,

+              (M4VIFI_UInt8 *)tempOutputBuffer,

+              params->effectsSettings[i].xVSS.uiRgb16InputColor);

+        if(err != M4NO_ERROR) {

+            return err;

+        }

+    }

+

+    if(params->currentVideoEffect & VIDEO_EFFECT_FIFTIES) {

+        // Find the effect in effectSettings array

+        for(i=0;i<params->numberEffects;i++) {

+            if(params->effectsSettings[i].VideoEffectType ==

+             M4xVSS_kVideoEffectType_Fifties)

+                break;

+        }

+        if(i < params->numberEffects) {

+            computeProgressForVideoEffect(params->timeMs,

+             params->effectsSettings[i].uiStartTime,

+             params->effectsSettings[i].uiDuration, &extProgress);

+

+            if(params->isFiftiesEffectStarted) {

+                fiftiesCtx.previousClipTime = -1;

+            }

+            fiftiesCtx.fiftiesEffectDuration =

+             1000/params->effectsSettings[i].xVSS.uiFiftiesOutFrameRate;

+

+            fiftiesCtx.shiftRandomValue = 0;

+            fiftiesCtx.stripeRandomValue = 0;

+

+            err = M4VSS3GPP_externalVideoEffectFifties(

+             (M4OSA_Void *)&fiftiesCtx, planeIn, planeOut, &extProgress,

+             M4xVSS_kVideoEffectType_Fifties);

+

+            if(err != M4NO_ERROR) {

+                LOGE("M4VSS3GPP_externalVideoEffectFifties error 0x%x", err);

+

+                if(NULL != finalOutputBuffer) {

+                    M4OSA_free((M4OSA_MemAddr32)finalOutputBuffer);

+                    finalOutputBuffer = NULL;

+                }

+                if(NULL != tempOutputBuffer) {

+                    M4OSA_free((M4OSA_MemAddr32)tempOutputBuffer);

+                    tempOutputBuffer = NULL;

+                }

+                return err;

+            }

+

+            // The out plane now becomes the in plane for adding other effects

+            swapImagePlanes(planeIn, planeOut,(M4VIFI_UInt8 *)finalOutputBuffer,

+             (M4VIFI_UInt8 *)tempOutputBuffer);

+        }

+    }

+

+    if(params->currentVideoEffect & VIDEO_EFFECT_FRAMING) {

+

+        M4xVSS_FramingStruct framingCtx;

+        // Find the effect in effectSettings array

+        for(i=0;i<params->numberEffects;i++) {

+            if(params->effectsSettings[i].VideoEffectType ==

+             M4xVSS_kVideoEffectType_Framing) {

+                if((params->effectsSettings[i].uiStartTime <= params->timeMs + params->timeOffset) &&

+                   ((params->effectsSettings[i].uiStartTime+

+                     params->effectsSettings[i].uiDuration) >= params->timeMs + params->timeOffset))

+                {

+                        break;

+                }

+            }

+        }

+        if(i < params->numberEffects) {

+            computeProgressForVideoEffect(params->timeMs,

+             params->effectsSettings[i].uiStartTime,

+             params->effectsSettings[i].uiDuration, &extProgress);

+

+            err = prepareFramingStructure(&framingCtx,

+                  params->effectsSettings, i, params->overlayFrameRGBBuffer,

+                  params->overlayFrameYUVBuffer);

+

+            if(err == M4NO_ERROR) {

+                err = M4VSS3GPP_externalVideoEffectFraming(

+                      (M4OSA_Void *)&framingCtx, planeIn, planeOut, &extProgress,

+                      M4xVSS_kVideoEffectType_Framing);

+            }

+

+            M4OSA_free((M4OSA_MemAddr32)framingCtx.alphaBlendingStruct);

+

+            if(framingCtx.FramingYuv != NULL) {

+                M4OSA_free((M4OSA_MemAddr32)framingCtx.FramingYuv);

+                framingCtx.FramingYuv = NULL;

+            }

+            // If prepareFramingStructure or M4VSS3GPP_externalVideoEffectFraming

+            // returned an error, free the buffers and return.

+            if(err != M4NO_ERROR) {

+

+                if(NULL != finalOutputBuffer) {

+                    M4OSA_free((M4OSA_MemAddr32)finalOutputBuffer);

+                    finalOutputBuffer = NULL;

+                }

+                if(NULL != tempOutputBuffer) {

+                    M4OSA_free((M4OSA_MemAddr32)tempOutputBuffer);

+                    tempOutputBuffer = NULL;

+                }

+                return err;

+            }

+

+            // The out plane now becomes the in plane for adding other effects

+            swapImagePlanes(planeIn, planeOut,(M4VIFI_UInt8 *)finalOutputBuffer,

+             (M4VIFI_UInt8 *)tempOutputBuffer);

+        }

+    }

+

+    if(params->currentVideoEffect & VIDEO_EFFECT_FADEFROMBLACK) {

+        /* find the effect in effectSettings array*/

+        for(i=0;i<params->numberEffects;i++) {

+            if(params->effectsSettings[i].VideoEffectType ==

+             M4VSS3GPP_kVideoEffectType_FadeFromBlack)

+                break;

+        }

+

+        if(i < params->numberEffects) {

+            computePercentageDone(params->timeMs,

+             params->effectsSettings[i].uiStartTime,

+             params->effectsSettings[i].uiDuration, &percentageDone);

+

+            // Compute where we are in the effect (scale is 0->1024)

+            lum_factor = (M4OSA_Int32)( percentageDone * 1024 );

+            // Apply the darkening effect

+            err = applyLumaEffect(M4VSS3GPP_kVideoEffectType_FadeFromBlack,

+                  planeIn, planeOut, (M4VIFI_UInt8 *)finalOutputBuffer,

+                  (M4VIFI_UInt8 *)tempOutputBuffer, lum_factor);

+            if(err != M4NO_ERROR) {

+                return err;

+            }

+        }

+    }

+

+    if(params->currentVideoEffect & VIDEO_EFFECT_FADETOBLACK) {

+        // Find the effect in effectSettings array

+        for(i=0;i<params->numberEffects;i++) {

+            if(params->effectsSettings[i].VideoEffectType ==

+             M4VSS3GPP_kVideoEffectType_FadeToBlack)

+                break;

+        }

+        if(i < params->numberEffects) {

+            computePercentageDone(params->timeMs,

+             params->effectsSettings[i].uiStartTime,

+             params->effectsSettings[i].uiDuration, &percentageDone);

+

+            // Compute where we are in the effect (scale is 0->1024)

+            lum_factor = (M4OSA_Int32)( (1.0-percentageDone) * 1024 );

+            // Apply the darkening effect

+            err = applyLumaEffect(M4VSS3GPP_kVideoEffectType_FadeToBlack,

+                  planeIn, planeOut, (M4VIFI_UInt8 *)finalOutputBuffer,

+                  (M4VIFI_UInt8 *)tempOutputBuffer, lum_factor);

+            if(err != M4NO_ERROR) {

+                return err;

+            }

+        }

+    }

+

+    if(params->currentVideoEffect & VIDEO_EFFECT_CURTAINOPEN) {

+        // Find the effect in effectSettings array

+        for(i=0;i<params->numberEffects;i++) {

+            if(params->effectsSettings[i].VideoEffectType ==

+             M4VSS3GPP_kVideoEffectType_CurtainOpening)

+                break;

+        }

+        if(i < params->numberEffects) {

+            computePercentageDone(params->timeMs,

+             params->effectsSettings[i].uiStartTime,

+             params->effectsSettings[i].uiDuration, &percentageDone);

+

+            // Compute where we are in the effect (scale is 0->height).

+            // This is done in floating point because the intermediate

+            // product with the height can be very large (with long clips).

+            curtainParams.nb_black_lines =

+             (M4OSA_UInt16)((1.0 - percentageDone) * planeIn[0].u_height );

+            // The curtain hangs from the top of the frame

+            curtainParams.top_is_black = 1;

+

+            // Apply the curtain effect

+            err = applyCurtainEffect(M4VSS3GPP_kVideoEffectType_CurtainOpening,

+                  planeIn, planeOut, (M4VIFI_UInt8 *)finalOutputBuffer,

+                  (M4VIFI_UInt8 *)tempOutputBuffer, &curtainParams);

+            if(err != M4NO_ERROR) {

+                return err;

+            }

+        }

+    }

+

+    if(params->currentVideoEffect & VIDEO_EFFECT_CURTAINCLOSE) {

+        // Find the effect in effectSettings array

+        for(i=0;i<params->numberEffects;i++) {

+            if(params->effectsSettings[i].VideoEffectType ==

+             M4VSS3GPP_kVideoEffectType_CurtainClosing)

+                break;

+        }

+        if(i < params->numberEffects) {

+            computePercentageDone(params->timeMs,

+             params->effectsSettings[i].uiStartTime,

+             params->effectsSettings[i].uiDuration, &percentageDone);

+

+            // Compute where we are in the effect (scale is 0->height).

+            // This is done in floating point because the intermediate

+            // product with the height can be very large (with long clips).

+            curtainParams.nb_black_lines =

+             (M4OSA_UInt16)(percentageDone * planeIn[0].u_height );

+

+            // The curtain hangs from the top of the frame

+            curtainParams.top_is_black = 1;

+

+            // Apply the curtain effect

+            err = applyCurtainEffect(M4VSS3GPP_kVideoEffectType_CurtainClosing,

+                  planeIn, planeOut, (M4VIFI_UInt8 *)finalOutputBuffer,

+                  (M4VIFI_UInt8 *)tempOutputBuffer, &curtainParams);

+            if(err != M4NO_ERROR) {

+                return err;

+            }

+        }

+    }

+

+    LOGV("applyEffectsAndRenderingMode: set up the output YV12 planes");

+    // Set the output YUV420 plane to be compatible with YV12 format

+    // W & H even

+    // YVU instead of YUV

+    // align buffers on 32 bits

+

+    // Y plane

+    // In YV12 format, sizes must be even; round width and height up to even.

+    M4OSA_UInt32 yv12PlaneWidth = ((params->outVideoWidth +1)>>1)<<1;

+    M4OSA_UInt32 yv12PlaneHeight = ((params->outVideoHeight+1)>>1)<<1;

+

+    prepareYV12ImagePlane(planeOut, yv12PlaneWidth, yv12PlaneHeight,

+     (M4OSA_UInt32)params->outBufferStride, (M4VIFI_UInt8 *)params->pOutBuffer);

+

+    err = applyRenderingMode(planeIn, planeOut, params->renderingMode);

+

+    if(M4OSA_NULL != finalOutputBuffer) {

+        M4OSA_free((M4OSA_MemAddr32)finalOutputBuffer);

+        finalOutputBuffer= M4OSA_NULL;

+    }

+    if(M4OSA_NULL != tempOutputBuffer) {

+        M4OSA_free((M4OSA_MemAddr32)tempOutputBuffer);

+        tempOutputBuffer = M4OSA_NULL;

+    }

+    if(err != M4NO_ERROR) {

+        LOGV("applyEffectsAndRenderingMode: applyRenderingMode returned err=%d", err);

+        return err;

+    }

+    return M4NO_ERROR;

+}

diff --git a/libvideoeditor/lvpp/VideoEditorTools.h b/libvideoeditor/lvpp/VideoEditorTools.h
new file mode 100755
index 0000000..368047a
--- /dev/null
+++ b/libvideoeditor/lvpp/VideoEditorTools.h
@@ -0,0 +1,145 @@
+/*

+ * Copyright (C) 2011 NXP Software

+ * Copyright (C) 2011 The Android Open Source Project

+ *

+ * Licensed under the Apache License, Version 2.0 (the "License");

+ * you may not use this file except in compliance with the License.

+ * You may obtain a copy of the License at

+ *

+ *      http://www.apache.org/licenses/LICENSE-2.0

+ *

+ * Unless required by applicable law or agreed to in writing, software

+ * distributed under the License is distributed on an "AS IS" BASIS,

+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

+ * See the License for the specific language governing permissions and

+ * limitations under the License.

+ */

+

+#ifndef ANDROID_VE_TOOLS_H

+#define ANDROID_VE_TOOLS_H

+

+#include "M4OSA_Types.h"

+#include "M4OSA_Memory.h"

+#include "M4OSA_Debug.h"

+#include "M4VIFI_FiltersAPI.h"

+/* Macro definitions */

+#include "M4VIFI_Defines.h"

+/* Clip table declaration */

+#include "M4VIFI_Clip.h"

+#include "M4VFL_transition.h"

+#include "M4VSS3GPP_API.h"

+#include "M4xVSS_API.h"

+#include "M4xVSS_Internal.h"

+

+#include "M4AIR_API.h"

+#include "PreviewRenderer.h"

+#define MEDIA_RENDERING_INVALID 255

+

+#define TRANSPARENT_COLOR 0x7E0

+#define LUM_FACTOR_MAX 10
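
+/* Video effect bit-mask flags: several effects may be OR-ed together in

+   vePostProcessParams::currentVideoEffect. */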

+enum {

+    VIDEO_EFFECT_NONE               = 0,

+    VIDEO_EFFECT_BLACKANDWHITE      = 1,

+    VIDEO_EFFECT_PINK               = 2,

+    VIDEO_EFFECT_GREEN              = 4,

+    VIDEO_EFFECT_SEPIA              = 8,

+    VIDEO_EFFECT_NEGATIVE           = 16,

+    VIDEO_EFFECT_FRAMING            = 32,

+    VIDEO_EFFECT_FIFTIES            = 64,

+    VIDEO_EFFECT_COLOR_RGB16        = 128,

+    VIDEO_EFFECT_GRADIENT           = 256,

+    VIDEO_EFFECT_FADEFROMBLACK      = 512,

+    VIDEO_EFFECT_CURTAINOPEN        = 1024,

+    VIDEO_EFFECT_FADETOBLACK        = 2048,

+    VIDEO_EFFECT_CURTAINCLOSE       = 4096,

+};

+

+typedef struct {

+    M4VIFI_UInt8 *vidBuffer;

+    M4OSA_UInt32 videoWidth;

+    M4OSA_UInt32 videoHeight;

+    M4OSA_UInt32 timeMs;

+    M4OSA_UInt32 timeOffset; // Total duration of the clips already played;

+                             // used by the Framing effect.

+    M4VSS3GPP_EffectSettings* effectsSettings;

+    M4OSA_UInt32 numberEffects;

+    M4OSA_UInt32 outVideoWidth;

+    M4OSA_UInt32 outVideoHeight;

+    M4OSA_UInt32 currentVideoEffect;

+    M4OSA_Bool isFiftiesEffectStarted;

+    M4xVSS_MediaRendering renderingMode;

+    uint8_t *pOutBuffer;

+    size_t outBufferStride;

+    M4VIFI_UInt8*  overlayFrameRGBBuffer;

+    M4VIFI_UInt8*  overlayFrameYUVBuffer;

+} vePostProcessParams;

+

+M4VIFI_UInt8 M4VIFI_YUV420PlanarToYUV420Semiplanar(void *user_data, M4VIFI_ImagePlane *PlaneIn, M4VIFI_ImagePlane *PlaneOut );

+M4VIFI_UInt8 M4VIFI_SemiplanarYUV420toYUV420(void *user_data, M4VIFI_ImagePlane *PlaneIn, M4VIFI_ImagePlane *PlaneOut );

+

+M4OSA_ERR M4VSS3GPP_externalVideoEffectColor(M4OSA_Void *pFunctionContext, M4VIFI_ImagePlane *PlaneIn,

+                                                    M4VIFI_ImagePlane *PlaneOut,M4VSS3GPP_ExternalProgress *pProgress, M4OSA_UInt32 uiEffectKind);

+

+M4OSA_ERR M4VSS3GPP_externalVideoEffectFraming( M4OSA_Void *userData, M4VIFI_ImagePlane PlaneIn[3], M4VIFI_ImagePlane *PlaneOut, M4VSS3GPP_ExternalProgress *pProgress, M4OSA_UInt32 uiEffectKind );

+

+M4OSA_ERR M4VSS3GPP_externalVideoEffectFifties( M4OSA_Void *pUserData, M4VIFI_ImagePlane *pPlaneIn, M4VIFI_ImagePlane *pPlaneOut, M4VSS3GPP_ExternalProgress *pProgress, M4OSA_UInt32 uiEffectKind );

+

+unsigned char M4VFL_modifyLumaWithScale(M4ViComImagePlane *plane_in, M4ViComImagePlane *plane_out, unsigned long lum_factor, void *user_data);

+unsigned char M4VFL_applyCurtain(M4ViComImagePlane *plane_in, M4ViComImagePlane *plane_out, M4VFL_CurtainParam *curtain_factor, void *user_data);

+

+M4OSA_ERR M4xVSS_internalConvertRGBtoYUV(M4xVSS_FramingStruct* framingCtx);

+M4VIFI_UInt8    M4VIFI_xVSS_RGB565toYUV420(void *pUserData, M4VIFI_ImagePlane *pPlaneIn,

+                                                      M4VIFI_ImagePlane *pPlaneOut);

+

+M4OSA_ERR M4xVSS_internalConvertRGB888toYUV(M4xVSS_FramingStruct* framingCtx);

+M4VIFI_UInt8 M4VIFI_RGB888toYUV420(void *pUserData, M4VIFI_ImagePlane *PlaneIn, M4VIFI_ImagePlane PlaneOut[3]);

+

+/*+ Handle the image files here */

+M4OSA_ERR LvGetImageThumbNail(const char *fileName, M4OSA_UInt32 height, M4OSA_UInt32 width, M4OSA_Void **pBuffer);

+/*- Handle the image files here */

+

+M4OSA_ERR applyRenderingMode(M4VIFI_ImagePlane* pPlaneIn, M4VIFI_ImagePlane* pPlaneOut, M4xVSS_MediaRendering mediaRendering);

+

+

+M4VIFI_UInt8 M4VIFI_YUV420toYUV420(void *user_data, M4VIFI_ImagePlane PlaneIn[3], M4VIFI_ImagePlane *PlaneOut );

+M4VIFI_UInt8    M4VIFI_ResizeBilinearYUV420toYUV420(void *pUserData,

+                                                                M4VIFI_ImagePlane *pPlaneIn,

+                                                                M4VIFI_ImagePlane *pPlaneOut);

+

+M4OSA_Void prepareYUV420ImagePlane(M4VIFI_ImagePlane *plane,

+    M4OSA_UInt32 width, M4OSA_UInt32 height, M4VIFI_UInt8 *buffer);

+

+M4OSA_Void prepareYV12ImagePlane(M4VIFI_ImagePlane *plane,

+    M4OSA_UInt32 width, M4OSA_UInt32 height, M4OSA_UInt32 stride, M4VIFI_UInt8 *buffer);

+

+M4OSA_Void swapImagePlanes(

+    M4VIFI_ImagePlane *planeIn, M4VIFI_ImagePlane *planeOut,

+    M4VIFI_UInt8 *buffer1, M4VIFI_UInt8 *buffer2);

+

+M4OSA_Void computePercentageDone(

+     M4OSA_UInt32 ctsMs, M4OSA_UInt32 effectStartTimeMs,

+     M4OSA_UInt32 effectDuration, M4OSA_Double *percentageDone);

+

+M4OSA_Void computeProgressForVideoEffect(

+     M4OSA_UInt32 ctsMs, M4OSA_UInt32 effectStartTimeMs,

+     M4OSA_UInt32 effectDuration, M4VSS3GPP_ExternalProgress* extProgress);

+

+M4OSA_ERR prepareFramingStructure(

+    M4xVSS_FramingStruct* framingCtx,

+    M4VSS3GPP_EffectSettings* effectsSettings, M4OSA_UInt32 index,

+    M4VIFI_UInt8* overlayRGB, M4VIFI_UInt8* overlayYUV);

+

+M4OSA_ERR applyColorEffect(M4xVSS_VideoEffectType colorEffect,

+    M4VIFI_ImagePlane *planeIn, M4VIFI_ImagePlane *planeOut,

+    M4VIFI_UInt8 *buffer1, M4VIFI_UInt8 *buffer2, M4OSA_UInt16 rgbColorData);

+

+M4OSA_ERR applyLumaEffect(M4VSS3GPP_VideoEffectType videoEffect,

+    M4VIFI_ImagePlane *planeIn, M4VIFI_ImagePlane *planeOut,

+    M4VIFI_UInt8 *buffer1, M4VIFI_UInt8 *buffer2, M4OSA_Int32 lum_factor);

+

+M4OSA_ERR applyCurtainEffect(M4VSS3GPP_VideoEffectType videoEffect,

+    M4VIFI_ImagePlane *planeIn, M4VIFI_ImagePlane *planeOut,

+    M4VIFI_UInt8 *buffer1, M4VIFI_UInt8 *buffer2, M4VFL_CurtainParam* curtainParams);

+

+M4OSA_ERR applyEffectsAndRenderingMode(vePostProcessParams* params);

+#endif // ANDROID_VE_TOOLS_H