Make line-ending consistent (unix style).

Change-Id: Id837b042952ff5d97907591f337b2222cff2c8a9
diff --git a/libvideoeditor/lvpp/Android.mk b/libvideoeditor/lvpp/Android.mk
index 116ab29..92db50b 100755
--- a/libvideoeditor/lvpp/Android.mk
+++ b/libvideoeditor/lvpp/Android.mk
@@ -1,105 +1,105 @@
-#

-# Copyright (C) 2011 The Android Open Source Project

-#

-# Licensed under the Apache License, Version 2.0 (the "License");

-# you may not use this file except in compliance with the License.

-# You may obtain a copy of the License at

-#

-#      http://www.apache.org/licenses/LICENSE-2.0

-#

-# Unless required by applicable law or agreed to in writing, software

-# distributed under the License is distributed on an "AS IS" BASIS,

-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

-# See the License for the specific language governing permissions and

-# limitations under the License.

-#

-

-LOCAL_PATH:= $(call my-dir)

-

-#

-# libvideoeditorplayer

-#

-

-include $(CLEAR_VARS)

-

-LOCAL_MODULE:= libvideoeditorplayer

-

-LOCAL_SRC_FILES:=          \

-    VideoEditorTools.cpp \

-    VideoEditorPlayer.cpp \

-    PreviewPlayer.cpp \

-    VideoEditorAudioPlayer.cpp \

-    VideoEditorPreviewController.cpp \

-    VideoEditorSRC.cpp \

-    DummyAudioSource.cpp \

-    DummyVideoSource.cpp \

-    VideoEditorBGAudioProcessing.cpp \

-    AudioPlayerBase.cpp \

-    PreviewPlayerBase.cpp \

-    PreviewRenderer.cpp

-

-LOCAL_MODULE_TAGS := optional

-

-LOCAL_STATIC_LIBRARIES := \

-    libvideoeditor_osal \

-    libstagefright_color_conversion

-

-

-

-LOCAL_SHARED_LIBRARIES := \

-    libbinder          \

-    libutils           \

-    libcutils          \

-    libmedia           \

-    libdrmframework    \

-    libstagefright  \

-    libstagefright_omx  \

-    libstagefright_foundation \

-    libgui \

-    libaudioflinger \

-    libui

-

-

-LOCAL_C_INCLUDES += \

-    $(TOP)/frameworks/base/core/jni \

-    $(TOP)/frameworks/base/include \

-    $(TOP)/frameworks/base/include/media \

-    $(TOP)/frameworks/base/media/libmediaplayerservice \

-    $(TOP)/frameworks/base/media/libstagefright \

-    $(TOP)/frameworks/base/media/libstagefright/include \

-    $(TOP)/frameworks/base/media/libstagefright/rtsp \

-    $(JNI_H_INCLUDE) \

-    $(call include-path-for, corecg graphics) \

-    $(TOP)/frameworks/base/include/media/stagefright/openmax \

-    $(TOP)/frameworks/media/libvideoeditor/osal/inc \

-    $(TOP)/frameworks/media/libvideoeditor/vss/common/inc \

-    $(TOP)/frameworks/media/libvideoeditor/vss/mcs/inc \

-    $(TOP)/frameworks/media/libvideoeditor/vss/inc \

-    $(TOP)/frameworks/media/libvideoeditor/vss/stagefrightshells/inc \

-    $(TOP)/frameworks/media/libvideoeditor/lvpp \

-    $(TOP)/frameworks/base/media/jni/mediaeditor \

-    $(TOP)/frameworks/base/services/audioflinger

-

-

-ifeq ($(TARGET_SIMULATOR),true)

-else

-    LOCAL_SHARED_LIBRARIES += libdl

-endif

-

-# All of the shared libraries we link against.

-LOCAL_LDLIBS := \

-    -lpthread -ldl

-

-LOCAL_CFLAGS += -Wno-multichar \

-     -DM4_ENABLE_RENDERINGMODE \

-    -DUSE_STAGEFRIGHT_CODECS \

-    -DUSE_STAGEFRIGHT_AUDIODEC \

-    -DUSE_STAGEFRIGHT_VIDEODEC \

-    -DUSE_STAGEFRIGHT_AUDIOENC \

-    -DUSE_STAGEFRIGHT_VIDEOENC \

-    -DUSE_STAGEFRIGHT_READERS \

-    -DUSE_STAGEFRIGHT_3GPP_READER

-

-include $(BUILD_SHARED_LIBRARY)

-

-#include $(call all-makefiles-under,$(LOCAL_PATH))

+#
+# Copyright (C) 2011 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+LOCAL_PATH:= $(call my-dir)
+
+#
+# libvideoeditorplayer
+#
+
+include $(CLEAR_VARS)
+
+LOCAL_MODULE:= libvideoeditorplayer
+
+LOCAL_SRC_FILES:=          \
+    VideoEditorTools.cpp \
+    VideoEditorPlayer.cpp \
+    PreviewPlayer.cpp \
+    VideoEditorAudioPlayer.cpp \
+    VideoEditorPreviewController.cpp \
+    VideoEditorSRC.cpp \
+    DummyAudioSource.cpp \
+    DummyVideoSource.cpp \
+    VideoEditorBGAudioProcessing.cpp \
+    AudioPlayerBase.cpp \
+    PreviewPlayerBase.cpp \
+    PreviewRenderer.cpp
+
+LOCAL_MODULE_TAGS := optional
+
+LOCAL_STATIC_LIBRARIES := \
+    libvideoeditor_osal \
+    libstagefright_color_conversion
+
+
+
+LOCAL_SHARED_LIBRARIES := \
+    libbinder          \
+    libutils           \
+    libcutils          \
+    libmedia           \
+    libdrmframework    \
+    libstagefright  \
+    libstagefright_omx  \
+    libstagefright_foundation \
+    libgui \
+    libaudioflinger \
+    libui
+
+
+LOCAL_C_INCLUDES += \
+    $(TOP)/frameworks/base/core/jni \
+    $(TOP)/frameworks/base/include \
+    $(TOP)/frameworks/base/include/media \
+    $(TOP)/frameworks/base/media/libmediaplayerservice \
+    $(TOP)/frameworks/base/media/libstagefright \
+    $(TOP)/frameworks/base/media/libstagefright/include \
+    $(TOP)/frameworks/base/media/libstagefright/rtsp \
+    $(JNI_H_INCLUDE) \
+    $(call include-path-for, corecg graphics) \
+    $(TOP)/frameworks/base/include/media/stagefright/openmax \
+    $(TOP)/frameworks/media/libvideoeditor/osal/inc \
+    $(TOP)/frameworks/media/libvideoeditor/vss/common/inc \
+    $(TOP)/frameworks/media/libvideoeditor/vss/mcs/inc \
+    $(TOP)/frameworks/media/libvideoeditor/vss/inc \
+    $(TOP)/frameworks/media/libvideoeditor/vss/stagefrightshells/inc \
+    $(TOP)/frameworks/media/libvideoeditor/lvpp \
+    $(TOP)/frameworks/base/media/jni/mediaeditor \
+    $(TOP)/frameworks/base/services/audioflinger
+
+
+ifeq ($(TARGET_SIMULATOR),true)
+else
+    LOCAL_SHARED_LIBRARIES += libdl
+endif
+
+# All of the shared libraries we link against.
+LOCAL_LDLIBS := \
+    -lpthread -ldl
+
+LOCAL_CFLAGS += -Wno-multichar \
+     -DM4_ENABLE_RENDERINGMODE \
+    -DUSE_STAGEFRIGHT_CODECS \
+    -DUSE_STAGEFRIGHT_AUDIODEC \
+    -DUSE_STAGEFRIGHT_VIDEODEC \
+    -DUSE_STAGEFRIGHT_AUDIOENC \
+    -DUSE_STAGEFRIGHT_VIDEOENC \
+    -DUSE_STAGEFRIGHT_READERS \
+    -DUSE_STAGEFRIGHT_3GPP_READER
+
+include $(BUILD_SHARED_LIBRARY)
+
+#include $(call all-makefiles-under,$(LOCAL_PATH))
diff --git a/libvideoeditor/lvpp/DummyAudioSource.h b/libvideoeditor/lvpp/DummyAudioSource.h
index b329bb4..8715b10 100755
--- a/libvideoeditor/lvpp/DummyAudioSource.h
+++ b/libvideoeditor/lvpp/DummyAudioSource.h
@@ -1,78 +1,78 @@
-/*

- * Copyright (C) 2011 The Android Open Source Project

- *

- * Licensed under the Apache License, Version 2.0 (the "License");

- * you may not use this file except in compliance with the License.

- * You may obtain a copy of the License at

- *

- *      http://www.apache.org/licenses/LICENSE-2.0

- *

- * Unless required by applicable law or agreed to in writing, software

- * distributed under the License is distributed on an "AS IS" BASIS,

- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

- * See the License for the specific language governing permissions and

- * limitations under the License.

- */

-

-#ifndef DUMMY_AUDIOSOURCE_H_

-

-#define DUMMY_AUDIOSOURCE_H_

-

-#include <utils/RefBase.h>

-#include <utils/threads.h>

-#include <media/stagefright/MediaBufferGroup.h>

-#include <media/stagefright/MediaSource.h>

-#include <media/stagefright/DataSource.h>

-

-

-namespace android {

-

-

-class MediaBuffer;

-class MetaData;

-struct MediaBufferGroup;

-

-

-

-struct DummyAudioSource : public MediaSource {

-

-public:

-    static sp<DummyAudioSource> Create(int32_t samplingRate,

-                                       int32_t channelCount,

-                                       int64_t frameDurationUs,

-                                       int64_t audioDurationUs);

-    virtual status_t start(MetaData *params = NULL);

-    virtual status_t stop();

-    virtual sp<MetaData> getFormat();

-    virtual status_t read (MediaBuffer **buffer,

-                            const MediaSource::ReadOptions *options = NULL);

-    void setDuration (int64_t audioDurationUs);

-

-protected:

-    DummyAudioSource (int32_t samplingRate,

-                      int32_t channelCount,

-                      int64_t frameDurationUs,

-                      int64_t audioDurationUs);

-    virtual ~DummyAudioSource();

-

-private:

-    int32_t mSamplingRate;

-    int32_t mChannelCount;

-    int64_t mFrameDurationUs;

-    int32_t mNumberOfSamplePerFrame;

-    int64_t mAudioDurationUs;

-    int64_t mTimeStampUs;

-    Mutex mLock;

-

-    MediaBufferGroup *mBufferGroup;

-

-    DummyAudioSource(const DummyAudioSource &);

-    DummyAudioSource &operator=(const DummyAudioSource &);

-

-};

-

-}//namespace android

-

-

-#endif //DUMMY_AUDIOSOURCE_H_

-

+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef DUMMY_AUDIOSOURCE_H_
+
+#define DUMMY_AUDIOSOURCE_H_
+
+#include <utils/RefBase.h>
+#include <utils/threads.h>
+#include <media/stagefright/MediaBufferGroup.h>
+#include <media/stagefright/MediaSource.h>
+#include <media/stagefright/DataSource.h>
+
+
+namespace android {
+
+
+class MediaBuffer;
+class MetaData;
+struct MediaBufferGroup;
+
+
+
+struct DummyAudioSource : public MediaSource {
+
+public:
+    static sp<DummyAudioSource> Create(int32_t samplingRate,
+                                       int32_t channelCount,
+                                       int64_t frameDurationUs,
+                                       int64_t audioDurationUs);
+    virtual status_t start(MetaData *params = NULL);
+    virtual status_t stop();
+    virtual sp<MetaData> getFormat();
+    virtual status_t read (MediaBuffer **buffer,
+                            const MediaSource::ReadOptions *options = NULL);
+    void setDuration (int64_t audioDurationUs);
+
+protected:
+    DummyAudioSource (int32_t samplingRate,
+                      int32_t channelCount,
+                      int64_t frameDurationUs,
+                      int64_t audioDurationUs);
+    virtual ~DummyAudioSource();
+
+private:
+    int32_t mSamplingRate;
+    int32_t mChannelCount;
+    int64_t mFrameDurationUs;
+    int32_t mNumberOfSamplePerFrame;
+    int64_t mAudioDurationUs;
+    int64_t mTimeStampUs;
+    Mutex mLock;
+
+    MediaBufferGroup *mBufferGroup;
+
+    DummyAudioSource(const DummyAudioSource &);
+    DummyAudioSource &operator=(const DummyAudioSource &);
+
+};
+
+}//namespace android
+
+
+#endif //DUMMY_AUDIOSOURCE_H_
+
diff --git a/libvideoeditor/lvpp/DummyVideoSource.cpp b/libvideoeditor/lvpp/DummyVideoSource.cpp
index e965575..44be104 100755
--- a/libvideoeditor/lvpp/DummyVideoSource.cpp
+++ b/libvideoeditor/lvpp/DummyVideoSource.cpp
@@ -1,165 +1,165 @@
-/*

- * Copyright (C) 2011 The Android Open Source Project

- *

- * Licensed under the Apache License, Version 2.0 (the "License");

- * you may not use this file except in compliance with the License.

- * You may obtain a copy of the License at

- *

- *      http://www.apache.org/licenses/LICENSE-2.0

- *

- * Unless required by applicable law or agreed to in writing, software

- * distributed under the License is distributed on an "AS IS" BASIS,

- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

- * See the License for the specific language governing permissions and

- * limitations under the License.

- */

-

-#define LOG_NDEBUG 1

-#define LOG_TAG "DummyVideoSource"

-#include "utils/Log.h"

-

-#include <media/stagefright/MediaErrors.h>

-#include <media/stagefright/MediaDebug.h>

-#include <media/stagefright/MediaDefs.h>

-#include <media/stagefright/MediaBuffer.h>

-#include <media/stagefright/MediaBufferGroup.h>

-#include <media/stagefright/MetaData.h>

-

-#include "DummyVideoSource.h"

-

-/* Android includes*/

-#include <utils/Log.h>

-#include <memory.h>

-

-

-#define LOG1 LOGE    /*ERRORS Logging*/

-#define LOG2 LOGV    /*WARNING Logging*/

-#define LOG3 //LOGV    /*COMMENTS Logging*/

-

-

-namespace android {

-

-

-sp<DummyVideoSource> DummyVideoSource::Create (

-            uint32_t width, uint32_t height,

-            uint64_t clipDuration, const char *imageUri) {

-    LOG2("DummyVideoSource::Create ");

-    sp<DummyVideoSource> vSource = new DummyVideoSource (

-                         width, height, clipDuration, imageUri);

-    return vSource;

-}

-

-

-DummyVideoSource::DummyVideoSource (

-            uint32_t width, uint32_t height,

-            uint64_t clipDuration, const char *imageUri) {

-

-    LOG2("DummyVideoSource::DummyVideoSource constructor START");

-    mFrameWidth = width;

-    mFrameHeight = height;

-    mImageClipDuration = clipDuration;

-    mUri = imageUri;

-    mImageBuffer = NULL;

-

-    LOG2("DummyVideoSource::DummyVideoSource constructor END");

-}

-

-

-DummyVideoSource::~DummyVideoSource () {

-    /* Do nothing here? */

-    LOG2("DummyVideoSource::~DummyVideoSource");

-}

-

-

-

-status_t DummyVideoSource::start(MetaData *params) {

-    status_t err = OK;

-    LOG2("DummyVideoSource::start START, %s", mUri);

-    //get the frame buffer from the rgb file and store into a MediaBuffer

-    err = LvGetImageThumbNail((const char *)mUri,

-                          mFrameHeight , mFrameWidth ,

-                          (M4OSA_Void **)&mImageBuffer);

-

-    mIsFirstImageFrame = true;

-    mImageSeekTime = 0;

-    mImagePlayStartTime = 0;

-    mFrameTimeUs = 0;

-    LOG2("DummyVideoSource::start END");

-

-    return err;

-}

-

-

-status_t DummyVideoSource::stop() {

-    status_t err = OK;

-

-    LOG2("DummyVideoSource::stop START");

-    if (mImageBuffer != NULL) {

-        free(mImageBuffer);

-        mImageBuffer = NULL;

-    }

-    LOG2("DummyVideoSource::stop END");

-

-    return err;

-}

-

-

-sp<MetaData> DummyVideoSource::getFormat() {

-    LOG2("DummyVideoSource::getFormat");

-

-    sp<MetaData> meta = new MetaData;

-

-    meta->setInt32(kKeyColorFormat, OMX_COLOR_FormatYUV420Planar);

-    meta->setInt32(kKeyWidth, mFrameWidth);

-    meta->setInt32(kKeyHeight, mFrameHeight);

-    meta->setInt64(kKeyDuration, mImageClipDuration);

-    meta->setCString(kKeyDecoderComponent, "DummyVideoSource");

-

-    return meta;

-}

-

-status_t DummyVideoSource::read(

-                        MediaBuffer **out,

-                        const MediaSource::ReadOptions *options) {

-    status_t err = OK;

-    MediaBuffer *buffer;

-    LOG2("DummyVideoSource::read START");

-

-    bool seeking = false;

-    int64_t seekTimeUs;

-    ReadOptions::SeekMode seekMode;

-

-    if (options && options->getSeekTo(&seekTimeUs, &seekMode)) {

-        seeking = true;

-        mImageSeekTime = seekTimeUs;

-        M4OSA_clockGetTime(&mImagePlayStartTime, 1000); //1000 time scale for time in ms

-    }

-

-    if ((mImageSeekTime == mImageClipDuration) || (mFrameTimeUs == (int64_t)mImageClipDuration)) {

-        LOG2("DummyVideoSource::read() End of stream reached; return NULL buffer");

-        *out = NULL;

-        return ERROR_END_OF_STREAM;

-    }

-

-    buffer = new MediaBuffer(mImageBuffer, (mFrameWidth*mFrameHeight*1.5));

-

-    //set timestamp of buffer

-    if (mIsFirstImageFrame) {

-        M4OSA_clockGetTime(&mImagePlayStartTime, 1000); //1000 time scale for time in ms

-        mFrameTimeUs =  (mImageSeekTime + 1);

-        LOG2("DummyVideoSource::read() jpg 1st frame timeUs = %lld, begin cut time = %ld", mFrameTimeUs, mImageSeekTime);

-        mIsFirstImageFrame = false;

-    } else {

-        M4OSA_Time  currentTimeMs;

-        M4OSA_clockGetTime(&currentTimeMs, 1000);

-

-        mFrameTimeUs = mImageSeekTime + (currentTimeMs - mImagePlayStartTime)*1000;

-        LOG2("DummyVideoSource::read() jpg frame timeUs = %lld", mFrameTimeUs);

-    }

-    buffer->meta_data()->setInt64(kKeyTime, mFrameTimeUs);

-    buffer->set_range(buffer->range_offset(), mFrameWidth*mFrameHeight*1.5);

-    *out = buffer;

-    return err;

-}

-

-}// namespace android

+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_NDEBUG 1
+#define LOG_TAG "DummyVideoSource"
+#include "utils/Log.h"
+
+#include <media/stagefright/MediaErrors.h>
+#include <media/stagefright/MediaDebug.h>
+#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/MediaBuffer.h>
+#include <media/stagefright/MediaBufferGroup.h>
+#include <media/stagefright/MetaData.h>
+
+#include "DummyVideoSource.h"
+
+/* Android includes*/
+#include <utils/Log.h>
+#include <memory.h>
+
+
+#define LOG1 LOGE    /*ERRORS Logging*/
+#define LOG2 LOGV    /*WARNING Logging*/
+#define LOG3 //LOGV    /*COMMENTS Logging*/
+
+
+namespace android {
+
+
+sp<DummyVideoSource> DummyVideoSource::Create (
+            uint32_t width, uint32_t height,
+            uint64_t clipDuration, const char *imageUri) {
+    LOG2("DummyVideoSource::Create ");
+    sp<DummyVideoSource> vSource = new DummyVideoSource (
+                         width, height, clipDuration, imageUri);
+    return vSource;
+}
+
+
+DummyVideoSource::DummyVideoSource (
+            uint32_t width, uint32_t height,
+            uint64_t clipDuration, const char *imageUri) {
+
+    LOG2("DummyVideoSource::DummyVideoSource constructor START");
+    mFrameWidth = width;
+    mFrameHeight = height;
+    mImageClipDuration = clipDuration;
+    mUri = imageUri;
+    mImageBuffer = NULL;
+
+    LOG2("DummyVideoSource::DummyVideoSource constructor END");
+}
+
+
+DummyVideoSource::~DummyVideoSource () {
+    /* Do nothing here? */
+    LOG2("DummyVideoSource::~DummyVideoSource");
+}
+
+
+
+status_t DummyVideoSource::start(MetaData *params) {
+    status_t err = OK;
+    LOG2("DummyVideoSource::start START, %s", mUri);
+    //get the frame buffer from the rgb file and store into a MediaBuffer
+    err = LvGetImageThumbNail((const char *)mUri,
+                          mFrameHeight , mFrameWidth ,
+                          (M4OSA_Void **)&mImageBuffer);
+
+    mIsFirstImageFrame = true;
+    mImageSeekTime = 0;
+    mImagePlayStartTime = 0;
+    mFrameTimeUs = 0;
+    LOG2("DummyVideoSource::start END");
+
+    return err;
+}
+
+
+status_t DummyVideoSource::stop() {
+    status_t err = OK;
+
+    LOG2("DummyVideoSource::stop START");
+    if (mImageBuffer != NULL) {
+        free(mImageBuffer);
+        mImageBuffer = NULL;
+    }
+    LOG2("DummyVideoSource::stop END");
+
+    return err;
+}
+
+
+sp<MetaData> DummyVideoSource::getFormat() {
+    LOG2("DummyVideoSource::getFormat");
+
+    sp<MetaData> meta = new MetaData;
+
+    meta->setInt32(kKeyColorFormat, OMX_COLOR_FormatYUV420Planar);
+    meta->setInt32(kKeyWidth, mFrameWidth);
+    meta->setInt32(kKeyHeight, mFrameHeight);
+    meta->setInt64(kKeyDuration, mImageClipDuration);
+    meta->setCString(kKeyDecoderComponent, "DummyVideoSource");
+
+    return meta;
+}
+
+status_t DummyVideoSource::read(
+                        MediaBuffer **out,
+                        const MediaSource::ReadOptions *options) {
+    status_t err = OK;
+    MediaBuffer *buffer;
+    LOG2("DummyVideoSource::read START");
+
+    bool seeking = false;
+    int64_t seekTimeUs;
+    ReadOptions::SeekMode seekMode;
+
+    if (options && options->getSeekTo(&seekTimeUs, &seekMode)) {
+        seeking = true;
+        mImageSeekTime = seekTimeUs;
+        M4OSA_clockGetTime(&mImagePlayStartTime, 1000); //1000 time scale for time in ms
+    }
+
+    if ((mImageSeekTime == mImageClipDuration) || (mFrameTimeUs == (int64_t)mImageClipDuration)) {
+        LOG2("DummyVideoSource::read() End of stream reached; return NULL buffer");
+        *out = NULL;
+        return ERROR_END_OF_STREAM;
+    }
+
+    buffer = new MediaBuffer(mImageBuffer, (mFrameWidth*mFrameHeight*1.5));
+
+    //set timestamp of buffer
+    if (mIsFirstImageFrame) {
+        M4OSA_clockGetTime(&mImagePlayStartTime, 1000); //1000 time scale for time in ms
+        mFrameTimeUs =  (mImageSeekTime + 1);
+        LOG2("DummyVideoSource::read() jpg 1st frame timeUs = %lld, begin cut time = %ld", mFrameTimeUs, mImageSeekTime);
+        mIsFirstImageFrame = false;
+    } else {
+        M4OSA_Time  currentTimeMs;
+        M4OSA_clockGetTime(&currentTimeMs, 1000);
+
+        mFrameTimeUs = mImageSeekTime + (currentTimeMs - mImagePlayStartTime)*1000;
+        LOG2("DummyVideoSource::read() jpg frame timeUs = %lld", mFrameTimeUs);
+    }
+    buffer->meta_data()->setInt64(kKeyTime, mFrameTimeUs);
+    buffer->set_range(buffer->range_offset(), mFrameWidth*mFrameHeight*1.5);
+    *out = buffer;
+    return err;
+}
+
+}// namespace android
diff --git a/libvideoeditor/lvpp/DummyVideoSource.h b/libvideoeditor/lvpp/DummyVideoSource.h
index 6fb39c5..28c33c3 100755
--- a/libvideoeditor/lvpp/DummyVideoSource.h
+++ b/libvideoeditor/lvpp/DummyVideoSource.h
@@ -1,78 +1,78 @@
-/*

- * Copyright (C) 2011 The Android Open Source Project

- *

- * Licensed under the Apache License, Version 2.0 (the "License");

- * you may not use this file except in compliance with the License.

- * You may obtain a copy of the License at

- *

- *      http://www.apache.org/licenses/LICENSE-2.0

- *

- * Unless required by applicable law or agreed to in writing, software

- * distributed under the License is distributed on an "AS IS" BASIS,

- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

- * See the License for the specific language governing permissions and

- * limitations under the License.

- */

-

-#ifndef DUMMY_VIDEOSOURCE_H_

-

-#define DUMMY_VIDEOSOURCE_H_

-

-#include <utils/RefBase.h>

-#include <utils/threads.h>

-#include <media/stagefright/MediaSource.h>

-#include <media/stagefright/DataSource.h>

-#include "OMX_IVCommon.h"

-#include "VideoEditorTools.h"

-#include "M4OSA_Clock.h"

-#include "M4OSA_Time.h"

-#include "M4OSA_Types.h"

-

-

-namespace android {

-

-

-class  MediaBuffer;

-class  MetaData;

-

-struct DummyVideoSource : public MediaSource {

-

-public:

-    static sp<DummyVideoSource> Create(uint32_t width, 

-                                       uint32_t height, 

-                                       uint64_t clipDuration, 

-                                       const char *imageUri);

-

-    virtual status_t start(MetaData *params = NULL);

-    virtual status_t stop();

-    virtual sp<MetaData> getFormat();

-    virtual status_t read(MediaBuffer **buffer, 

-                          const MediaSource::ReadOptions *options = NULL);

-

-protected:

-    DummyVideoSource (uint32_t width, uint32_t height, 

-                      uint64_t clipDuration, const char *imageUri);

-    virtual ~DummyVideoSource();

-

-private:

-    uint32_t mFrameWidth;

-    uint32_t mFrameHeight;

-    uint64_t mImageClipDuration;

-    const char *mUri;

-    int64_t mFrameTimeUs;

-    bool mIsFirstImageFrame;

-    void *mImageBuffer;

-    M4OSA_Time mImagePlayStartTime;

-    uint32_t mImageSeekTime;

-

-    DummyVideoSource(const DummyVideoSource &);

-    DummyVideoSource &operator=(const DummyVideoSource &);

-

-};

-

-

-}//namespace android

-

-

-#endif //DUMMY_VIDEOSOURCE_H_

-

+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef DUMMY_VIDEOSOURCE_H_
+
+#define DUMMY_VIDEOSOURCE_H_
+
+#include <utils/RefBase.h>
+#include <utils/threads.h>
+#include <media/stagefright/MediaSource.h>
+#include <media/stagefright/DataSource.h>
+#include "OMX_IVCommon.h"
+#include "VideoEditorTools.h"
+#include "M4OSA_Clock.h"
+#include "M4OSA_Time.h"
+#include "M4OSA_Types.h"
+
+
+namespace android {
+
+
+class  MediaBuffer;
+class  MetaData;
+
+struct DummyVideoSource : public MediaSource {
+
+public:
+    static sp<DummyVideoSource> Create(uint32_t width, 
+                                       uint32_t height, 
+                                       uint64_t clipDuration, 
+                                       const char *imageUri);
+
+    virtual status_t start(MetaData *params = NULL);
+    virtual status_t stop();
+    virtual sp<MetaData> getFormat();
+    virtual status_t read(MediaBuffer **buffer, 
+                          const MediaSource::ReadOptions *options = NULL);
+
+protected:
+    DummyVideoSource (uint32_t width, uint32_t height, 
+                      uint64_t clipDuration, const char *imageUri);
+    virtual ~DummyVideoSource();
+
+private:
+    uint32_t mFrameWidth;
+    uint32_t mFrameHeight;
+    uint64_t mImageClipDuration;
+    const char *mUri;
+    int64_t mFrameTimeUs;
+    bool mIsFirstImageFrame;
+    void *mImageBuffer;
+    M4OSA_Time mImagePlayStartTime;
+    uint32_t mImageSeekTime;
+
+    DummyVideoSource(const DummyVideoSource &);
+    DummyVideoSource &operator=(const DummyVideoSource &);
+
+};
+
+
+}//namespace android
+
+
+#endif //DUMMY_VIDEOSOURCE_H_
+
diff --git a/libvideoeditor/lvpp/PreviewRenderer.cpp b/libvideoeditor/lvpp/PreviewRenderer.cpp
index 1323ca1..9919ee9 100755
--- a/libvideoeditor/lvpp/PreviewRenderer.cpp
+++ b/libvideoeditor/lvpp/PreviewRenderer.cpp
@@ -1,293 +1,293 @@
-/*

- * Copyright (C) 2011 The Android Open Source Project

- *

- * Licensed under the Apache License, Version 2.0 (the "License");

- * you may not use this file except in compliance with the License.

- * You may obtain a copy of the License at

- *

- *      http://www.apache.org/licenses/LICENSE-2.0

- *

- * Unless required by applicable law or agreed to in writing, software

- * distributed under the License is distributed on an "AS IS" BASIS,

- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

- * See the License for the specific language governing permissions and

- * limitations under the License.

- */

-

-

-#define LOG_NDEBUG 1

-#define LOG_TAG "PreviewRenderer"

-#include <utils/Log.h>

-

-#include "PreviewRenderer.h"

-

-#include <binder/MemoryHeapBase.h>

-#include <binder/MemoryHeapPmem.h>

-#include <media/stagefright/MediaDebug.h>

-#include <surfaceflinger/Surface.h>

-

-namespace android {

-

-PreviewRenderer* PreviewRenderer::CreatePreviewRenderer (OMX_COLOR_FORMATTYPE colorFormat,

-        const sp<Surface> &surface,

-        size_t displayWidth, size_t displayHeight,

-        size_t decodedWidth, size_t decodedHeight,

-        int32_t rotationDegrees) {

-

-        PreviewRenderer* returnCtx =

-            new PreviewRenderer(colorFormat,

-            surface,

-            displayWidth, displayHeight,

-            decodedWidth, decodedHeight,

-            rotationDegrees);

-

-        int result = 0;

-

-        int halFormat;

-        switch (returnCtx->mColorFormat) {

-            case OMX_COLOR_FormatYUV420Planar:

-            {

-                halFormat = HAL_PIXEL_FORMAT_YV12;

-                returnCtx->mYUVMode = None;

-                break;

-            }

-            default:

-                halFormat = HAL_PIXEL_FORMAT_RGB_565;

-

-                returnCtx->mConverter = new ColorConverter(

-                        returnCtx->mColorFormat, OMX_COLOR_Format16bitRGB565);

-                CHECK(returnCtx->mConverter->isValid());

-                break;

-        }

-

-        CHECK(returnCtx->mSurface.get() != NULL);

-        CHECK(returnCtx->mDecodedWidth > 0);

-        CHECK(returnCtx->mDecodedHeight > 0);

-        CHECK(returnCtx->mConverter == NULL || returnCtx->mConverter->isValid());

-

-        result = native_window_set_usage(

-                    returnCtx->mSurface.get(),

-                    GRALLOC_USAGE_SW_READ_NEVER | GRALLOC_USAGE_SW_WRITE_OFTEN

-                    | GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_EXTERNAL_DISP);

-

-        if ( result == 0 ) {

-            result = native_window_set_buffer_count(returnCtx->mSurface.get(), 3);

-

-            if (result == 0) {

-                result = native_window_set_buffers_geometry(

-                    returnCtx->mSurface.get(), returnCtx->mDecodedWidth, returnCtx->mDecodedHeight,

-                    halFormat);

-                if ( result == 0) {

-                    uint32_t transform;

-                    switch (rotationDegrees) {

-                        case 0: transform = 0; break;

-                        case 90: transform = HAL_TRANSFORM_ROT_90; break;

-                        case 180: transform = HAL_TRANSFORM_ROT_180; break;

-                        case 270: transform = HAL_TRANSFORM_ROT_270; break;

-                        default: transform = 0; break;

-                    }

-                    if (transform) {

-                        result = native_window_set_buffers_transform(

-                                    returnCtx->mSurface.get(), transform);

-

-                    }

-                }

-            }

-        }

-

-        if ( result != 0 )

-        {

-            /* free the ctx */

-            returnCtx->~PreviewRenderer();

-            return NULL;

-        }

-

-        return returnCtx;

-}

-

-PreviewRenderer::PreviewRenderer(

-        OMX_COLOR_FORMATTYPE colorFormat,

-        const sp<Surface> &surface,

-        size_t displayWidth, size_t displayHeight,

-        size_t decodedWidth, size_t decodedHeight,

-        int32_t rotationDegrees)

-    : mColorFormat(colorFormat),

-      mConverter(NULL),

-      mYUVMode(None),

-      mSurface(surface),

-      mDisplayWidth(displayWidth),

-      mDisplayHeight(displayHeight),

-      mDecodedWidth(decodedWidth),

-      mDecodedHeight(decodedHeight) {

-

-    LOGV("input format = %d", mColorFormat);

-    LOGV("display = %d x %d, decoded = %d x %d",

-            mDisplayWidth, mDisplayHeight, mDecodedWidth, mDecodedHeight);

-

-    mDecodedWidth = mDisplayWidth;

-    mDecodedHeight = mDisplayHeight;

-}

-

-PreviewRenderer::~PreviewRenderer() {

-    delete mConverter;

-    mConverter = NULL;

-}

-

-

-//

-// Provides a buffer and associated stride

-// This buffer is allocated by the SurfaceFlinger

-//

-// For optimal display performances, you should :

-// 1) call getBufferYV12()

-// 2) fill the buffer with your data

-// 3) call renderYV12() to take these changes into account

-//

-// For each call to getBufferYV12(), you must also call renderYV12()

-// Expected format in the buffer is YV12 formats (similar to YUV420 planar fromat)

-// for more details on this YV12 cf hardware/libhardware/include/hardware/hardware.h

-//

-void PreviewRenderer::getBufferYV12(uint8_t **data, size_t *stride) {

-    int err = OK;

-    LOGV("getBuffer START");

-

-    if ((err = mSurface->dequeueBuffer(mSurface.get(), &mBuf)) != 0) {

-        LOGW("Surface::dequeueBuffer returned error %d", err);

-        return;

-    }

-

-    CHECK_EQ(0, mSurface->lockBuffer(mSurface.get(), mBuf));

-

-    GraphicBufferMapper &mapper = GraphicBufferMapper::get();

-

-    Rect bounds(mDecodedWidth, mDecodedHeight);

-

-    void *dst;

-    CHECK_EQ(0, mapper.lock(

-                mBuf->handle, GRALLOC_USAGE_SW_WRITE_OFTEN, bounds, &dst));

-    LOGV("Buffer locked");

-

-    *data   = (uint8_t*)dst;

-    *stride = mBuf->stride;

-

-    LOGV("getBuffer END %p %d", dst, mBuf->stride);

-}

-

-

-//

-// Display the content of the buffer provided by last call to getBufferYV12()

-//

-// See getBufferYV12() for details.

-//

-void PreviewRenderer::renderYV12() {

-    LOGV("renderYV12() START");

-    int err = OK;

-

-    GraphicBufferMapper &mapper = GraphicBufferMapper::get();

-

-    if (mBuf!= NULL) {

-        CHECK_EQ(0, mapper.unlock(mBuf->handle));

-

-        if ((err = mSurface->queueBuffer(mSurface.get(), mBuf)) != 0) {

-            LOGW("Surface::queueBuffer returned error %d", err);

-        }

-    }

-    mBuf = NULL;

-    LOGV("renderYV12() END");

-}

-

-

-

-//

-// Display the given data buffer

-// platformPrivate is not used (kept for backwrad compatibility)

-// Please rather use getbuffer() and the other render()functions (with no params)

-// for optimal display

-//

-void PreviewRenderer::render(

-        const void *data, size_t size, void *platformPrivate) {

-    ANativeWindowBuffer *buf;

-    int err;

-

-    if ((err = mSurface->dequeueBuffer(mSurface.get(), &buf)) != 0) {

-        LOGW("Surface::dequeueBuffer returned error %d", err);

-        return;

-    }

-

-    CHECK_EQ(0, mSurface->lockBuffer(mSurface.get(), buf));

-

-    GraphicBufferMapper &mapper = GraphicBufferMapper::get();

-

-    Rect bounds(mDecodedWidth, mDecodedHeight);

-

-    void *dst;

-    CHECK_EQ(0, mapper.lock(

-                buf->handle, GRALLOC_USAGE_SW_WRITE_OFTEN, bounds, &dst));

-    LOGV("Buffer locked");

-

-    if (mConverter) {

-        LOGV("Convert to RGB565");

-        mConverter->convert(data,

-                mDecodedWidth, mDecodedHeight,

-                0,0,mDecodedWidth, mDecodedHeight,

-                dst, mDecodedWidth, mDecodedHeight,

-                0,0,mDecodedWidth, mDecodedHeight);

-    } else if (mYUVMode == None) {

-        // Input and output are both YUV420sp, but the alignment requirements

-        // are different.

-        LOGV("mYUVMode == None %d x %d", mDecodedWidth, mDecodedHeight);

-        size_t srcYStride = mDecodedWidth;

-        const uint8_t *srcY = (const uint8_t *)data;

-        uint8_t *dstY = (uint8_t *)dst;

-        LOGV("srcY =       %p   dstY =       %p", srcY, dstY);

-        LOGV("srcYStride = %d   dstYstride = %d", srcYStride, buf->stride);

-        for (size_t i = 0; i < mDecodedHeight; ++i) {

-            memcpy(dstY, srcY, mDecodedWidth);

-            srcY += srcYStride;

-            dstY += buf->stride;

-        }

-

-        size_t srcUVStride = (mDecodedWidth + 1) / 2;

-        size_t dstUVStride = ALIGN(mDecodedWidth / 2, 32);

-        LOGV("srcUVStride = %d   dstUVStride = %d", srcUVStride, dstUVStride);

-

-        // Copy V

-        // Source buffer is YUV, skip U

-        const uint8_t *srcV = (const uint8_t *)data

-                + mDecodedHeight * mDecodedWidth + (mDecodedHeight * mDecodedWidth)/4;

-        // Destination buffer is YVU

-        uint8_t *dstUV = (uint8_t *)dst

-                + buf->stride*mDecodedHeight;

-        LOGV("srcV =       %p   dstUV =       %p", srcV, dstUV);

-        for (size_t i = 0; i < (mDecodedHeight+1)/2; ++i) {

-            memcpy(dstUV, srcV, mDecodedWidth/2);

-            srcV += srcUVStride;

-            dstUV += dstUVStride;

-        }

-

-

-        // Copy V

-        // Source buffer is YUV, go back to end of Y

-        const uint8_t *srcU = (const uint8_t *)data

-            + mDecodedHeight * mDecodedWidth ;

-        // Destination buffer is YVU

-        // Keep writing after V buffer has been filled, U follows immediately

-        LOGV("srcU =       %p   dstUV =       %p", srcU, dstUV);

-        for (size_t i = 0; i < (mDecodedHeight+1)/2; ++i) {

-            memcpy(dstUV, srcU, mDecodedWidth/2);

-            srcU += srcUVStride;

-            dstUV += dstUVStride;

-        }

-    } else {

-        memcpy(dst, data, size);

-    }

-

-    CHECK_EQ(0, mapper.unlock(buf->handle));

-

-    if ((err = mSurface->queueBuffer(mSurface.get(), buf)) != 0) {

-        LOGW("Surface::queueBuffer returned error %d", err);

-    }

-    buf = NULL;

-}

-

-}  // namespace android

+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+#define LOG_NDEBUG 1
+#define LOG_TAG "PreviewRenderer"
+#include <utils/Log.h>
+
+#include "PreviewRenderer.h"
+
+#include <binder/MemoryHeapBase.h>
+#include <binder/MemoryHeapPmem.h>
+#include <media/stagefright/MediaDebug.h>
+#include <surfaceflinger/Surface.h>
+
+namespace android {
+
+PreviewRenderer* PreviewRenderer::CreatePreviewRenderer (OMX_COLOR_FORMATTYPE colorFormat,
+        const sp<Surface> &surface,
+        size_t displayWidth, size_t displayHeight,
+        size_t decodedWidth, size_t decodedHeight,
+        int32_t rotationDegrees) {
+
+        PreviewRenderer* returnCtx =
+            new PreviewRenderer(colorFormat,
+            surface,
+            displayWidth, displayHeight,
+            decodedWidth, decodedHeight,
+            rotationDegrees);
+
+        int result = 0;
+
+        int halFormat;
+        switch (returnCtx->mColorFormat) {
+            case OMX_COLOR_FormatYUV420Planar:
+            {
+                halFormat = HAL_PIXEL_FORMAT_YV12;
+                returnCtx->mYUVMode = None;
+                break;
+            }
+            default:
+                halFormat = HAL_PIXEL_FORMAT_RGB_565;
+
+                returnCtx->mConverter = new ColorConverter(
+                        returnCtx->mColorFormat, OMX_COLOR_Format16bitRGB565);
+                CHECK(returnCtx->mConverter->isValid());
+                break;
+        }
+
+        CHECK(returnCtx->mSurface.get() != NULL);
+        CHECK(returnCtx->mDecodedWidth > 0);
+        CHECK(returnCtx->mDecodedHeight > 0);
+        CHECK(returnCtx->mConverter == NULL || returnCtx->mConverter->isValid());
+
+        result = native_window_set_usage(
+                    returnCtx->mSurface.get(),
+                    GRALLOC_USAGE_SW_READ_NEVER | GRALLOC_USAGE_SW_WRITE_OFTEN
+                    | GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_EXTERNAL_DISP);
+
+        if ( result == 0 ) {
+            result = native_window_set_buffer_count(returnCtx->mSurface.get(), 3);
+
+            if (result == 0) {
+                result = native_window_set_buffers_geometry(
+                    returnCtx->mSurface.get(), returnCtx->mDecodedWidth, returnCtx->mDecodedHeight,
+                    halFormat);
+                if ( result == 0) {
+                    uint32_t transform;
+                    switch (rotationDegrees) {
+                        case 0: transform = 0; break;
+                        case 90: transform = HAL_TRANSFORM_ROT_90; break;
+                        case 180: transform = HAL_TRANSFORM_ROT_180; break;
+                        case 270: transform = HAL_TRANSFORM_ROT_270; break;
+                        default: transform = 0; break;
+                    }
+                    if (transform) {
+                        result = native_window_set_buffers_transform(
+                                    returnCtx->mSurface.get(), transform);
+
+                    }
+                }
+            }
+        }
+
+        if ( result != 0 )
+        {
+            /* free the ctx */
+            returnCtx->~PreviewRenderer();
+            return NULL;
+        }
+
+        return returnCtx;
+}
+
+PreviewRenderer::PreviewRenderer(
+        OMX_COLOR_FORMATTYPE colorFormat,
+        const sp<Surface> &surface,
+        size_t displayWidth, size_t displayHeight,
+        size_t decodedWidth, size_t decodedHeight,
+        int32_t rotationDegrees)
+    : mColorFormat(colorFormat),
+      mConverter(NULL),
+      mYUVMode(None),
+      mSurface(surface),
+      mDisplayWidth(displayWidth),
+      mDisplayHeight(displayHeight),
+      mDecodedWidth(decodedWidth),
+      mDecodedHeight(decodedHeight) {
+
+    LOGV("input format = %d", mColorFormat);
+    LOGV("display = %d x %d, decoded = %d x %d",
+            mDisplayWidth, mDisplayHeight, mDecodedWidth, mDecodedHeight);
+
+    mDecodedWidth = mDisplayWidth;
+    mDecodedHeight = mDisplayHeight;
+}
+
+PreviewRenderer::~PreviewRenderer() {
+    delete mConverter;
+    mConverter = NULL;
+}
+
+
+//
+// Provides a buffer and associated stride
+// This buffer is allocated by the SurfaceFlinger
+//
+// For optimal display performances, you should :
+// 1) call getBufferYV12()
+// 2) fill the buffer with your data
+// 3) call renderYV12() to take these changes into account
+//
+// For each call to getBufferYV12(), you must also call renderYV12()
+// Expected format in the buffer is YV12 formats (similar to YUV420 planar fromat)
+// for more details on this YV12 cf hardware/libhardware/include/hardware/hardware.h
+//
+void PreviewRenderer::getBufferYV12(uint8_t **data, size_t *stride) {
+    int err = OK;
+    LOGV("getBuffer START");
+
+    if ((err = mSurface->dequeueBuffer(mSurface.get(), &mBuf)) != 0) {
+        LOGW("Surface::dequeueBuffer returned error %d", err);
+        return;
+    }
+
+    CHECK_EQ(0, mSurface->lockBuffer(mSurface.get(), mBuf));
+
+    GraphicBufferMapper &mapper = GraphicBufferMapper::get();
+
+    Rect bounds(mDecodedWidth, mDecodedHeight);
+
+    void *dst;
+    CHECK_EQ(0, mapper.lock(
+                mBuf->handle, GRALLOC_USAGE_SW_WRITE_OFTEN, bounds, &dst));
+    LOGV("Buffer locked");
+
+    *data   = (uint8_t*)dst;
+    *stride = mBuf->stride;
+
+    LOGV("getBuffer END %p %d", dst, mBuf->stride);
+}
+
+
+//
+// Display the content of the buffer provided by last call to getBufferYV12()
+//
+// See getBufferYV12() for details.
+//
+void PreviewRenderer::renderYV12() {
+    LOGV("renderYV12() START");
+    int err = OK;
+
+    GraphicBufferMapper &mapper = GraphicBufferMapper::get();
+
+    if (mBuf!= NULL) {
+        CHECK_EQ(0, mapper.unlock(mBuf->handle));
+
+        if ((err = mSurface->queueBuffer(mSurface.get(), mBuf)) != 0) {
+            LOGW("Surface::queueBuffer returned error %d", err);
+        }
+    }
+    mBuf = NULL;
+    LOGV("renderYV12() END");
+}
+
+
+
+//
+// Display the given data buffer
+// platformPrivate is not used (kept for backwrad compatibility)
+// Please rather use getbuffer() and the other render()functions (with no params)
+// for optimal display
+//
+void PreviewRenderer::render(
+        const void *data, size_t size, void *platformPrivate) {
+    ANativeWindowBuffer *buf;
+    int err;
+
+    if ((err = mSurface->dequeueBuffer(mSurface.get(), &buf)) != 0) {
+        LOGW("Surface::dequeueBuffer returned error %d", err);
+        return;
+    }
+
+    CHECK_EQ(0, mSurface->lockBuffer(mSurface.get(), buf));
+
+    GraphicBufferMapper &mapper = GraphicBufferMapper::get();
+
+    Rect bounds(mDecodedWidth, mDecodedHeight);
+
+    void *dst;
+    CHECK_EQ(0, mapper.lock(
+                buf->handle, GRALLOC_USAGE_SW_WRITE_OFTEN, bounds, &dst));
+    LOGV("Buffer locked");
+
+    if (mConverter) {
+        LOGV("Convert to RGB565");
+        mConverter->convert(data,
+                mDecodedWidth, mDecodedHeight,
+                0,0,mDecodedWidth, mDecodedHeight,
+                dst, mDecodedWidth, mDecodedHeight,
+                0,0,mDecodedWidth, mDecodedHeight);
+    } else if (mYUVMode == None) {
+        // Input and output are both YUV420sp, but the alignment requirements
+        // are different.
+        LOGV("mYUVMode == None %d x %d", mDecodedWidth, mDecodedHeight);
+        size_t srcYStride = mDecodedWidth;
+        const uint8_t *srcY = (const uint8_t *)data;
+        uint8_t *dstY = (uint8_t *)dst;
+        LOGV("srcY =       %p   dstY =       %p", srcY, dstY);
+        LOGV("srcYStride = %d   dstYstride = %d", srcYStride, buf->stride);
+        for (size_t i = 0; i < mDecodedHeight; ++i) {
+            memcpy(dstY, srcY, mDecodedWidth);
+            srcY += srcYStride;
+            dstY += buf->stride;
+        }
+
+        size_t srcUVStride = (mDecodedWidth + 1) / 2;
+        size_t dstUVStride = ALIGN(mDecodedWidth / 2, 32);
+        LOGV("srcUVStride = %d   dstUVStride = %d", srcUVStride, dstUVStride);
+
+        // Copy V
+        // Source buffer is YUV, skip U
+        const uint8_t *srcV = (const uint8_t *)data
+                + mDecodedHeight * mDecodedWidth + (mDecodedHeight * mDecodedWidth)/4;
+        // Destination buffer is YVU
+        uint8_t *dstUV = (uint8_t *)dst
+                + buf->stride*mDecodedHeight;
+        LOGV("srcV =       %p   dstUV =       %p", srcV, dstUV);
+        for (size_t i = 0; i < (mDecodedHeight+1)/2; ++i) {
+            memcpy(dstUV, srcV, mDecodedWidth/2);
+            srcV += srcUVStride;
+            dstUV += dstUVStride;
+        }
+
+
+        // Copy V
+        // Source buffer is YUV, go back to end of Y
+        const uint8_t *srcU = (const uint8_t *)data
+            + mDecodedHeight * mDecodedWidth ;
+        // Destination buffer is YVU
+        // Keep writing after V buffer has been filled, U follows immediately
+        LOGV("srcU =       %p   dstUV =       %p", srcU, dstUV);
+        for (size_t i = 0; i < (mDecodedHeight+1)/2; ++i) {
+            memcpy(dstUV, srcU, mDecodedWidth/2);
+            srcU += srcUVStride;
+            dstUV += dstUVStride;
+        }
+    } else {
+        memcpy(dst, data, size);
+    }
+
+    CHECK_EQ(0, mapper.unlock(buf->handle));
+
+    if ((err = mSurface->queueBuffer(mSurface.get(), buf)) != 0) {
+        LOGW("Surface::queueBuffer returned error %d", err);
+    }
+    buf = NULL;
+}
+
+}  // namespace android
diff --git a/libvideoeditor/lvpp/PreviewRenderer.h b/libvideoeditor/lvpp/PreviewRenderer.h
index 4c9fa40..b215f6d 100755
--- a/libvideoeditor/lvpp/PreviewRenderer.h
+++ b/libvideoeditor/lvpp/PreviewRenderer.h
@@ -1,82 +1,82 @@
-/*

- * Copyright (C) 2011 The Android Open Source Project

- *

- * Licensed under the Apache License, Version 2.0 (the "License");

- * you may not use this file except in compliance with the License.

- * You may obtain a copy of the License at

- *

- *      http://www.apache.org/licenses/LICENSE-2.0

- *

- * Unless required by applicable law or agreed to in writing, software

- * distributed under the License is distributed on an "AS IS" BASIS,

- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

- * See the License for the specific language governing permissions and

- * limitations under the License.

- */

-

-#ifndef PREVIEW_RENDERER_H_

-

-#define PREVIEW_RENDERER_H_

-

-#include <media/stagefright/ColorConverter.h>

-#include <utils/RefBase.h>

-#include <ui/android_native_buffer.h>

-#include <ui/GraphicBufferMapper.h>

-#include "SoftwareRenderer.h"

-

-

-namespace android {

-

-class Surface;

-

-class PreviewRenderer {

-public:

-

-static PreviewRenderer* CreatePreviewRenderer (OMX_COLOR_FORMATTYPE colorFormat,

-        const sp<Surface> &surface,

-        size_t displayWidth, size_t displayHeight,

-        size_t decodedWidth, size_t decodedHeight,

-        int32_t rotationDegrees);

-

-    ~PreviewRenderer();

-

-    void render(

-            const void *data, size_t size, void *platformPrivate);

-

-    void getBufferYV12(uint8_t **data, size_t *stride);

-

-    void renderYV12();

-

-    static size_t ALIGN(size_t x, size_t alignment) {

-        return (x + alignment - 1) & ~(alignment - 1);

-    }

-

-private:

-    PreviewRenderer(

-            OMX_COLOR_FORMATTYPE colorFormat,

-            const sp<Surface> &surface,

-            size_t displayWidth, size_t displayHeight,

-            size_t decodedWidth, size_t decodedHeight,

-            int32_t rotationDegrees);

-    enum YUVMode {

-        None,

-        YUV420ToYUV420sp,

-        YUV420spToYUV420sp,

-    };

-

-    OMX_COLOR_FORMATTYPE mColorFormat;

-    ColorConverter *mConverter;

-    YUVMode mYUVMode;

-    sp<Surface> mSurface;

-    size_t mDisplayWidth, mDisplayHeight;

-    size_t mDecodedWidth, mDecodedHeight;

-

-    ANativeWindowBuffer *mBuf;

-

-    PreviewRenderer(const PreviewRenderer &);

-    PreviewRenderer &operator=(const PreviewRenderer &);

-};

-

-}  // namespace android

-

-#endif  // PREVIEW_RENDERER_H_

+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef PREVIEW_RENDERER_H_
+
+#define PREVIEW_RENDERER_H_
+
+#include <media/stagefright/ColorConverter.h>
+#include <utils/RefBase.h>
+#include <ui/android_native_buffer.h>
+#include <ui/GraphicBufferMapper.h>
+#include "SoftwareRenderer.h"
+
+
+namespace android {
+
+class Surface;
+
+class PreviewRenderer {
+public:
+
+static PreviewRenderer* CreatePreviewRenderer (OMX_COLOR_FORMATTYPE colorFormat,
+        const sp<Surface> &surface,
+        size_t displayWidth, size_t displayHeight,
+        size_t decodedWidth, size_t decodedHeight,
+        int32_t rotationDegrees);
+
+    ~PreviewRenderer();
+
+    void render(
+            const void *data, size_t size, void *platformPrivate);
+
+    void getBufferYV12(uint8_t **data, size_t *stride);
+
+    void renderYV12();
+
+    static size_t ALIGN(size_t x, size_t alignment) {
+        return (x + alignment - 1) & ~(alignment - 1);
+    }
+
+private:
+    PreviewRenderer(
+            OMX_COLOR_FORMATTYPE colorFormat,
+            const sp<Surface> &surface,
+            size_t displayWidth, size_t displayHeight,
+            size_t decodedWidth, size_t decodedHeight,
+            int32_t rotationDegrees);
+    enum YUVMode {
+        None,
+        YUV420ToYUV420sp,
+        YUV420spToYUV420sp,
+    };
+
+    OMX_COLOR_FORMATTYPE mColorFormat;
+    ColorConverter *mConverter;
+    YUVMode mYUVMode;
+    sp<Surface> mSurface;
+    size_t mDisplayWidth, mDisplayHeight;
+    size_t mDecodedWidth, mDecodedHeight;
+
+    ANativeWindowBuffer *mBuf;
+
+    PreviewRenderer(const PreviewRenderer &);
+    PreviewRenderer &operator=(const PreviewRenderer &);
+};
+
+}  // namespace android
+
+#endif  // PREVIEW_RENDERER_H_
diff --git a/libvideoeditor/lvpp/VideoEditorAudioPlayer.h b/libvideoeditor/lvpp/VideoEditorAudioPlayer.h
index b7eaa31..450a70a 100755
--- a/libvideoeditor/lvpp/VideoEditorAudioPlayer.h
+++ b/libvideoeditor/lvpp/VideoEditorAudioPlayer.h
@@ -1,96 +1,96 @@
-/*

- * Copyright (C) 2011 The Android Open Source Project

- *

- * Licensed under the Apache License, Version 2.0 (the "License");

- * you may not use this file except in compliance with the License.

- * You may obtain a copy of the License at

- *

- *      http://www.apache.org/licenses/LICENSE-2.0

- *

- * Unless required by applicable law or agreed to in writing, software

- * distributed under the License is distributed on an "AS IS" BASIS,

- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

- * See the License for the specific language governing permissions and

- * limitations under the License.

- */

-

-#ifndef VE_AUDIO_PLAYER_H_

-

-#define VE_AUDIO_PLAYER_H_

-

-#include <media/MediaPlayerInterface.h>

-#include <media/stagefright/MediaBuffer.h>

-#include <media/stagefright/TimeSource.h>

-#include <utils/threads.h>

-#include "M4xVSS_API.h"

-#include "VideoEditorMain.h"

-#include "M4OSA_FileReader.h"

-#include "VideoEditorBGAudioProcessing.h"

-#include "AudioPlayerBase.h"

-#include "PreviewPlayerBase.h"

-

-namespace android {

-

-class MediaSource;

-class AudioTrack;

-class PreviewPlayer;

-

-

-class VideoEditorAudioPlayer : public AudioPlayerBase {

-public:

-    enum {

-        REACHED_EOS,

-        SEEK_COMPLETE

-    };

-

-    VideoEditorAudioPlayer(const sp<MediaPlayerBase::AudioSink> &audioSink,

-        PreviewPlayerBase *audioObserver = NULL);

-

-    virtual ~VideoEditorAudioPlayer();

-

-    status_t start(bool sourceAlreadyStarted = false);

-    void resume();

-

-    void setAudioMixSettings(M4xVSS_AudioMixingSettings* pAudioMixSettings);

-    void setAudioMixPCMFileHandle(M4OSA_Context pBGAudioPCMFileHandle);

-    void setAudioMixStoryBoardSkimTimeStamp(

-        M4OSA_UInt32 pBGAudioStoryBoardSkimTimeStamp,

-        M4OSA_UInt32 pBGAudioCurrentMediaBeginCutTS,

-        M4OSA_UInt32 pBGAudioCurrentMediaVolumeVal);

-

-    void setObserver(PreviewPlayerBase *observer);

-    void setSource(const sp<MediaSource> &source);

-    sp<MediaSource> getSource();

-

-    bool isStarted();

-private:

-

-    M4xVSS_AudioMixingSettings *mAudioMixSettings;

-    VideoEditorBGAudioProcessing *mAudioProcess;

-

-    M4OSA_Context mBGAudioPCMFileHandle;

-    int64_t mBGAudioPCMFileLength;

-    int64_t mBGAudioPCMFileTrimmedLength;

-    int64_t mBGAudioPCMFileDuration;

-    int64_t mBGAudioPCMFileSeekPoint;

-    int64_t mBGAudioPCMFileOriginalSeekPoint;

-    int64_t mBGAudioStoryBoardSkimTimeStamp;

-    int64_t mBGAudioStoryBoardCurrentMediaBeginCutTS;

-    int64_t mBGAudioStoryBoardCurrentMediaVolumeVal;

-

-    size_t fillBuffer(void *data, size_t size);

-

-    void reset();

-    void setPrimaryTrackVolume(M4OSA_Int16 *data, M4OSA_UInt32 size, M4OSA_Float volLevel);

-

-    static size_t AudioSinkCallback(

-            MediaPlayerBase::AudioSink *audioSink,

-            void *data, size_t size, void *me);

-

-    VideoEditorAudioPlayer(const VideoEditorAudioPlayer &);

-    VideoEditorAudioPlayer &operator=(const VideoEditorAudioPlayer &);

-};

-

-}  // namespace android

-

-#endif  // VE_AUDIO_PLAYER_H_

+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef VE_AUDIO_PLAYER_H_
+
+#define VE_AUDIO_PLAYER_H_
+
+#include <media/MediaPlayerInterface.h>
+#include <media/stagefright/MediaBuffer.h>
+#include <media/stagefright/TimeSource.h>
+#include <utils/threads.h>
+#include "M4xVSS_API.h"
+#include "VideoEditorMain.h"
+#include "M4OSA_FileReader.h"
+#include "VideoEditorBGAudioProcessing.h"
+#include "AudioPlayerBase.h"
+#include "PreviewPlayerBase.h"
+
+namespace android {
+
+class MediaSource;
+class AudioTrack;
+class PreviewPlayer;
+
+
+class VideoEditorAudioPlayer : public AudioPlayerBase {
+public:
+    enum {
+        REACHED_EOS,
+        SEEK_COMPLETE
+    };
+
+    VideoEditorAudioPlayer(const sp<MediaPlayerBase::AudioSink> &audioSink,
+        PreviewPlayerBase *audioObserver = NULL);
+
+    virtual ~VideoEditorAudioPlayer();
+
+    status_t start(bool sourceAlreadyStarted = false);
+    void resume();
+
+    void setAudioMixSettings(M4xVSS_AudioMixingSettings* pAudioMixSettings);
+    void setAudioMixPCMFileHandle(M4OSA_Context pBGAudioPCMFileHandle);
+    void setAudioMixStoryBoardSkimTimeStamp(
+        M4OSA_UInt32 pBGAudioStoryBoardSkimTimeStamp,
+        M4OSA_UInt32 pBGAudioCurrentMediaBeginCutTS,
+        M4OSA_UInt32 pBGAudioCurrentMediaVolumeVal);
+
+    void setObserver(PreviewPlayerBase *observer);
+    void setSource(const sp<MediaSource> &source);
+    sp<MediaSource> getSource();
+
+    bool isStarted();
+private:
+
+    M4xVSS_AudioMixingSettings *mAudioMixSettings;
+    VideoEditorBGAudioProcessing *mAudioProcess;
+
+    M4OSA_Context mBGAudioPCMFileHandle;
+    int64_t mBGAudioPCMFileLength;
+    int64_t mBGAudioPCMFileTrimmedLength;
+    int64_t mBGAudioPCMFileDuration;
+    int64_t mBGAudioPCMFileSeekPoint;
+    int64_t mBGAudioPCMFileOriginalSeekPoint;
+    int64_t mBGAudioStoryBoardSkimTimeStamp;
+    int64_t mBGAudioStoryBoardCurrentMediaBeginCutTS;
+    int64_t mBGAudioStoryBoardCurrentMediaVolumeVal;
+
+    size_t fillBuffer(void *data, size_t size);
+
+    void reset();
+    void setPrimaryTrackVolume(M4OSA_Int16 *data, M4OSA_UInt32 size, M4OSA_Float volLevel);
+
+    static size_t AudioSinkCallback(
+            MediaPlayerBase::AudioSink *audioSink,
+            void *data, size_t size, void *me);
+
+    VideoEditorAudioPlayer(const VideoEditorAudioPlayer &);
+    VideoEditorAudioPlayer &operator=(const VideoEditorAudioPlayer &);
+};
+
+}  // namespace android
+
+#endif  // VE_AUDIO_PLAYER_H_
diff --git a/libvideoeditor/lvpp/VideoEditorBGAudioProcessing.cpp b/libvideoeditor/lvpp/VideoEditorBGAudioProcessing.cpp
index 80dc9f9..e3bbdf2 100755
--- a/libvideoeditor/lvpp/VideoEditorBGAudioProcessing.cpp
+++ b/libvideoeditor/lvpp/VideoEditorBGAudioProcessing.cpp
@@ -1,315 +1,315 @@
-/*

- * Copyright (C) 2011 The Android Open Source Project

- *

- * Licensed under the Apache License, Version 2.0 (the "License");

- * you may not use this file except in compliance with the License.

- * You may obtain a copy of the License at

- *

- *      http://www.apache.org/licenses/LICENSE-2.0

- *

- * Unless required by applicable law or agreed to in writing, software

- * distributed under the License is distributed on an "AS IS" BASIS,

- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

- * See the License for the specific language governing permissions and

- * limitations under the License.

- */

-

-#define LOG_NDEBUG 1

-#define LOG_TAG "VideoEditorBGAudioProcessing"

-#include <utils/Log.h>

-#include "VideoEditorBGAudioProcessing.h"

-

-namespace android {

-

-VideoEditorBGAudioProcessing::VideoEditorBGAudioProcessing() {

-

-    LOGV("VideoEditorBGAudioProcessing:: Construct  VideoEditorBGAudioProcessing ");

-

-    mAudVolArrIndex = 0;

-    mDoDucking = 0;

-    mDucking_enable = 0;

-    mDucking_lowVolume = 0;

-    mDucking_threshold = 0;

-    mDuckingFactor = 0;

-

-    mBTVolLevel = 0;

-    mPTVolLevel = 0;

-

-    mIsSSRCneeded = 0;

-    mChannelConversion = 0;

-

-    mBTFormat = MONO_16_BIT;

-

-    mInSampleRate = 8000;

-    mOutSampleRate = 16000;

-    mPTChannelCount = 2;

-    mBTChannelCount = 1;

-}

-

-M4OSA_Int32 VideoEditorBGAudioProcessing::veProcessAudioMixNDuck(

-        void *pPTBuffer, void *pBTBuffer, void *pOutBuffer) {

-

-    M4AM_Buffer16* pPrimaryTrack   = (M4AM_Buffer16*)pPTBuffer;

-    M4AM_Buffer16* pBackgroundTrack = (M4AM_Buffer16*)pBTBuffer;

-    M4AM_Buffer16* pMixedOutBuffer  = (M4AM_Buffer16*)pOutBuffer;

-

-    LOGV("VideoEditorBGAudioProcessing::lvProcessAudioMixNDuck \

-        pPTBuffer 0x%x pBTBuffer 0x%x pOutBuffer 0x%x", pPTBuffer,

-        pBTBuffer, pOutBuffer);

-

-    M4OSA_ERR result = M4NO_ERROR;

-    M4OSA_Int16 *pBTMdata1;

-    M4OSA_Int16 *pPTMdata2;

-    M4OSA_UInt32 uiPCMsize;

-

-    // Ducking variable

-    M4OSA_UInt16 loopIndex = 0;

-    M4OSA_Int16 *pPCM16Sample = M4OSA_NULL;

-    M4OSA_Int32 peakDbValue = 0;

-    M4OSA_Int32 previousDbValue = 0;

-    M4OSA_UInt32 i;

-

-    // Output size if same as PT size

-    pMixedOutBuffer->m_bufferSize = pPrimaryTrack->m_bufferSize;

-

-    // Before mixing, we need to have only PT as out buffer

-    memcpy((void *)pMixedOutBuffer->m_dataAddress,

-        (void *)pPrimaryTrack->m_dataAddress, pMixedOutBuffer->m_bufferSize);

-

-    // Initially contains the input primary track

-    pPTMdata2 = (M4OSA_Int16*)pMixedOutBuffer->m_dataAddress;

-    // Contains BG track processed data(like channel conversion etc..

-    pBTMdata1 = (M4OSA_Int16*) pBackgroundTrack->m_dataAddress;

-

-    // Since we need to give sample count and not buffer size

-    uiPCMsize = pMixedOutBuffer->m_bufferSize/2 ;

-

-    if ((mDucking_enable) && (mPTVolLevel != 0.0)) {

-        // LOGI("VideoEditorBGAudioProcessing:: In Ducking analysis ");

-        loopIndex = 0;

-        peakDbValue = 0;

-        previousDbValue = peakDbValue;

-

-        pPCM16Sample = (M4OSA_Int16*)pPrimaryTrack->m_dataAddress;

-

-        while (loopIndex < pPrimaryTrack->m_bufferSize/sizeof(M4OSA_Int16)) {

-            if (pPCM16Sample[loopIndex] >= 0) {

-                peakDbValue = previousDbValue > pPCM16Sample[loopIndex] ?

-                        previousDbValue : pPCM16Sample[loopIndex];

-                previousDbValue = peakDbValue;

-            } else {

-                peakDbValue = previousDbValue > -pPCM16Sample[loopIndex] ?

-                        previousDbValue: -pPCM16Sample[loopIndex];

-                previousDbValue = peakDbValue;

-            }

-            loopIndex++;

-        }

-

-        mAudioVolumeArray[mAudVolArrIndex] = getDecibelSound(peakDbValue);

-

-        LOGV("VideoEditorBGAudioProcessing:: getDecibelSound %d",

-            mAudioVolumeArray[mAudVolArrIndex]);

-

-        // WINDOW_SIZE is 10 by default

-        // Check for threshold is done after 10 cycles

-        if (mAudVolArrIndex >= WINDOW_SIZE - 1) {

-            mDoDucking = isThresholdBreached(mAudioVolumeArray,

-            mAudVolArrIndex,mDucking_threshold );

-            mAudVolArrIndex = 0;

-        } else {

-            mAudVolArrIndex++;

-        }

-

-        //

-        // Below logic controls the mixing weightage

-        // for Background and Primary Tracks

-        // for the duration of window under analysis,

-        // to give fade-out for Background and fade-in for primary

-        // Current fading factor is distributed in equal range over

-        // the defined window size.

-        // For a window size = 25

-        // (500 ms (window under analysis) / 20 ms (sample duration))

-        //

-

-        if (mDoDucking) {

-            if (mDuckingFactor > mDucking_lowVolume) {

-                // FADE OUT BG Track

-                // Increment ducking factor in total steps in factor

-                // of low volume steps to reach low volume level

-                mDuckingFactor -= (mDucking_lowVolume);

-            } else {

-                mDuckingFactor = mDucking_lowVolume;

-            }

-        } else {

-            if (mDuckingFactor < 1.0 ) {

-                // FADE IN BG Track

-                // Increment ducking factor in total steps of

-                // low volume factor to reach orig.volume level

-                mDuckingFactor += (mDucking_lowVolume);

-            } else {

-                mDuckingFactor = 1.0;

-            }

-        }

-    } // end if - mDucking_enable

-

-

-    // Mixing Logic

-

-    LOGV("VideoEditorBGAudioProcessing:: Out of Ducking analysis uiPCMsize\

-        %d %f %f", mDoDucking, mDuckingFactor,mBTVolLevel);

-

-    while (uiPCMsize-- > 0) {

-

-        M4OSA_Int32 temp;

-        // Set vol factor for BT and PT

-        *pBTMdata1 = (M4OSA_Int16)(*pBTMdata1*mBTVolLevel);

-        *pPTMdata2 = (M4OSA_Int16)(*pPTMdata2*mPTVolLevel);

-

-        // Mix the two samples

-        if (mDoDucking) {

-

-            // Duck the BG track to ducking factor value before mixing

-            *pBTMdata1 = (M4OSA_Int16)((*pBTMdata1)*(mDuckingFactor));

-

-            // mix as normal case

-            *pBTMdata1 = (M4OSA_Int16)(*pBTMdata1 /2 + *pPTMdata2 /2);

-        } else {

-

-            *pBTMdata1 = (M4OSA_Int16)((*pBTMdata1)*(mDuckingFactor));

-            *pBTMdata1 = (M4OSA_Int16)(*pBTMdata1 /2 + *pPTMdata2 /2);

-        }

-

-        if (*pBTMdata1 < 0) {

-            temp = -(*pBTMdata1) * 2; // bring to original Amplitude level

-

-            if (temp > 32767) {

-                *pBTMdata1 = -32766; // less then max allowed value

-            } else {

-                *pBTMdata1 = (M4OSA_Int16)(-temp);

-            }

-        } else {

-            temp = (*pBTMdata1) * 2; // bring to original Amplitude level

-            if ( temp > 32768) {

-                *pBTMdata1 = 32767; // less than max allowed value

-            } else {

-                *pBTMdata1 = (M4OSA_Int16)temp;

-            }

-        }

-

-        pBTMdata1++;

-        pPTMdata2++;

-    }

-    //LOGV("VideoEditorBGAudioProcessing:: Copy final out ");

-    memcpy((void *)pMixedOutBuffer->m_dataAddress,

-        (void *)pBackgroundTrack->m_dataAddress,

-        pBackgroundTrack->m_bufferSize);

-

-    LOGV("VideoEditorBGAudioProcessing::lvProcessAudioMixNDuck EXIT");

-    return result;

-}

-

-VideoEditorBGAudioProcessing::~VideoEditorBGAudioProcessing() {

-

-}

-

-M4OSA_Int32 VideoEditorBGAudioProcessing::calculateOutResampleBufSize() {

-

-    // This already takes care of channel count in mBTBuffer.m_bufferSize

-    return (mOutSampleRate / mInSampleRate) * mBTBuffer.m_bufferSize;

-}

-

-void VideoEditorBGAudioProcessing ::veSetAudioProcessingParams(

-        const veAudMixSettings& gInputParams) {

-

-    LOGV("VideoEditorBGAudioProcessing:: ENTER lvSetAudioProcessingParams ");

-    mDucking_enable       = gInputParams.lvInDucking_enable;

-    mDucking_lowVolume    = gInputParams.lvInDucking_lowVolume;

-    mDucking_threshold    = gInputParams.lvInDucking_threshold;

-

-    mPTVolLevel           = gInputParams.lvPTVolLevel;

-    mBTVolLevel           = gInputParams.lvBTVolLevel ;

-

-    mBTChannelCount       = gInputParams.lvBTChannelCount;

-    mPTChannelCount       = gInputParams.lvPTChannelCount;

-

-    mBTFormat             = gInputParams.lvBTFormat;

-

-    mInSampleRate         = gInputParams.lvInSampleRate;

-    mOutSampleRate        = gInputParams.lvOutSampleRate;

-

-    mAudVolArrIndex       = 0;

-    mDoDucking            = 0;

-    mDuckingFactor        = 1.0; // default

-

-    LOGV("VideoEditorBGAudioProcessing::  ducking_enable 0x%x \

-        ducking_lowVolume %f  ducking_threshold %d  fPTVolLevel %f BTVolLevel %f",

-        mDucking_enable, mDucking_lowVolume, mDucking_threshold,

-        mPTVolLevel, mPTVolLevel);

-

-    // Following logc decides if SSRC support is needed for this mixing

-    mIsSSRCneeded = (gInputParams.lvInSampleRate != gInputParams.lvOutSampleRate);

-    if (gInputParams.lvBTChannelCount != gInputParams.lvPTChannelCount){

-        if (gInputParams.lvBTChannelCount == 2){

-            mChannelConversion   = 1; // convert to MONO

-        } else {

-            mChannelConversion   = 2; // Convert to STEREO

-        }

-    } else {

-        mChannelConversion   = 0;

-    }

-    LOGV("VideoEditorBGAudioProcessing:: EXIT veSetAudioProcessingParams ");

-}

-

-

-// Fast way to compute 10 * log(value)

-M4OSA_Int32 VideoEditorBGAudioProcessing::getDecibelSound(M4OSA_UInt32 value) {

-    if (value <= 0 || value > 0x8000) {

-        return 0;

-    } else if (value > 0x4000) { // 32768

-        return 90;

-    } else if (value > 0x2000) { // 16384

-        return 84;

-    } else if (value > 0x1000) { // 8192

-        return 78;

-    } else if (value > 0x0800) { // 4028

-        return 72;

-    } else if (value > 0x0400) { // 2048

-        return 66;

-    } else if (value > 0x0200) { // 1024

-        return 60;

-    } else if (value > 0x0100) { // 512

-        return 54;

-    } else if (value > 0x0080) { // 256

-        return 48;

-    } else if (value > 0x0040) { // 128

-        return 42;

-    } else if (value > 0x0020) { // 64

-        return 36;

-    } else if (value > 0x0010) { // 32

-        return 30;

-    } else if (value > 0x0008) { // 16

-        return 24;

-    } else if (value > 0x0007) { // 8

-        return 24;

-    } else if (value > 0x0003) { // 4

-        return 18;

-    } else if (value > 0x0001) { // 2

-        return 12;

-    } else  { // 1

-        return 6;

-    }

-}

-

-M4OSA_Bool VideoEditorBGAudioProcessing::isThresholdBreached(

-        M4OSA_Int32* averageValue,

-        M4OSA_Int32 storeCount,

-        M4OSA_Int32 thresholdValue) {

-

-    int totalValue = 0;

-    for (int i = 0; i < storeCount; ++i) {

-        totalValue += averageValue[i];

-    }

-    return (totalValue / storeCount > thresholdValue);

-}

-

-}//namespace android

+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_NDEBUG 1
+#define LOG_TAG "VideoEditorBGAudioProcessing"
+#include <utils/Log.h>
+#include "VideoEditorBGAudioProcessing.h"
+
+namespace android {
+
+VideoEditorBGAudioProcessing::VideoEditorBGAudioProcessing() {
+
+    LOGV("VideoEditorBGAudioProcessing:: Construct  VideoEditorBGAudioProcessing ");
+
+    mAudVolArrIndex = 0;
+    mDoDucking = 0;
+    mDucking_enable = 0;
+    mDucking_lowVolume = 0;
+    mDucking_threshold = 0;
+    mDuckingFactor = 0;
+
+    mBTVolLevel = 0;
+    mPTVolLevel = 0;
+
+    mIsSSRCneeded = 0;
+    mChannelConversion = 0;
+
+    mBTFormat = MONO_16_BIT;
+
+    mInSampleRate = 8000;
+    mOutSampleRate = 16000;
+    mPTChannelCount = 2;
+    mBTChannelCount = 1;
+}
+
+M4OSA_Int32 VideoEditorBGAudioProcessing::veProcessAudioMixNDuck(
+        void *pPTBuffer, void *pBTBuffer, void *pOutBuffer) {
+
+    M4AM_Buffer16* pPrimaryTrack   = (M4AM_Buffer16*)pPTBuffer;
+    M4AM_Buffer16* pBackgroundTrack = (M4AM_Buffer16*)pBTBuffer;
+    M4AM_Buffer16* pMixedOutBuffer  = (M4AM_Buffer16*)pOutBuffer;
+
+    LOGV("VideoEditorBGAudioProcessing::lvProcessAudioMixNDuck \
+        pPTBuffer 0x%x pBTBuffer 0x%x pOutBuffer 0x%x", pPTBuffer,
+        pBTBuffer, pOutBuffer);
+
+    M4OSA_ERR result = M4NO_ERROR;
+    M4OSA_Int16 *pBTMdata1;
+    M4OSA_Int16 *pPTMdata2;
+    M4OSA_UInt32 uiPCMsize;
+
+    // Ducking variable
+    M4OSA_UInt16 loopIndex = 0;
+    M4OSA_Int16 *pPCM16Sample = M4OSA_NULL;
+    M4OSA_Int32 peakDbValue = 0;
+    M4OSA_Int32 previousDbValue = 0;
+    M4OSA_UInt32 i;
+
+    // Output size if same as PT size
+    pMixedOutBuffer->m_bufferSize = pPrimaryTrack->m_bufferSize;
+
+    // Before mixing, we need to have only PT as out buffer
+    memcpy((void *)pMixedOutBuffer->m_dataAddress,
+        (void *)pPrimaryTrack->m_dataAddress, pMixedOutBuffer->m_bufferSize);
+
+    // Initially contains the input primary track
+    pPTMdata2 = (M4OSA_Int16*)pMixedOutBuffer->m_dataAddress;
+    // Contains BG track processed data(like channel conversion etc..
+    pBTMdata1 = (M4OSA_Int16*) pBackgroundTrack->m_dataAddress;
+
+    // Since we need to give sample count and not buffer size
+    uiPCMsize = pMixedOutBuffer->m_bufferSize/2 ;
+
+    if ((mDucking_enable) && (mPTVolLevel != 0.0)) {
+        // LOGI("VideoEditorBGAudioProcessing:: In Ducking analysis ");
+        loopIndex = 0;
+        peakDbValue = 0;
+        previousDbValue = peakDbValue;
+
+        pPCM16Sample = (M4OSA_Int16*)pPrimaryTrack->m_dataAddress;
+
+        while (loopIndex < pPrimaryTrack->m_bufferSize/sizeof(M4OSA_Int16)) {
+            if (pPCM16Sample[loopIndex] >= 0) {
+                peakDbValue = previousDbValue > pPCM16Sample[loopIndex] ?
+                        previousDbValue : pPCM16Sample[loopIndex];
+                previousDbValue = peakDbValue;
+            } else {
+                peakDbValue = previousDbValue > -pPCM16Sample[loopIndex] ?
+                        previousDbValue: -pPCM16Sample[loopIndex];
+                previousDbValue = peakDbValue;
+            }
+            loopIndex++;
+        }
+
+        mAudioVolumeArray[mAudVolArrIndex] = getDecibelSound(peakDbValue);
+
+        LOGV("VideoEditorBGAudioProcessing:: getDecibelSound %d",
+            mAudioVolumeArray[mAudVolArrIndex]);
+
+        // WINDOW_SIZE is 10 by default
+        // Check for threshold is done after 10 cycles
+        if (mAudVolArrIndex >= WINDOW_SIZE - 1) {
+            mDoDucking = isThresholdBreached(mAudioVolumeArray,
+            mAudVolArrIndex,mDucking_threshold );
+            mAudVolArrIndex = 0;
+        } else {
+            mAudVolArrIndex++;
+        }
+
+        //
+        // Below logic controls the mixing weightage
+        // for Background and Primary Tracks
+        // for the duration of window under analysis,
+        // to give fade-out for Background and fade-in for primary
+        // Current fading factor is distributed in equal range over
+        // the defined window size.
+        // For a window size = 25
+        // (500 ms (window under analysis) / 20 ms (sample duration))
+        //
+
+        if (mDoDucking) {
+            if (mDuckingFactor > mDucking_lowVolume) {
+                // FADE OUT BG Track
+                // Increment ducking factor in total steps in factor
+                // of low volume steps to reach low volume level
+                mDuckingFactor -= (mDucking_lowVolume);
+            } else {
+                mDuckingFactor = mDucking_lowVolume;
+            }
+        } else {
+            if (mDuckingFactor < 1.0 ) {
+                // FADE IN BG Track
+                // Increment ducking factor in total steps of
+                // low volume factor to reach orig.volume level
+                mDuckingFactor += (mDucking_lowVolume);
+            } else {
+                mDuckingFactor = 1.0;
+            }
+        }
+    } // end if - mDucking_enable
+
+
+    // Mixing Logic
+
+    LOGV("VideoEditorBGAudioProcessing:: Out of Ducking analysis uiPCMsize\
+        %d %f %f", mDoDucking, mDuckingFactor,mBTVolLevel);
+
+    while (uiPCMsize-- > 0) {
+
+        M4OSA_Int32 temp;
+        // Set vol factor for BT and PT
+        *pBTMdata1 = (M4OSA_Int16)(*pBTMdata1*mBTVolLevel);
+        *pPTMdata2 = (M4OSA_Int16)(*pPTMdata2*mPTVolLevel);
+
+        // Mix the two samples
+        if (mDoDucking) {
+
+            // Duck the BG track to ducking factor value before mixing
+            *pBTMdata1 = (M4OSA_Int16)((*pBTMdata1)*(mDuckingFactor));
+
+            // mix as normal case
+            *pBTMdata1 = (M4OSA_Int16)(*pBTMdata1 /2 + *pPTMdata2 /2);
+        } else {
+
+            *pBTMdata1 = (M4OSA_Int16)((*pBTMdata1)*(mDuckingFactor));
+            *pBTMdata1 = (M4OSA_Int16)(*pBTMdata1 /2 + *pPTMdata2 /2);
+        }
+
+        if (*pBTMdata1 < 0) {
+            temp = -(*pBTMdata1) * 2; // bring to original Amplitude level
+
+            if (temp > 32767) {
+                *pBTMdata1 = -32766; // less then max allowed value
+            } else {
+                *pBTMdata1 = (M4OSA_Int16)(-temp);
+            }
+        } else {
+            temp = (*pBTMdata1) * 2; // bring to original Amplitude level
+            if ( temp > 32768) {
+                *pBTMdata1 = 32767; // less than max allowed value
+            } else {
+                *pBTMdata1 = (M4OSA_Int16)temp;
+            }
+        }
+
+        pBTMdata1++;
+        pPTMdata2++;
+    }
+    //LOGV("VideoEditorBGAudioProcessing:: Copy final out ");
+    memcpy((void *)pMixedOutBuffer->m_dataAddress,
+        (void *)pBackgroundTrack->m_dataAddress,
+        pBackgroundTrack->m_bufferSize);
+
+    LOGV("VideoEditorBGAudioProcessing::lvProcessAudioMixNDuck EXIT");
+    return result;
+}
+
+VideoEditorBGAudioProcessing::~VideoEditorBGAudioProcessing() {
+
+}
+
+M4OSA_Int32 VideoEditorBGAudioProcessing::calculateOutResampleBufSize() {
+
+    // This already takes care of channel count in mBTBuffer.m_bufferSize
+    return (mOutSampleRate / mInSampleRate) * mBTBuffer.m_bufferSize;
+}
+
+void VideoEditorBGAudioProcessing ::veSetAudioProcessingParams(
+        const veAudMixSettings& gInputParams) {
+
+    LOGV("VideoEditorBGAudioProcessing:: ENTER lvSetAudioProcessingParams ");
+    mDucking_enable       = gInputParams.lvInDucking_enable;
+    mDucking_lowVolume    = gInputParams.lvInDucking_lowVolume;
+    mDucking_threshold    = gInputParams.lvInDucking_threshold;
+
+    mPTVolLevel           = gInputParams.lvPTVolLevel;
+    mBTVolLevel           = gInputParams.lvBTVolLevel ;
+
+    mBTChannelCount       = gInputParams.lvBTChannelCount;
+    mPTChannelCount       = gInputParams.lvPTChannelCount;
+
+    mBTFormat             = gInputParams.lvBTFormat;
+
+    mInSampleRate         = gInputParams.lvInSampleRate;
+    mOutSampleRate        = gInputParams.lvOutSampleRate;
+
+    mAudVolArrIndex       = 0;
+    mDoDucking            = 0;
+    mDuckingFactor        = 1.0; // default
+
+    LOGV("VideoEditorBGAudioProcessing::  ducking_enable 0x%x \
+        ducking_lowVolume %f  ducking_threshold %d  fPTVolLevel %f BTVolLevel %f",
+        mDucking_enable, mDucking_lowVolume, mDucking_threshold,
+        mPTVolLevel, mPTVolLevel);
+
+    // Following logc decides if SSRC support is needed for this mixing
+    mIsSSRCneeded = (gInputParams.lvInSampleRate != gInputParams.lvOutSampleRate);
+    if (gInputParams.lvBTChannelCount != gInputParams.lvPTChannelCount){
+        if (gInputParams.lvBTChannelCount == 2){
+            mChannelConversion   = 1; // convert to MONO
+        } else {
+            mChannelConversion   = 2; // Convert to STEREO
+        }
+    } else {
+        mChannelConversion   = 0;
+    }
+    LOGV("VideoEditorBGAudioProcessing:: EXIT veSetAudioProcessingParams ");
+}
+
+
+// Fast way to compute 10 * log(value)
+M4OSA_Int32 VideoEditorBGAudioProcessing::getDecibelSound(M4OSA_UInt32 value) {
+    if (value <= 0 || value > 0x8000) {
+        return 0;
+    } else if (value > 0x4000) { // 32768
+        return 90;
+    } else if (value > 0x2000) { // 16384
+        return 84;
+    } else if (value > 0x1000) { // 8192
+        return 78;
+    } else if (value > 0x0800) { // 4028
+        return 72;
+    } else if (value > 0x0400) { // 2048
+        return 66;
+    } else if (value > 0x0200) { // 1024
+        return 60;
+    } else if (value > 0x0100) { // 512
+        return 54;
+    } else if (value > 0x0080) { // 256
+        return 48;
+    } else if (value > 0x0040) { // 128
+        return 42;
+    } else if (value > 0x0020) { // 64
+        return 36;
+    } else if (value > 0x0010) { // 32
+        return 30;
+    } else if (value > 0x0008) { // 16
+        return 24;
+    } else if (value > 0x0007) { // 8
+        return 24;
+    } else if (value > 0x0003) { // 4
+        return 18;
+    } else if (value > 0x0001) { // 2
+        return 12;
+    } else  { // 1
+        return 6;
+    }
+}
+
+M4OSA_Bool VideoEditorBGAudioProcessing::isThresholdBreached(
+        M4OSA_Int32* averageValue,
+        M4OSA_Int32 storeCount,
+        M4OSA_Int32 thresholdValue) {
+
+    int totalValue = 0;
+    for (int i = 0; i < storeCount; ++i) {
+        totalValue += averageValue[i];
+    }
+    return (totalValue / storeCount > thresholdValue);
+}
+
+}//namespace android
diff --git a/libvideoeditor/lvpp/VideoEditorBGAudioProcessing.h b/libvideoeditor/lvpp/VideoEditorBGAudioProcessing.h
index c3a565e..0798fc1 100755
--- a/libvideoeditor/lvpp/VideoEditorBGAudioProcessing.h
+++ b/libvideoeditor/lvpp/VideoEditorBGAudioProcessing.h
@@ -1,94 +1,94 @@
-/*

- * Copyright (C) 2011 The Android Open Source Project

- *

- * Licensed under the Apache License, Version 2.0 (the "License");

- * you may not use this file except in compliance with the License.

- * You may obtain a copy of the License at

- *

- *      http://www.apache.org/licenses/LICENSE-2.0

- *

- * Unless required by applicable law or agreed to in writing, software

- * distributed under the License is distributed on an "AS IS" BASIS,

- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

- * See the License for the specific language governing permissions and

- * limitations under the License.

- */

-

-#include "M4OSA_Error.h"

-#include "M4OSA_Types.h"

-#include "M4OSA_Memory.h"

-#include "M4OSA_Export.h"

-#include "M4OSA_CoreID.h"

-

-namespace android{

-

-#define WINDOW_SIZE 10

-

-enum veAudioFormat {MONO_16_BIT, STEREO_16_BIT};

-

-

-typedef struct {

-    M4OSA_UInt16*   m_dataAddress; // Android SRC needs a Int16 pointer

-    M4OSA_UInt32    m_bufferSize;

-} M4AM_Buffer16;    // Structure contains Int16_t pointer

-

-// Following struct will be used by app to supply the PT and BT properties

-// along with ducking values

-typedef struct {

-    M4OSA_Int32 lvInSampleRate; // Sampling audio freq (8000,16000 or more )

-    M4OSA_Int32 lvOutSampleRate; //Sampling audio freq (8000,16000 or more )

-    veAudioFormat lvBTFormat;

-

-    M4OSA_Int32 lvInDucking_threshold;

-    M4OSA_Float lvInDucking_lowVolume;

-    M4OSA_Bool lvInDucking_enable;

-    M4OSA_Float lvPTVolLevel;

-    M4OSA_Float lvBTVolLevel;

-    M4OSA_Int32 lvBTChannelCount;

-    M4OSA_Int32 lvPTChannelCount;

-} veAudMixSettings;

-

-// This class is defined to get SF SRC access

-class VideoEditorBGAudioProcessing {

-public:

-    VideoEditorBGAudioProcessing();

-    void veSetAudioProcessingParams(const veAudMixSettings& mixParams);

-

-    M4OSA_Int32 veProcessAudioMixNDuck(

-                    void* primaryTrackBuffer,

-                    void* backgroundTrackBuffer,

-                    void* mixedOutputBuffer);

-

-    ~VideoEditorBGAudioProcessing();

-

-private:

-    M4OSA_Int32 mInSampleRate;

-    M4OSA_Int32 mOutSampleRate;

-    veAudioFormat mBTFormat;

-

-    M4OSA_Bool mIsSSRCneeded;

-    M4OSA_Int32 mBTChannelCount;

-    M4OSA_Int32 mPTChannelCount;

-    M4OSA_UInt8 mChannelConversion;

-

-    M4OSA_UInt32 mDucking_threshold;

-    M4OSA_Float mDucking_lowVolume;

-    M4OSA_Float mDuckingFactor ;

-    M4OSA_Bool mDucking_enable;

-    M4OSA_Int32 mAudioVolumeArray[WINDOW_SIZE];

-    M4OSA_Int32 mAudVolArrIndex;

-    M4OSA_Bool mDoDucking;

-    M4OSA_Float mPTVolLevel;

-    M4OSA_Float mBTVolLevel;

-

-    M4AM_Buffer16 mBTBuffer;

-

-    M4OSA_Int32 getDecibelSound(M4OSA_UInt32 value);

-    M4OSA_Bool  isThresholdBreached(M4OSA_Int32* averageValue,

-                    M4OSA_Int32 storeCount, M4OSA_Int32 thresholdValue);

-

-    // This returns the size of buffer which needs to allocated

-    // before resampling is called

-    M4OSA_Int32 calculateOutResampleBufSize();

-};

-} // namespace android

+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "M4OSA_Error.h"
+#include "M4OSA_Types.h"
+#include "M4OSA_Memory.h"
+#include "M4OSA_Export.h"
+#include "M4OSA_CoreID.h"
+
+namespace android{
+
+#define WINDOW_SIZE 10
+
+enum veAudioFormat {MONO_16_BIT, STEREO_16_BIT};
+
+
+typedef struct {
+    M4OSA_UInt16*   m_dataAddress; // Android SRC needs a Int16 pointer
+    M4OSA_UInt32    m_bufferSize;
+} M4AM_Buffer16;    // Structure contains Int16_t pointer
+
+// Following struct will be used by app to supply the PT and BT properties
+// along with ducking values
+typedef struct {
+    M4OSA_Int32 lvInSampleRate; // Sampling audio freq (8000,16000 or more )
+    M4OSA_Int32 lvOutSampleRate; //Sampling audio freq (8000,16000 or more )
+    veAudioFormat lvBTFormat;
+
+    M4OSA_Int32 lvInDucking_threshold;
+    M4OSA_Float lvInDucking_lowVolume;
+    M4OSA_Bool lvInDucking_enable;
+    M4OSA_Float lvPTVolLevel;
+    M4OSA_Float lvBTVolLevel;
+    M4OSA_Int32 lvBTChannelCount;
+    M4OSA_Int32 lvPTChannelCount;
+} veAudMixSettings;
+
+// This class is defined to get SF SRC access
+class VideoEditorBGAudioProcessing {
+public:
+    VideoEditorBGAudioProcessing();
+    void veSetAudioProcessingParams(const veAudMixSettings& mixParams);
+
+    M4OSA_Int32 veProcessAudioMixNDuck(
+                    void* primaryTrackBuffer,
+                    void* backgroundTrackBuffer,
+                    void* mixedOutputBuffer);
+
+    ~VideoEditorBGAudioProcessing();
+
+private:
+    M4OSA_Int32 mInSampleRate;
+    M4OSA_Int32 mOutSampleRate;
+    veAudioFormat mBTFormat;
+
+    M4OSA_Bool mIsSSRCneeded;
+    M4OSA_Int32 mBTChannelCount;
+    M4OSA_Int32 mPTChannelCount;
+    M4OSA_UInt8 mChannelConversion;
+
+    M4OSA_UInt32 mDucking_threshold;
+    M4OSA_Float mDucking_lowVolume;
+    M4OSA_Float mDuckingFactor ;
+    M4OSA_Bool mDucking_enable;
+    M4OSA_Int32 mAudioVolumeArray[WINDOW_SIZE];
+    M4OSA_Int32 mAudVolArrIndex;
+    M4OSA_Bool mDoDucking;
+    M4OSA_Float mPTVolLevel;
+    M4OSA_Float mBTVolLevel;
+
+    M4AM_Buffer16 mBTBuffer;
+
+    M4OSA_Int32 getDecibelSound(M4OSA_UInt32 value);
+    M4OSA_Bool  isThresholdBreached(M4OSA_Int32* averageValue,
+                    M4OSA_Int32 storeCount, M4OSA_Int32 thresholdValue);
+
+    // This returns the size of buffer which needs to allocated
+    // before resampling is called
+    M4OSA_Int32 calculateOutResampleBufSize();
+};
+} // namespace android
diff --git a/libvideoeditor/lvpp/VideoEditorPlayer.cpp b/libvideoeditor/lvpp/VideoEditorPlayer.cpp
index 452b32d..2af0e8c 100755
--- a/libvideoeditor/lvpp/VideoEditorPlayer.cpp
+++ b/libvideoeditor/lvpp/VideoEditorPlayer.cpp
@@ -1,163 +1,163 @@
-/*

- * Copyright (C) 2011 The Android Open Source Project

- *

- * Licensed under the Apache License, Version 2.0 (the "License");

- * you may not use this file except in compliance with the License.

- * You may obtain a copy of the License at

- *

- *      http://www.apache.org/licenses/LICENSE-2.0

- *

- * Unless required by applicable law or agreed to in writing, software

- * distributed under the License is distributed on an "AS IS" BASIS,

- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

- * See the License for the specific language governing permissions and

- * limitations under the License.

- */

-

-#define LOG_NDEBUG 1

-#define LOG_TAG "VideoEditorPlayer"

-#include <utils/Log.h>

-

-#include "VideoEditorPlayer.h"

-#include "PreviewPlayer.h"

-

-#include <media/Metadata.h>

-#include <media/stagefright/MediaExtractor.h>

+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_NDEBUG 1
+#define LOG_TAG "VideoEditorPlayer"
+#include <utils/Log.h>
+
+#include "VideoEditorPlayer.h"
+#include "PreviewPlayer.h"
+
+#include <media/Metadata.h>
+#include <media/stagefright/MediaExtractor.h>
 
 #include <system/audio.h>
-

-namespace android {

-

-VideoEditorPlayer::VideoEditorPlayer()

-    : mPlayer(new PreviewPlayer) {

-

-    LOGV("VideoEditorPlayer");

-    mPlayer->setListener(this);

-}

-

-VideoEditorPlayer::~VideoEditorPlayer() {

-    LOGV("~VideoEditorPlayer");

-

-    reset();

-    mVeAudioSink.clear();

-

-    delete mPlayer;

-    mPlayer = NULL;

-}

-

-status_t VideoEditorPlayer::initCheck() {

-    LOGV("initCheck");

-    return OK;

-}

-

-

-status_t VideoEditorPlayer::setAudioPlayer(VideoEditorAudioPlayer *audioPlayer) {

-    return mPlayer->setAudioPlayer(audioPlayer);

-}

-

-

-status_t VideoEditorPlayer::setDataSource(

-        const char *url, const KeyedVector<String8, String8> *headers) {

-    LOGI("setDataSource('%s')", url);

-

-    return mPlayer->setDataSource(url, headers);

-}

-

-//We donot use this in preview, dummy implimentation as this is pure virtual

-status_t VideoEditorPlayer::setDataSource(int fd, int64_t offset,

-    int64_t length) {

-    LOGE("setDataSource(%d, %lld, %lld) Not supported", fd, offset, length);

-    return (!OK);

-}

-

-status_t VideoEditorPlayer::setVideoSurface(const sp<Surface> &surface) {

-    LOGV("setVideoSurface");

-

-    mPlayer->setSurface(surface);

-    return OK;

-}

-

-status_t VideoEditorPlayer::setVideoSurfaceTexture(const sp<ISurfaceTexture> &surfaceTexture) {

-    LOGV("setVideoSurfaceTexture");

-

-    mPlayer->setSurfaceTexture(surfaceTexture);

-    return OK;

-}

-

-status_t VideoEditorPlayer::prepare() {

-    LOGV("prepare");

-    return mPlayer->prepare();

-}

-

-status_t VideoEditorPlayer::prepareAsync() {

-    return mPlayer->prepareAsync();

-}

-

-status_t VideoEditorPlayer::start() {

-    LOGV("start");

-    return mPlayer->play();

-}

-

-status_t VideoEditorPlayer::stop() {

-    LOGV("stop");

-    return pause();

-}

-

-status_t VideoEditorPlayer::pause() {

-    LOGV("pause");

-    return mPlayer->pause();

-}

-

-bool VideoEditorPlayer::isPlaying() {

-    LOGV("isPlaying");

-    return mPlayer->isPlaying();

-}

-

-status_t VideoEditorPlayer::seekTo(int msec) {

-    LOGV("seekTo");

-    status_t err = mPlayer->seekTo((int64_t)msec * 1000);

-    return err;

-}

-

-status_t VideoEditorPlayer::getCurrentPosition(int *msec) {

-    LOGV("getCurrentPosition");

-    int64_t positionUs;

-    status_t err = mPlayer->getPosition(&positionUs);

-

-    if (err != OK) {

-        return err;

-    }

-

-    *msec = (positionUs + 500) / 1000;

-    return OK;

-}

-

-status_t VideoEditorPlayer::getDuration(int *msec) {

-    LOGV("getDuration");

-

-    int64_t durationUs;

-    status_t err = mPlayer->getDuration(&durationUs);

-

-    if (err != OK) {

-        *msec = 0;

-        return OK;

-    }

-

-    *msec = (durationUs + 500) / 1000;

-    return OK;

-}

-

-status_t VideoEditorPlayer::reset() {

-    LOGV("reset");

-    mPlayer->reset();

-    return OK;

-}

-

-status_t VideoEditorPlayer::setLooping(int loop) {

-    LOGV("setLooping");

-    return mPlayer->setLooping(loop);

-}

-

+
+namespace android {
+
+VideoEditorPlayer::VideoEditorPlayer()
+    : mPlayer(new PreviewPlayer) {
+
+    LOGV("VideoEditorPlayer");
+    mPlayer->setListener(this);
+}
+
+VideoEditorPlayer::~VideoEditorPlayer() {
+    LOGV("~VideoEditorPlayer");
+
+    reset();
+    mVeAudioSink.clear();
+
+    delete mPlayer;
+    mPlayer = NULL;
+}
+
+status_t VideoEditorPlayer::initCheck() {
+    LOGV("initCheck");
+    return OK;
+}
+
+
+status_t VideoEditorPlayer::setAudioPlayer(VideoEditorAudioPlayer *audioPlayer) {
+    return mPlayer->setAudioPlayer(audioPlayer);
+}
+
+
+status_t VideoEditorPlayer::setDataSource(
+        const char *url, const KeyedVector<String8, String8> *headers) {
+    LOGI("setDataSource('%s')", url);
+
+    return mPlayer->setDataSource(url, headers);
+}
+
+//We donot use this in preview, dummy implimentation as this is pure virtual
+status_t VideoEditorPlayer::setDataSource(int fd, int64_t offset,
+    int64_t length) {
+    LOGE("setDataSource(%d, %lld, %lld) Not supported", fd, offset, length);
+    return (!OK);
+}
+
+status_t VideoEditorPlayer::setVideoSurface(const sp<Surface> &surface) {
+    LOGV("setVideoSurface");
+
+    mPlayer->setSurface(surface);
+    return OK;
+}
+
+status_t VideoEditorPlayer::setVideoSurfaceTexture(const sp<ISurfaceTexture> &surfaceTexture) {
+    LOGV("setVideoSurfaceTexture");
+
+    mPlayer->setSurfaceTexture(surfaceTexture);
+    return OK;
+}
+
+status_t VideoEditorPlayer::prepare() {
+    LOGV("prepare");
+    return mPlayer->prepare();
+}
+
+status_t VideoEditorPlayer::prepareAsync() {
+    return mPlayer->prepareAsync();
+}
+
+status_t VideoEditorPlayer::start() {
+    LOGV("start");
+    return mPlayer->play();
+}
+
+status_t VideoEditorPlayer::stop() {
+    LOGV("stop");
+    return pause();
+}
+
+status_t VideoEditorPlayer::pause() {
+    LOGV("pause");
+    return mPlayer->pause();
+}
+
+bool VideoEditorPlayer::isPlaying() {
+    LOGV("isPlaying");
+    return mPlayer->isPlaying();
+}
+
+status_t VideoEditorPlayer::seekTo(int msec) {
+    LOGV("seekTo");
+    status_t err = mPlayer->seekTo((int64_t)msec * 1000);
+    return err;
+}
+
+status_t VideoEditorPlayer::getCurrentPosition(int *msec) {
+    LOGV("getCurrentPosition");
+    int64_t positionUs;
+    status_t err = mPlayer->getPosition(&positionUs);
+
+    if (err != OK) {
+        return err;
+    }
+
+    *msec = (positionUs + 500) / 1000;
+    return OK;
+}
+
+status_t VideoEditorPlayer::getDuration(int *msec) {
+    LOGV("getDuration");
+
+    int64_t durationUs;
+    status_t err = mPlayer->getDuration(&durationUs);
+
+    if (err != OK) {
+        *msec = 0;
+        return OK;
+    }
+
+    *msec = (durationUs + 500) / 1000;
+    return OK;
+}
+
+status_t VideoEditorPlayer::reset() {
+    LOGV("reset");
+    mPlayer->reset();
+    return OK;
+}
+
+status_t VideoEditorPlayer::setLooping(int loop) {
+    LOGV("setLooping");
+    return mPlayer->setLooping(loop);
+}
+
 status_t VideoEditorPlayer::setParameter(int key, const Parcel &request) {
     LOGV("setParameter");
     return mPlayer->setParameter(key, request);
@@ -168,404 +168,404 @@
     return mPlayer->getParameter(key, reply);
 }
 
-player_type VideoEditorPlayer::playerType() {

-    LOGV("playerType");

-    return STAGEFRIGHT_PLAYER;

-}

-

-status_t VideoEditorPlayer::suspend() {

-    LOGV("suspend");

-    return mPlayer->suspend();

-}

-

-status_t VideoEditorPlayer::resume() {

-    LOGV("resume");

-    return mPlayer->resume();

+player_type VideoEditorPlayer::playerType() {
+    LOGV("playerType");
+    return STAGEFRIGHT_PLAYER;
+}
+
+status_t VideoEditorPlayer::suspend() {
+    LOGV("suspend");
+    return mPlayer->suspend();
+}
+
+status_t VideoEditorPlayer::resume() {
+    LOGV("resume");
+    return mPlayer->resume();
 }
 
 void VideoEditorPlayer::acquireLock() {
     LOGV("acquireLock");
     mPlayer->acquireLock();
 }
-

-void VideoEditorPlayer::releaseLock() {

-    LOGV("releaseLock");

-    mPlayer->releaseLock();

-}

-

-status_t VideoEditorPlayer::invoke(const Parcel &request, Parcel *reply) {

-    return INVALID_OPERATION;

-}

-

-void VideoEditorPlayer::setAudioSink(const sp<AudioSink> &audioSink) {

-    MediaPlayerInterface::setAudioSink(audioSink);

-

-    mPlayer->setAudioSink(audioSink);

-}

-

-status_t VideoEditorPlayer::getMetadata(

-        const media::Metadata::Filter& ids, Parcel *records) {

-    using media::Metadata;

-

-    uint32_t flags = mPlayer->flags();

-

-    Metadata metadata(records);

-

-    metadata.appendBool(

-            Metadata::kPauseAvailable,

-            flags & MediaExtractor::CAN_PAUSE);

-

-    metadata.appendBool(

-            Metadata::kSeekBackwardAvailable,

-            flags & MediaExtractor::CAN_SEEK_BACKWARD);

-

-    metadata.appendBool(

-            Metadata::kSeekForwardAvailable,

-            flags & MediaExtractor::CAN_SEEK_FORWARD);

-

-    metadata.appendBool(

-            Metadata::kSeekAvailable,

-            flags & MediaExtractor::CAN_SEEK);

-

-    return OK;

-}

-

-status_t VideoEditorPlayer::loadEffectsSettings(

-    M4VSS3GPP_EffectSettings* pEffectSettings, int nEffects) {

-    LOGV("loadEffectsSettings");

-    return mPlayer->loadEffectsSettings(pEffectSettings, nEffects);

-}

-

-status_t VideoEditorPlayer::loadAudioMixSettings(

-    M4xVSS_AudioMixingSettings* pAudioMixSettings) {

-    LOGV("VideoEditorPlayer: loadAudioMixSettings");

-    return mPlayer->loadAudioMixSettings(pAudioMixSettings);

-}

-

-status_t VideoEditorPlayer::setAudioMixPCMFileHandle(

-    M4OSA_Context pAudioMixPCMFileHandle) {

-

-    LOGV("VideoEditorPlayer: loadAudioMixSettings");

-    return mPlayer->setAudioMixPCMFileHandle(pAudioMixPCMFileHandle);

-}

-

-status_t VideoEditorPlayer::setAudioMixStoryBoardParam(

-    M4OSA_UInt32 audioMixStoryBoardTS,

-    M4OSA_UInt32 currentMediaBeginCutTime,

-    M4OSA_UInt32 primaryTrackVolValue) {

-

-    LOGV("VideoEditorPlayer: loadAudioMixSettings");

-    return mPlayer->setAudioMixStoryBoardParam(audioMixStoryBoardTS,

-     currentMediaBeginCutTime, primaryTrackVolValue);

-}

-

-status_t VideoEditorPlayer::setPlaybackBeginTime(uint32_t msec) {

-    LOGV("setPlaybackBeginTime");

-    return mPlayer->setPlaybackBeginTime(msec);

-}

-

-status_t VideoEditorPlayer::setPlaybackEndTime(uint32_t msec) {

-    LOGV("setPlaybackEndTime");

-    return mPlayer->setPlaybackEndTime(msec);

-}

-

-status_t VideoEditorPlayer::setStoryboardStartTime(uint32_t msec) {

-    LOGV("setStoryboardStartTime");

-    return mPlayer->setStoryboardStartTime(msec);

-}

-

-status_t VideoEditorPlayer::setProgressCallbackInterval(uint32_t cbInterval) {

-    LOGV("setProgressCallbackInterval");

-    return mPlayer->setProgressCallbackInterval(cbInterval);

-}

-

-status_t VideoEditorPlayer::setMediaRenderingMode(

-    M4xVSS_MediaRendering mode,

-    M4VIDEOEDITING_VideoFrameSize outputVideoSize) {

-

-    LOGV("setMediaRenderingMode");

-    return mPlayer->setMediaRenderingMode(mode, outputVideoSize);

-}

-

-status_t VideoEditorPlayer::resetJniCallbackTimeStamp() {

-    LOGV("resetJniCallbackTimeStamp");

-    return mPlayer->resetJniCallbackTimeStamp();

-}

-

-status_t VideoEditorPlayer::setImageClipProperties(

-    uint32_t width, uint32_t height) {

-    return mPlayer->setImageClipProperties(width, height);

-}

-

-status_t VideoEditorPlayer::readFirstVideoFrame() {

-    return mPlayer->readFirstVideoFrame();

-}

-

-status_t VideoEditorPlayer::getLastRenderedTimeMs(uint32_t *lastRenderedTimeMs) {

-    mPlayer->getLastRenderedTimeMs(lastRenderedTimeMs);

-    return NO_ERROR;

-}

-

-/* Implementation of AudioSink interface */

-#undef LOG_TAG

-#define LOG_TAG "VeAudioSink"

-

-int VideoEditorPlayer::VeAudioOutput::mMinBufferCount = 4;

-bool VideoEditorPlayer::VeAudioOutput::mIsOnEmulator = false;

-

-VideoEditorPlayer::VeAudioOutput::VeAudioOutput()

-    : mCallback(NULL),

-      mCallbackCookie(NULL) {

-    mTrack = 0;

-    mStreamType = AUDIO_STREAM_MUSIC;

-    mLeftVolume = 1.0;

-    mRightVolume = 1.0;

-    mLatency = 0;

-    mMsecsPerFrame = 0;

-    mNumFramesWritten = 0;

-    setMinBufferCount();

-}

-

-VideoEditorPlayer::VeAudioOutput::~VeAudioOutput() {

-    close();

-}

-

-void VideoEditorPlayer::VeAudioOutput::setMinBufferCount() {

-

-    mIsOnEmulator = false;

-    mMinBufferCount = 4;

-}

-

-bool VideoEditorPlayer::VeAudioOutput::isOnEmulator() {

-

-    setMinBufferCount();

-    return mIsOnEmulator;

-}

-

-int VideoEditorPlayer::VeAudioOutput::getMinBufferCount() {

-

-    setMinBufferCount();

-    return mMinBufferCount;

-}

-

-ssize_t VideoEditorPlayer::VeAudioOutput::bufferSize() const {

-

-    if (mTrack == 0) return NO_INIT;

-    return mTrack->frameCount() * frameSize();

-}

-

-ssize_t VideoEditorPlayer::VeAudioOutput::frameCount() const {

-

-    if (mTrack == 0) return NO_INIT;

-    return mTrack->frameCount();

-}

-

-ssize_t VideoEditorPlayer::VeAudioOutput::channelCount() const

-{

-    if (mTrack == 0) return NO_INIT;

-    return mTrack->channelCount();

-}

-

-ssize_t VideoEditorPlayer::VeAudioOutput::frameSize() const

-{

-    if (mTrack == 0) return NO_INIT;

-    return mTrack->frameSize();

-}

-

-uint32_t VideoEditorPlayer::VeAudioOutput::latency () const

-{

-    return mLatency;

-}

-

-float VideoEditorPlayer::VeAudioOutput::msecsPerFrame() const

-{

-    return mMsecsPerFrame;

-}

-

-status_t VideoEditorPlayer::VeAudioOutput::getPosition(uint32_t *position) {

-

-    if (mTrack == 0) return NO_INIT;

-    return mTrack->getPosition(position);

-}

-

-status_t VideoEditorPlayer::VeAudioOutput::open(

-        uint32_t sampleRate, int channelCount, int format, int bufferCount,

-        AudioCallback cb, void *cookie) {

-

-    mCallback = cb;

-    mCallbackCookie = cookie;

-

-    // Check argument "bufferCount" against the mininum buffer count

-    if (bufferCount < mMinBufferCount) {

-        LOGV("bufferCount (%d) is too small and increased to %d",

-            bufferCount, mMinBufferCount);

-        bufferCount = mMinBufferCount;

-

-    }

-    LOGV("open(%u, %d, %d, %d)", sampleRate, channelCount, format, bufferCount);

-    if (mTrack) close();

-    int afSampleRate;

-    int afFrameCount;

-    int frameCount;

-

-    if (AudioSystem::getOutputFrameCount(&afFrameCount, mStreamType) !=

-     NO_ERROR) {

-        return NO_INIT;

-    }

-    if (AudioSystem::getOutputSamplingRate(&afSampleRate, mStreamType) !=

-     NO_ERROR) {

-        return NO_INIT;

-    }

-

-    frameCount = (sampleRate*afFrameCount*bufferCount)/afSampleRate;

-

-    AudioTrack *t;

-    if (mCallback != NULL) {

-        t = new AudioTrack(

-                mStreamType,

-                sampleRate,

-                format,

-                (channelCount == 2) ?

-                 AUDIO_CHANNEL_OUT_STEREO : AUDIO_CHANNEL_OUT_MONO,

-                frameCount,

-                0 /* flags */,

-                CallbackWrapper,

-                this);

-    } else {

-        t = new AudioTrack(

-                mStreamType,

-                sampleRate,

-                format,

-                (channelCount == 2) ?

-                 AUDIO_CHANNEL_OUT_STEREO : AUDIO_CHANNEL_OUT_MONO,

-                frameCount);

-    }

-

-    if ((t == 0) || (t->initCheck() != NO_ERROR)) {

-        LOGE("Unable to create audio track");

-        delete t;

-        return NO_INIT;

-    }

-

-    LOGV("setVolume");

-    t->setVolume(mLeftVolume, mRightVolume);

-    mMsecsPerFrame = 1.e3 / (float) sampleRate;

-    mLatency = t->latency();

-    mTrack = t;

-    return NO_ERROR;

-}

-

-void VideoEditorPlayer::VeAudioOutput::start() {

-

-    LOGV("start");

-    if (mTrack) {

-        mTrack->setVolume(mLeftVolume, mRightVolume);

-        mTrack->start();

-        mTrack->getPosition(&mNumFramesWritten);

-    }

-}

-

-void VideoEditorPlayer::VeAudioOutput::snoopWrite(

-    const void* buffer, size_t size) {

-    // Visualization buffers not supported

-    return;

-

-}

-

-ssize_t VideoEditorPlayer::VeAudioOutput::write(

-     const void* buffer, size_t size) {

-

-    LOG_FATAL_IF(mCallback != NULL, "Don't call write if supplying a callback.");

-

-    //LOGV("write(%p, %u)", buffer, size);

-    if (mTrack) {

-        snoopWrite(buffer, size);

-        ssize_t ret = mTrack->write(buffer, size);

-        mNumFramesWritten += ret / 4; // assume 16 bit stereo

-        return ret;

-    }

-    return NO_INIT;

-}

-

-void VideoEditorPlayer::VeAudioOutput::stop() {

-

-    LOGV("stop");

-    if (mTrack) mTrack->stop();

-}

-

-void VideoEditorPlayer::VeAudioOutput::flush() {

-

-    LOGV("flush");

-    if (mTrack) mTrack->flush();

-}

-

-void VideoEditorPlayer::VeAudioOutput::pause() {

-

-    LOGV("VeAudioOutput::pause");

-    if (mTrack) mTrack->pause();

-}

-

-void VideoEditorPlayer::VeAudioOutput::close() {

-

-    LOGV("close");

-    delete mTrack;

-    mTrack = 0;

-}

-

-void VideoEditorPlayer::VeAudioOutput::setVolume(float left, float right) {

-

-    LOGV("setVolume(%f, %f)", left, right);

-    mLeftVolume = left;

-    mRightVolume = right;

-    if (mTrack) {

-        mTrack->setVolume(left, right);

-    }

-}

-

-// static

-void VideoEditorPlayer::VeAudioOutput::CallbackWrapper(

-        int event, void *cookie, void *info) {

-    //LOGV("VeAudioOutput::callbackwrapper");

-    if (event != AudioTrack::EVENT_MORE_DATA) {

-        return;

-    }

-

-    VeAudioOutput *me = (VeAudioOutput *)cookie;

-    AudioTrack::Buffer *buffer = (AudioTrack::Buffer *)info;

-

-    size_t actualSize = (*me->mCallback)(

-            me, buffer->raw, buffer->size, me->mCallbackCookie);

-

-    buffer->size = actualSize;

-

-    if (actualSize > 0) {

-        me->snoopWrite(buffer->raw, actualSize);

-    }

-}

-

-status_t VideoEditorPlayer::VeAudioOutput::dump(int fd, const Vector<String16>& args) const

-{

-    const size_t SIZE = 256;

-    char buffer[SIZE];

-    String8 result;

-

-    result.append(" VeAudioOutput\n");

-    snprintf(buffer, SIZE-1, "  stream type(%d), left - right volume(%f, %f)\n",

-            mStreamType, mLeftVolume, mRightVolume);

-    result.append(buffer);

-    snprintf(buffer, SIZE-1, "  msec per frame(%f), latency (%d)\n",

-            mMsecsPerFrame, mLatency);

-    result.append(buffer);

-    ::write(fd, result.string(), result.size());

-    if (mTrack != 0) {

-        mTrack->dump(fd, args);

-    }

-    return NO_ERROR;

-}

-

-int VideoEditorPlayer::VeAudioOutput::getSessionId() {

-

-    return mSessionId;

-}

-

-}  // namespace android

+
+void VideoEditorPlayer::releaseLock() {
+    LOGV("releaseLock");
+    mPlayer->releaseLock();
+}
+
+status_t VideoEditorPlayer::invoke(const Parcel &request, Parcel *reply) {
+    return INVALID_OPERATION;
+}
+
+void VideoEditorPlayer::setAudioSink(const sp<AudioSink> &audioSink) {
+    MediaPlayerInterface::setAudioSink(audioSink);
+
+    mPlayer->setAudioSink(audioSink);
+}
+
+status_t VideoEditorPlayer::getMetadata(
+        const media::Metadata::Filter& ids, Parcel *records) {
+    using media::Metadata;
+
+    uint32_t flags = mPlayer->flags();
+
+    Metadata metadata(records);
+
+    metadata.appendBool(
+            Metadata::kPauseAvailable,
+            flags & MediaExtractor::CAN_PAUSE);
+
+    metadata.appendBool(
+            Metadata::kSeekBackwardAvailable,
+            flags & MediaExtractor::CAN_SEEK_BACKWARD);
+
+    metadata.appendBool(
+            Metadata::kSeekForwardAvailable,
+            flags & MediaExtractor::CAN_SEEK_FORWARD);
+
+    metadata.appendBool(
+            Metadata::kSeekAvailable,
+            flags & MediaExtractor::CAN_SEEK);
+
+    return OK;
+}
+
+status_t VideoEditorPlayer::loadEffectsSettings(
+    M4VSS3GPP_EffectSettings* pEffectSettings, int nEffects) {
+    LOGV("loadEffectsSettings");
+    return mPlayer->loadEffectsSettings(pEffectSettings, nEffects);
+}
+
+status_t VideoEditorPlayer::loadAudioMixSettings(
+    M4xVSS_AudioMixingSettings* pAudioMixSettings) {
+    LOGV("VideoEditorPlayer: loadAudioMixSettings");
+    return mPlayer->loadAudioMixSettings(pAudioMixSettings);
+}
+
+status_t VideoEditorPlayer::setAudioMixPCMFileHandle(
+    M4OSA_Context pAudioMixPCMFileHandle) {
+
+    LOGV("VideoEditorPlayer: loadAudioMixSettings");
+    return mPlayer->setAudioMixPCMFileHandle(pAudioMixPCMFileHandle);
+}
+
+status_t VideoEditorPlayer::setAudioMixStoryBoardParam(
+    M4OSA_UInt32 audioMixStoryBoardTS,
+    M4OSA_UInt32 currentMediaBeginCutTime,
+    M4OSA_UInt32 primaryTrackVolValue) {
+
+    LOGV("VideoEditorPlayer: loadAudioMixSettings");
+    return mPlayer->setAudioMixStoryBoardParam(audioMixStoryBoardTS,
+     currentMediaBeginCutTime, primaryTrackVolValue);
+}
+
+status_t VideoEditorPlayer::setPlaybackBeginTime(uint32_t msec) {
+    LOGV("setPlaybackBeginTime");
+    return mPlayer->setPlaybackBeginTime(msec);
+}
+
+status_t VideoEditorPlayer::setPlaybackEndTime(uint32_t msec) {
+    LOGV("setPlaybackEndTime");
+    return mPlayer->setPlaybackEndTime(msec);
+}
+
+status_t VideoEditorPlayer::setStoryboardStartTime(uint32_t msec) {
+    LOGV("setStoryboardStartTime");
+    return mPlayer->setStoryboardStartTime(msec);
+}
+
+status_t VideoEditorPlayer::setProgressCallbackInterval(uint32_t cbInterval) {
+    LOGV("setProgressCallbackInterval");
+    return mPlayer->setProgressCallbackInterval(cbInterval);
+}
+
+status_t VideoEditorPlayer::setMediaRenderingMode(
+    M4xVSS_MediaRendering mode,
+    M4VIDEOEDITING_VideoFrameSize outputVideoSize) {
+
+    LOGV("setMediaRenderingMode");
+    return mPlayer->setMediaRenderingMode(mode, outputVideoSize);
+}
+
+status_t VideoEditorPlayer::resetJniCallbackTimeStamp() {
+    LOGV("resetJniCallbackTimeStamp");
+    return mPlayer->resetJniCallbackTimeStamp();
+}
+
+status_t VideoEditorPlayer::setImageClipProperties(
+    uint32_t width, uint32_t height) {
+    return mPlayer->setImageClipProperties(width, height);
+}
+
+status_t VideoEditorPlayer::readFirstVideoFrame() {
+    return mPlayer->readFirstVideoFrame();
+}
+
+status_t VideoEditorPlayer::getLastRenderedTimeMs(uint32_t *lastRenderedTimeMs) {
+    mPlayer->getLastRenderedTimeMs(lastRenderedTimeMs);
+    return NO_ERROR;
+}
+
+/* Implementation of AudioSink interface */
+#undef LOG_TAG
+#define LOG_TAG "VeAudioSink"
+
+int VideoEditorPlayer::VeAudioOutput::mMinBufferCount = 4;
+bool VideoEditorPlayer::VeAudioOutput::mIsOnEmulator = false;
+
+VideoEditorPlayer::VeAudioOutput::VeAudioOutput()
+    : mCallback(NULL),
+      mCallbackCookie(NULL) {
+    mTrack = 0;
+    mStreamType = AUDIO_STREAM_MUSIC;
+    mLeftVolume = 1.0;
+    mRightVolume = 1.0;
+    mLatency = 0;
+    mMsecsPerFrame = 0;
+    mNumFramesWritten = 0;
+    setMinBufferCount();
+}
+
+VideoEditorPlayer::VeAudioOutput::~VeAudioOutput() {
+    close();
+}
+
+void VideoEditorPlayer::VeAudioOutput::setMinBufferCount() {
+
+    mIsOnEmulator = false;
+    mMinBufferCount = 4;
+}
+
+bool VideoEditorPlayer::VeAudioOutput::isOnEmulator() {
+
+    setMinBufferCount();
+    return mIsOnEmulator;
+}
+
+int VideoEditorPlayer::VeAudioOutput::getMinBufferCount() {
+
+    setMinBufferCount();
+    return mMinBufferCount;
+}
+
+ssize_t VideoEditorPlayer::VeAudioOutput::bufferSize() const {
+
+    if (mTrack == 0) return NO_INIT;
+    return mTrack->frameCount() * frameSize();
+}
+
+ssize_t VideoEditorPlayer::VeAudioOutput::frameCount() const {
+
+    if (mTrack == 0) return NO_INIT;
+    return mTrack->frameCount();
+}
+
+ssize_t VideoEditorPlayer::VeAudioOutput::channelCount() const
+{
+    if (mTrack == 0) return NO_INIT;
+    return mTrack->channelCount();
+}
+
+ssize_t VideoEditorPlayer::VeAudioOutput::frameSize() const
+{
+    if (mTrack == 0) return NO_INIT;
+    return mTrack->frameSize();
+}
+
+uint32_t VideoEditorPlayer::VeAudioOutput::latency () const
+{
+    return mLatency;
+}
+
+float VideoEditorPlayer::VeAudioOutput::msecsPerFrame() const
+{
+    return mMsecsPerFrame;
+}
+
+status_t VideoEditorPlayer::VeAudioOutput::getPosition(uint32_t *position) {
+
+    if (mTrack == 0) return NO_INIT;
+    return mTrack->getPosition(position);
+}
+
+status_t VideoEditorPlayer::VeAudioOutput::open(
+        uint32_t sampleRate, int channelCount, int format, int bufferCount,
+        AudioCallback cb, void *cookie) {
+
+    mCallback = cb;
+    mCallbackCookie = cookie;
+
+    // Check argument "bufferCount" against the mininum buffer count
+    if (bufferCount < mMinBufferCount) {
+        LOGV("bufferCount (%d) is too small and increased to %d",
+            bufferCount, mMinBufferCount);
+        bufferCount = mMinBufferCount;
+
+    }
+    LOGV("open(%u, %d, %d, %d)", sampleRate, channelCount, format, bufferCount);
+    if (mTrack) close();
+    int afSampleRate;
+    int afFrameCount;
+    int frameCount;
+
+    if (AudioSystem::getOutputFrameCount(&afFrameCount, mStreamType) !=
+     NO_ERROR) {
+        return NO_INIT;
+    }
+    if (AudioSystem::getOutputSamplingRate(&afSampleRate, mStreamType) !=
+     NO_ERROR) {
+        return NO_INIT;
+    }
+
+    frameCount = (sampleRate*afFrameCount*bufferCount)/afSampleRate;
+
+    AudioTrack *t;
+    if (mCallback != NULL) {
+        t = new AudioTrack(
+                mStreamType,
+                sampleRate,
+                format,
+                (channelCount == 2) ?
+                 AUDIO_CHANNEL_OUT_STEREO : AUDIO_CHANNEL_OUT_MONO,
+                frameCount,
+                0 /* flags */,
+                CallbackWrapper,
+                this);
+    } else {
+        t = new AudioTrack(
+                mStreamType,
+                sampleRate,
+                format,
+                (channelCount == 2) ?
+                 AUDIO_CHANNEL_OUT_STEREO : AUDIO_CHANNEL_OUT_MONO,
+                frameCount);
+    }
+
+    if ((t == 0) || (t->initCheck() != NO_ERROR)) {
+        LOGE("Unable to create audio track");
+        delete t;
+        return NO_INIT;
+    }
+
+    LOGV("setVolume");
+    t->setVolume(mLeftVolume, mRightVolume);
+    mMsecsPerFrame = 1.e3 / (float) sampleRate;
+    mLatency = t->latency();
+    mTrack = t;
+    return NO_ERROR;
+}
+
+void VideoEditorPlayer::VeAudioOutput::start() {
+
+    LOGV("start");
+    if (mTrack) {
+        mTrack->setVolume(mLeftVolume, mRightVolume);
+        mTrack->start();
+        mTrack->getPosition(&mNumFramesWritten);
+    }
+}
+
+void VideoEditorPlayer::VeAudioOutput::snoopWrite(
+    const void* buffer, size_t size) {
+    // Visualization buffers not supported
+    return;
+
+}
+
+ssize_t VideoEditorPlayer::VeAudioOutput::write(
+     const void* buffer, size_t size) {
+
+    LOG_FATAL_IF(mCallback != NULL, "Don't call write if supplying a callback.");
+
+    //LOGV("write(%p, %u)", buffer, size);
+    if (mTrack) {
+        snoopWrite(buffer, size);
+        ssize_t ret = mTrack->write(buffer, size);
+        mNumFramesWritten += ret / 4; // assume 16 bit stereo
+        return ret;
+    }
+    return NO_INIT;
+}
+
+void VideoEditorPlayer::VeAudioOutput::stop() {
+
+    LOGV("stop");
+    if (mTrack) mTrack->stop();
+}
+
+void VideoEditorPlayer::VeAudioOutput::flush() {
+
+    LOGV("flush");
+    if (mTrack) mTrack->flush();
+}
+
+void VideoEditorPlayer::VeAudioOutput::pause() {
+
+    LOGV("VeAudioOutput::pause");
+    if (mTrack) mTrack->pause();
+}
+
+void VideoEditorPlayer::VeAudioOutput::close() {
+
+    LOGV("close");
+    delete mTrack;
+    mTrack = 0;
+}
+
+void VideoEditorPlayer::VeAudioOutput::setVolume(float left, float right) {
+
+    LOGV("setVolume(%f, %f)", left, right);
+    mLeftVolume = left;
+    mRightVolume = right;
+    if (mTrack) {
+        mTrack->setVolume(left, right);
+    }
+}
+
+// static
+void VideoEditorPlayer::VeAudioOutput::CallbackWrapper(
+        int event, void *cookie, void *info) {
+    //LOGV("VeAudioOutput::callbackwrapper");
+    if (event != AudioTrack::EVENT_MORE_DATA) {
+        return;
+    }
+
+    VeAudioOutput *me = (VeAudioOutput *)cookie;
+    AudioTrack::Buffer *buffer = (AudioTrack::Buffer *)info;
+
+    size_t actualSize = (*me->mCallback)(
+            me, buffer->raw, buffer->size, me->mCallbackCookie);
+
+    buffer->size = actualSize;
+
+    if (actualSize > 0) {
+        me->snoopWrite(buffer->raw, actualSize);
+    }
+}
+
+status_t VideoEditorPlayer::VeAudioOutput::dump(int fd, const Vector<String16>& args) const
+{
+    const size_t SIZE = 256;
+    char buffer[SIZE];
+    String8 result;
+
+    result.append(" VeAudioOutput\n");
+    snprintf(buffer, SIZE-1, "  stream type(%d), left - right volume(%f, %f)\n",
+            mStreamType, mLeftVolume, mRightVolume);
+    result.append(buffer);
+    snprintf(buffer, SIZE-1, "  msec per frame(%f), latency (%d)\n",
+            mMsecsPerFrame, mLatency);
+    result.append(buffer);
+    ::write(fd, result.string(), result.size());
+    if (mTrack != 0) {
+        mTrack->dump(fd, args);
+    }
+    return NO_ERROR;
+}
+
+int VideoEditorPlayer::VeAudioOutput::getSessionId() {
+
+    return mSessionId;
+}
+
+}  // namespace android
diff --git a/libvideoeditor/lvpp/VideoEditorPlayer.h b/libvideoeditor/lvpp/VideoEditorPlayer.h
index 2c7838e..1afdd88 100755
--- a/libvideoeditor/lvpp/VideoEditorPlayer.h
+++ b/libvideoeditor/lvpp/VideoEditorPlayer.h
@@ -1,161 +1,161 @@
-/*

- * Copyright (C) 2011 The Android Open Source Project

- *

- * Licensed under the Apache License, Version 2.0 (the "License");

- * you may not use this file except in compliance with the License.

- * You may obtain a copy of the License at

- *

- *      http://www.apache.org/licenses/LICENSE-2.0

- *

- * Unless required by applicable law or agreed to in writing, software

- * distributed under the License is distributed on an "AS IS" BASIS,

- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

- * See the License for the specific language governing permissions and

- * limitations under the License.

- */

-

-#ifndef ANDROID_VIDEOEDITOR_PLAYER_H

-#define ANDROID_VIDEOEDITOR_PLAYER_H

-

-#include <media/MediaPlayerInterface.h>

-#include "AudioTrack.h"

-#include "M4xVSS_API.h"

-#include "VideoEditorMain.h"

-#include "VideoEditorTools.h"

-#include "VideoEditorAudioPlayer.h"

-

-namespace android {

-

-struct PreviewPlayer;

-

-class VideoEditorPlayer : public MediaPlayerInterface {

-    public:

-    class VeAudioOutput: public MediaPlayerBase::AudioSink

-    {

-    public:

-                                VeAudioOutput();

-        virtual                 ~VeAudioOutput();

-

-        virtual bool            ready() const { return mTrack != NULL; }

-        virtual bool            realtime() const { return true; }

-        virtual ssize_t         bufferSize() const;

-        virtual ssize_t         frameCount() const;

-        virtual ssize_t         channelCount() const;

-        virtual ssize_t         frameSize() const;

-        virtual uint32_t        latency() const;

-        virtual float           msecsPerFrame() const;

-        virtual status_t        getPosition(uint32_t *position);

-        virtual int             getSessionId();

-

-        virtual status_t        open(

-                uint32_t sampleRate, int channelCount,

-                int format, int bufferCount,

-                AudioCallback cb, void *cookie);

-

-        virtual void            start();

-        virtual ssize_t         write(const void* buffer, size_t size);

-        virtual void            stop();

-        virtual void            flush();

-        virtual void            pause();

-        virtual void            close();

-        void setAudioStreamType(int streamType) { mStreamType = streamType; }

-                void            setVolume(float left, float right);

-        virtual status_t        dump(int fd,const Vector<String16>& args) const;

-

-        static bool             isOnEmulator();

-        static int              getMinBufferCount();

-    private:

-        static void             setMinBufferCount();

-        static void             CallbackWrapper(

-                int event, void *me, void *info);

-

-        AudioTrack*             mTrack;

-        AudioCallback           mCallback;

-        void *                  mCallbackCookie;

-        int                     mStreamType;

-        float                   mLeftVolume;

-        float                   mRightVolume;

-        float                   mMsecsPerFrame;

-        uint32_t                mLatency;

-        int                     mSessionId;

-        static bool             mIsOnEmulator;

-        static int              mMinBufferCount; // 12 for emulator; otherwise 4

-

-        public:

-        uint32_t                mNumFramesWritten;

-        void                    snoopWrite(const void*, size_t);

-    };

-

-public:

-    VideoEditorPlayer();

-    virtual ~VideoEditorPlayer();

-

-    virtual status_t initCheck();

-

-    virtual status_t setDataSource(

-            const char *url, const KeyedVector<String8, String8> *headers);

-

-    virtual status_t setDataSource(int fd, int64_t offset, int64_t length);

-    virtual status_t setVideoSurface(const sp<Surface> &surface);

-    virtual status_t setVideoSurfaceTexture(const sp<ISurfaceTexture> &surfaceTexture);

-    virtual status_t prepare();

-    virtual status_t prepareAsync();

-    virtual status_t start();

-    virtual status_t stop();

-    virtual status_t pause();

-    virtual bool isPlaying();

-    virtual status_t seekTo(int msec);

-    virtual status_t getCurrentPosition(int *msec);

-    virtual status_t getDuration(int *msec);

-    virtual status_t reset();

-    virtual status_t setLooping(int loop);

-    virtual player_type playerType();

-    virtual status_t invoke(const Parcel &request, Parcel *reply);

-    virtual void setAudioSink(const sp<AudioSink> &audioSink);

-    virtual status_t suspend();

+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_VIDEOEDITOR_PLAYER_H
+#define ANDROID_VIDEOEDITOR_PLAYER_H
+
+#include <media/MediaPlayerInterface.h>
+#include "AudioTrack.h"
+#include "M4xVSS_API.h"
+#include "VideoEditorMain.h"
+#include "VideoEditorTools.h"
+#include "VideoEditorAudioPlayer.h"
+
+namespace android {
+
+struct PreviewPlayer;
+
+class VideoEditorPlayer : public MediaPlayerInterface {
+    public:
+    class VeAudioOutput: public MediaPlayerBase::AudioSink
+    {
+    public:
+                                VeAudioOutput();
+        virtual                 ~VeAudioOutput();
+
+        virtual bool            ready() const { return mTrack != NULL; }
+        virtual bool            realtime() const { return true; }
+        virtual ssize_t         bufferSize() const;
+        virtual ssize_t         frameCount() const;
+        virtual ssize_t         channelCount() const;
+        virtual ssize_t         frameSize() const;
+        virtual uint32_t        latency() const;
+        virtual float           msecsPerFrame() const;
+        virtual status_t        getPosition(uint32_t *position);
+        virtual int             getSessionId();
+
+        virtual status_t        open(
+                uint32_t sampleRate, int channelCount,
+                int format, int bufferCount,
+                AudioCallback cb, void *cookie);
+
+        virtual void            start();
+        virtual ssize_t         write(const void* buffer, size_t size);
+        virtual void            stop();
+        virtual void            flush();
+        virtual void            pause();
+        virtual void            close();
+        void setAudioStreamType(int streamType) { mStreamType = streamType; }
+                void            setVolume(float left, float right);
+        virtual status_t        dump(int fd,const Vector<String16>& args) const;
+
+        static bool             isOnEmulator();
+        static int              getMinBufferCount();
+    private:
+        static void             setMinBufferCount();
+        static void             CallbackWrapper(
+                int event, void *me, void *info);
+
+        AudioTrack*             mTrack;
+        AudioCallback           mCallback;
+        void *                  mCallbackCookie;
+        int                     mStreamType;
+        float                   mLeftVolume;
+        float                   mRightVolume;
+        float                   mMsecsPerFrame;
+        uint32_t                mLatency;
+        int                     mSessionId;
+        static bool             mIsOnEmulator;
+        static int              mMinBufferCount; // 12 for emulator; otherwise 4
+
+        public:
+        uint32_t                mNumFramesWritten;
+        void                    snoopWrite(const void*, size_t);
+    };
+
+public:
+    VideoEditorPlayer();
+    virtual ~VideoEditorPlayer();
+
+    virtual status_t initCheck();
+
+    virtual status_t setDataSource(
+            const char *url, const KeyedVector<String8, String8> *headers);
+
+    virtual status_t setDataSource(int fd, int64_t offset, int64_t length);
+    virtual status_t setVideoSurface(const sp<Surface> &surface);
+    virtual status_t setVideoSurfaceTexture(const sp<ISurfaceTexture> &surfaceTexture);
+    virtual status_t prepare();
+    virtual status_t prepareAsync();
+    virtual status_t start();
+    virtual status_t stop();
+    virtual status_t pause();
+    virtual bool isPlaying();
+    virtual status_t seekTo(int msec);
+    virtual status_t getCurrentPosition(int *msec);
+    virtual status_t getDuration(int *msec);
+    virtual status_t reset();
+    virtual status_t setLooping(int loop);
+    virtual player_type playerType();
+    virtual status_t invoke(const Parcel &request, Parcel *reply);
+    virtual void setAudioSink(const sp<AudioSink> &audioSink);
+    virtual status_t suspend();
     virtual status_t resume();
     virtual void acquireLock();
-    virtual void releaseLock();

+    virtual void releaseLock();
     virtual status_t setParameter(int key, const Parcel &request);
     virtual status_t getParameter(int key, Parcel *reply);
 
-    virtual status_t getMetadata(

-                        const media::Metadata::Filter& ids, Parcel *records);

-

-    virtual status_t loadEffectsSettings(

-                         M4VSS3GPP_EffectSettings* pEffectSettings, int nEffects);

-

-    virtual status_t loadAudioMixSettings(

-                         M4xVSS_AudioMixingSettings* pAudioMixSettings);

-

-    virtual status_t setAudioMixPCMFileHandle(

-                         M4OSA_Context pAudioMixPCMFileHandle);

-

-    virtual status_t setAudioMixStoryBoardParam(

-                         M4OSA_UInt32 x, M4OSA_UInt32 y, M4OSA_UInt32 z);

-

-    virtual status_t setPlaybackBeginTime(uint32_t msec);

-    virtual status_t setPlaybackEndTime(uint32_t msec);

-    virtual status_t setStoryboardStartTime(uint32_t msec);

-    virtual status_t setProgressCallbackInterval(uint32_t cbInterval);

-

-    virtual status_t setMediaRenderingMode(M4xVSS_MediaRendering mode,

-                          M4VIDEOEDITING_VideoFrameSize outputVideoSize);

-

-    virtual status_t resetJniCallbackTimeStamp();

-    virtual status_t setImageClipProperties(uint32_t width, uint32_t height);

-    virtual status_t readFirstVideoFrame();

-    virtual status_t getLastRenderedTimeMs(uint32_t *lastRenderedTimeMs);

-

-    status_t setAudioPlayer(VideoEditorAudioPlayer *audioPlayer);

-private:

-    PreviewPlayer       *mPlayer;

-    sp<VeAudioOutput>    mVeAudioSink;

-

-    VideoEditorPlayer(const VideoEditorPlayer &);

-    VideoEditorPlayer &operator=(const VideoEditorPlayer &);

-};

-

-}  // namespace android

-

-#endif  // ANDROID_VIDEOEDITOR_PLAYER_H

+    virtual status_t getMetadata(
+                        const media::Metadata::Filter& ids, Parcel *records);
+
+    virtual status_t loadEffectsSettings(
+                         M4VSS3GPP_EffectSettings* pEffectSettings, int nEffects);
+
+    virtual status_t loadAudioMixSettings(
+                         M4xVSS_AudioMixingSettings* pAudioMixSettings);
+
+    virtual status_t setAudioMixPCMFileHandle(
+                         M4OSA_Context pAudioMixPCMFileHandle);
+
+    virtual status_t setAudioMixStoryBoardParam(
+                         M4OSA_UInt32 x, M4OSA_UInt32 y, M4OSA_UInt32 z);
+
+    virtual status_t setPlaybackBeginTime(uint32_t msec);
+    virtual status_t setPlaybackEndTime(uint32_t msec);
+    virtual status_t setStoryboardStartTime(uint32_t msec);
+    virtual status_t setProgressCallbackInterval(uint32_t cbInterval);
+
+    virtual status_t setMediaRenderingMode(M4xVSS_MediaRendering mode,
+                          M4VIDEOEDITING_VideoFrameSize outputVideoSize);
+
+    virtual status_t resetJniCallbackTimeStamp();
+    virtual status_t setImageClipProperties(uint32_t width, uint32_t height);
+    virtual status_t readFirstVideoFrame();
+    virtual status_t getLastRenderedTimeMs(uint32_t *lastRenderedTimeMs);
+
+    status_t setAudioPlayer(VideoEditorAudioPlayer *audioPlayer);
+private:
+    PreviewPlayer       *mPlayer;
+    sp<VeAudioOutput>    mVeAudioSink;
+
+    VideoEditorPlayer(const VideoEditorPlayer &);
+    VideoEditorPlayer &operator=(const VideoEditorPlayer &);
+};
+
+}  // namespace android
+
+#endif  // ANDROID_VIDEOEDITOR_PLAYER_H
diff --git a/libvideoeditor/lvpp/VideoEditorPreviewController.cpp b/libvideoeditor/lvpp/VideoEditorPreviewController.cpp
index 09b9bfb..ee1dc60 100755
--- a/libvideoeditor/lvpp/VideoEditorPreviewController.cpp
+++ b/libvideoeditor/lvpp/VideoEditorPreviewController.cpp
@@ -1,1509 +1,1509 @@
-/*

- * Copyright (C) 2011 The Android Open Source Project

- *

- * Licensed under the Apache License, Version 2.0 (the "License");

- * you may not use this file except in compliance with the License.

- * You may obtain a copy of the License at

- *

- *      http://www.apache.org/licenses/LICENSE-2.0

- *

- * Unless required by applicable law or agreed to in writing, software

- * distributed under the License is distributed on an "AS IS" BASIS,

- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

- * See the License for the specific language governing permissions and

- * limitations under the License.

- */

-

-#define LOG_NDEBUG 1

-#define LOG_TAG "VideoEditorPreviewController"

-#include "VideoEditorPreviewController.h"

-

-namespace android {

-

-#define PREVIEW_THREAD_STACK_SIZE                           (65536)

-

-VideoEditorPreviewController::VideoEditorPreviewController()

-    : mCurrentPlayer(0),

-      mThreadContext(NULL),

-      mPlayerState(VePlayerIdle),

-      mPrepareReqest(M4OSA_FALSE),

-      mClipList(NULL),

-      mNumberClipsInStoryBoard(0),

-      mNumberClipsToPreview(0),

-      mStartingClipIndex(0),

-      mPreviewLooping(M4OSA_FALSE),

-      mCallBackAfterFrameCnt(0),

-      mEffectsSettings(NULL),

-      mNumberEffects(0),

-      mCurrentClipNumber(-1),

-      mClipTotalDuration(0),

-      mCurrentVideoEffect(VIDEO_EFFECT_NONE),

-      mBackgroundAudioSetting(NULL),

-      mAudioMixPCMFileHandle(NULL),

-      mTarget(NULL),

-      mJniCookie(NULL),

-      mJniCallback(NULL),

-      mCurrentPlayedDuration(0),

-      mCurrentClipDuration(0),

-      mVideoStoryBoardTimeMsUptoFirstPreviewClip(0),

-      mOverlayState(OVERLAY_CLEAR),

-      mActivePlayerIndex(0),

-      mOutputVideoWidth(0),

-      mOutputVideoHeight(0),

-      bStopThreadInProgress(false),

-      mSemThreadWait(NULL) {

-    LOGV("VideoEditorPreviewController");

-    mRenderingMode = M4xVSS_kBlackBorders;

-    mIsFiftiesEffectStarted = false;

-

-    for (int i=0; i<NBPLAYER_INSTANCES; i++) {

-        mVePlayer[i] = NULL;

-    }

-}

-

-VideoEditorPreviewController::~VideoEditorPreviewController() {

-    M4OSA_UInt32 i = 0;

-    M4OSA_ERR err = M4NO_ERROR;

-    LOGV("~VideoEditorPreviewController");

-

-    // Stop the thread if its still running

-    if(mThreadContext != NULL) {

-        err = M4OSA_threadSyncStop(mThreadContext);

-        if(err != M4NO_ERROR) {

-            LOGV("~VideoEditorPreviewController: error 0x%x \

-            in trying to stop thread", err);

-            // Continue even if error

-        }

-

-        err = M4OSA_threadSyncClose(mThreadContext);

-        if(err != M4NO_ERROR) {

-            LOGE("~VideoEditorPreviewController: error 0x%x \

-            in trying to close thread", (unsigned int) err);

-            // Continue even if error

-        }

-

-        mThreadContext = NULL;

-    }

-

-    for (int playerInst=0; playerInst<NBPLAYER_INSTANCES;

-         playerInst++) {

-        if(mVePlayer[playerInst] != NULL) {

-            LOGV("clearing mVePlayer %d", playerInst);

-            mVePlayer[playerInst].clear();

-        }

-    }

-

-    if(mClipList != NULL) {

-        // Clean up

-        for(i=0;i<mNumberClipsInStoryBoard;i++)

-        {

-            if(mClipList[i]->pFile != NULL) {

-                free(mClipList[i]->pFile);

-                mClipList[i]->pFile = NULL;

-            }

-

-            free(mClipList[i]);

-        }

-        free(mClipList);

-        mClipList = NULL;

-    }

-

-    if(mEffectsSettings) {

-        for(i=0;i<mNumberEffects;i++) {

-            if(mEffectsSettings[i].xVSS.pFramingBuffer != NULL) {

-                free(mEffectsSettings[i].xVSS.pFramingBuffer->pac_data);

-

-                free(mEffectsSettings[i].xVSS.pFramingBuffer);

-

-                mEffectsSettings[i].xVSS.pFramingBuffer = NULL;

-            }

-        }

-        free(mEffectsSettings);

-        mEffectsSettings = NULL;

-    }

-

-    if (mAudioMixPCMFileHandle) {

-        err = M4OSA_fileReadClose (mAudioMixPCMFileHandle);

-        mAudioMixPCMFileHandle = M4OSA_NULL;

-    }

-

-    if (mBackgroundAudioSetting != NULL) {

-        free(mBackgroundAudioSetting);

-        mBackgroundAudioSetting = NULL;

-    }

-

-    if(mTarget != NULL) {

-        delete mTarget;

-        mTarget = NULL;

-    }

-

-    mOverlayState = OVERLAY_CLEAR;

-

-    LOGV("~VideoEditorPreviewController returns");

-}

-

-M4OSA_ERR VideoEditorPreviewController::loadEditSettings(

-    M4VSS3GPP_EditSettings* pSettings,M4xVSS_AudioMixingSettings* bgmSettings) {

-

-    M4OSA_UInt32 i = 0, iClipDuration = 0, rgbSize = 0;

-    M4VIFI_UInt8 *tmp = NULL;

-    M4OSA_ERR err = M4NO_ERROR;

-

-    LOGV("loadEditSettings");

-    LOGV("loadEditSettings Channels = %d, sampling Freq %d",

-          bgmSettings->uiNbChannels, bgmSettings->uiSamplingFrequency  );

-          bgmSettings->uiSamplingFrequency = 32000;

-

-    LOGV("loadEditSettings Channels = %d, sampling Freq %d",

-          bgmSettings->uiNbChannels, bgmSettings->uiSamplingFrequency  );

-    Mutex::Autolock autoLock(mLock);

-

-    // Clean up any previous Edit settings before loading new ones

-    mCurrentVideoEffect = VIDEO_EFFECT_NONE;

-

-    if(mAudioMixPCMFileHandle) {

-        err = M4OSA_fileReadClose (mAudioMixPCMFileHandle);

-        mAudioMixPCMFileHandle = M4OSA_NULL;

-    }

-

-    if(mBackgroundAudioSetting != NULL) {

-        free(mBackgroundAudioSetting);

-        mBackgroundAudioSetting = NULL;

-    }

-

-    if(mClipList != NULL) {

-        // Clean up

-        for(i=0;i<mNumberClipsInStoryBoard;i++)

-        {

-            if(mClipList[i]->pFile != NULL) {

-                free(mClipList[i]->pFile);

-                mClipList[i]->pFile = NULL;

-            }

-

-            free(mClipList[i]);

-        }

-        free(mClipList);

-        mClipList = NULL;

-    }

-

-    if(mEffectsSettings) {

-        for(i=0;i<mNumberEffects;i++) {

-            if(mEffectsSettings[i].xVSS.pFramingBuffer != NULL) {

-                free(mEffectsSettings[i].xVSS.pFramingBuffer->pac_data);

-

-                free(mEffectsSettings[i].xVSS.pFramingBuffer);

-

-                mEffectsSettings[i].xVSS.pFramingBuffer = NULL;

-            }

-        }

-        free(mEffectsSettings);

-        mEffectsSettings = NULL;

-    }

-

-    if(mClipList == NULL) {

-        mNumberClipsInStoryBoard = pSettings->uiClipNumber;

-        LOGV("loadEditSettings: # of Clips = %d", mNumberClipsInStoryBoard);

-

-        mClipList = (M4VSS3GPP_ClipSettings**)M4OSA_32bitAlignedMalloc(

-         sizeof(M4VSS3GPP_ClipSettings*)*pSettings->uiClipNumber, M4VS,

-         (M4OSA_Char*)"LvPP, copy of pClipList");

-

-        if(NULL == mClipList) {

-            LOGE("loadEditSettings: Malloc error");

-            return M4ERR_ALLOC;

-        }

-        memset((void *)mClipList,0,

-         sizeof(M4VSS3GPP_ClipSettings*)*pSettings->uiClipNumber);

-

-        for(i=0;i<pSettings->uiClipNumber;i++) {

-

-            // Allocate current clip

-            mClipList[i] =

-             (M4VSS3GPP_ClipSettings*)M4OSA_32bitAlignedMalloc(

-              sizeof(M4VSS3GPP_ClipSettings),M4VS,(M4OSA_Char*)"clip settings");

-

-            if(mClipList[i] == NULL) {

-

-                LOGE("loadEditSettings: Allocation error for mClipList[%d]", (int)i);

-                return M4ERR_ALLOC;

-            }

-            // Copy plain structure

-            memcpy((void *)mClipList[i],

-             (void *)pSettings->pClipList[i],

-             sizeof(M4VSS3GPP_ClipSettings));

-

-            if(NULL != pSettings->pClipList[i]->pFile) {

-                mClipList[i]->pFile = (M4OSA_Char*)M4OSA_32bitAlignedMalloc(

-                pSettings->pClipList[i]->filePathSize, M4VS,

-                (M4OSA_Char*)"pClipSettingsDest->pFile");

-

-                if(NULL == mClipList[i]->pFile)

-                {

-                    LOGE("loadEditSettings : ERROR allocating filename");

-                    return M4ERR_ALLOC;

-                }

-

-                memcpy((void *)mClipList[i]->pFile,

-                 (void *)pSettings->pClipList[i]->pFile,

-                 pSettings->pClipList[i]->filePathSize);

-            }

-            else {

-                LOGE("NULL file path");

-                return M4ERR_PARAMETER;

-            }

-

-            // Calculate total duration of all clips

-            iClipDuration = pSettings->pClipList[i]->uiEndCutTime -

-             pSettings->pClipList[i]->uiBeginCutTime;

-

-            mClipTotalDuration = mClipTotalDuration+iClipDuration;

-        }

-    }

-

-    if(mEffectsSettings == NULL) {

-        mNumberEffects = pSettings->nbEffects;

-        LOGV("loadEditSettings: mNumberEffects = %d", mNumberEffects);

-

-        if(mNumberEffects != 0) {

-            mEffectsSettings = (M4VSS3GPP_EffectSettings*)M4OSA_32bitAlignedMalloc(

-             mNumberEffects*sizeof(M4VSS3GPP_EffectSettings),

-             M4VS, (M4OSA_Char*)"effects settings");

-

-            if(mEffectsSettings == NULL) {

-                LOGE("loadEffectsSettings: Allocation error");

-                return M4ERR_ALLOC;

-            }

-

-            memset((void *)mEffectsSettings,0,

-             mNumberEffects*sizeof(M4VSS3GPP_EffectSettings));

-

-            for(i=0;i<mNumberEffects;i++) {

-

-                mEffectsSettings[i].xVSS.pFramingFilePath = NULL;

-                mEffectsSettings[i].xVSS.pFramingBuffer = NULL;

-                mEffectsSettings[i].xVSS.pTextBuffer = NULL;

-

-                memcpy((void *)&(mEffectsSettings[i]),

-                 (void *)&(pSettings->Effects[i]),

-                 sizeof(M4VSS3GPP_EffectSettings));

-

-                if(pSettings->Effects[i].VideoEffectType ==

-                 (M4VSS3GPP_VideoEffectType)M4xVSS_kVideoEffectType_Framing) {

-                    // Allocate the pFraming RGB buffer

-                    mEffectsSettings[i].xVSS.pFramingBuffer =

-                    (M4VIFI_ImagePlane *)M4OSA_32bitAlignedMalloc(sizeof(M4VIFI_ImagePlane),

-                     M4VS, (M4OSA_Char*)"lvpp framing buffer");

-

-                    if(mEffectsSettings[i].xVSS.pFramingBuffer == NULL) {

-                        LOGE("loadEffectsSettings:Alloc error for pFramingBuf");

-                        free(mEffectsSettings);

-                        mEffectsSettings = NULL;

-                        return M4ERR_ALLOC;

-                    }

-

-                    // Allocate the pac_data (RGB)

-                    if(pSettings->Effects[i].xVSS.rgbType == M4VSS3GPP_kRGB565){

-                        rgbSize =

-                         pSettings->Effects[i].xVSS.pFramingBuffer->u_width *

-                         pSettings->Effects[i].xVSS.pFramingBuffer->u_height*2;

-                    }

-                    else if(

-                     pSettings->Effects[i].xVSS.rgbType == M4VSS3GPP_kRGB888) {

-                        rgbSize =

-                         pSettings->Effects[i].xVSS.pFramingBuffer->u_width *

-                         pSettings->Effects[i].xVSS.pFramingBuffer->u_height*3;

-                    }

-                    else {

-                        LOGE("loadEffectsSettings: wrong RGB type");

-                        free(mEffectsSettings);

-                        mEffectsSettings = NULL;

-                        return M4ERR_PARAMETER;

-                    }

-

-                    tmp = (M4VIFI_UInt8 *)M4OSA_32bitAlignedMalloc(rgbSize, M4VS,

-                     (M4OSA_Char*)"framing buffer pac_data");

-

-                    if(tmp == NULL) {

-                        LOGE("loadEffectsSettings:Alloc error pFramingBuf pac");

-                        free(mEffectsSettings);

-                        mEffectsSettings = NULL;

-                        free(mEffectsSettings[i].xVSS.pFramingBuffer);

-

-                        mEffectsSettings[i].xVSS.pFramingBuffer = NULL;

-                        return M4ERR_ALLOC;

-                    }

-                    /* Initialize the pFramingBuffer*/

-                    mEffectsSettings[i].xVSS.pFramingBuffer->pac_data = tmp;

-                    mEffectsSettings[i].xVSS.pFramingBuffer->u_height =

-                     pSettings->Effects[i].xVSS.pFramingBuffer->u_height;

-

-                    mEffectsSettings[i].xVSS.pFramingBuffer->u_width =

-                     pSettings->Effects[i].xVSS.pFramingBuffer->u_width;

-

-                    mEffectsSettings[i].xVSS.pFramingBuffer->u_stride =

-                     pSettings->Effects[i].xVSS.pFramingBuffer->u_stride;

-

-                    mEffectsSettings[i].xVSS.pFramingBuffer->u_topleft =

-                     pSettings->Effects[i].xVSS.pFramingBuffer->u_topleft;

-

-                    mEffectsSettings[i].xVSS.uialphaBlendingStart =

-                     pSettings->Effects[i].xVSS.uialphaBlendingStart;

-

-                    mEffectsSettings[i].xVSS.uialphaBlendingMiddle =

-                     pSettings->Effects[i].xVSS.uialphaBlendingMiddle;

-

-                    mEffectsSettings[i].xVSS.uialphaBlendingEnd =

-                     pSettings->Effects[i].xVSS.uialphaBlendingEnd;

-

-                    mEffectsSettings[i].xVSS.uialphaBlendingFadeInTime =

-                     pSettings->Effects[i].xVSS.uialphaBlendingFadeInTime;

-                    mEffectsSettings[i].xVSS.uialphaBlendingFadeOutTime =

-                     pSettings->Effects[i].xVSS.uialphaBlendingFadeOutTime;

-

-                    // Copy the pFraming data

-                    memcpy((void *)

-                    mEffectsSettings[i].xVSS.pFramingBuffer->pac_data,

-                    (void *)pSettings->Effects[i].xVSS.pFramingBuffer->pac_data,

-                    rgbSize);

-

-                    mEffectsSettings[i].xVSS.rgbType =

-                     pSettings->Effects[i].xVSS.rgbType;

-                }

-            }

-        }

-    }

-

-    if (mBackgroundAudioSetting == NULL) {

-

-        mBackgroundAudioSetting = (M4xVSS_AudioMixingSettings*)M4OSA_32bitAlignedMalloc(

-        sizeof(M4xVSS_AudioMixingSettings), M4VS,

-        (M4OSA_Char*)"LvPP, copy of bgmSettings");

-

-        if(NULL == mBackgroundAudioSetting) {

-            LOGE("loadEditSettings: mBackgroundAudioSetting Malloc failed");

-            return M4ERR_ALLOC;

-        }

-

-        memset((void *)mBackgroundAudioSetting, 0,sizeof(M4xVSS_AudioMixingSettings*));

-        memcpy((void *)mBackgroundAudioSetting, (void *)bgmSettings, sizeof(M4xVSS_AudioMixingSettings));

-

-        if ( mBackgroundAudioSetting->pFile != M4OSA_NULL ) {

-

-            mBackgroundAudioSetting->pFile = (M4OSA_Void*) bgmSettings->pPCMFilePath;

-            mBackgroundAudioSetting->uiNbChannels = 2;

-            mBackgroundAudioSetting->uiSamplingFrequency = 32000;

-        }

-

-        // Open the BG file

-        if ( mBackgroundAudioSetting->pFile != M4OSA_NULL ) {

-            err = M4OSA_fileReadOpen(&mAudioMixPCMFileHandle,

-             mBackgroundAudioSetting->pFile, M4OSA_kFileRead);

-

-            if (err != M4NO_ERROR) {

-                LOGE("loadEditSettings: mBackgroundAudio PCM File open failed");

-                return M4ERR_PARAMETER;

-            }

-        }

-    }

-

-    mOutputVideoSize = pSettings->xVSS.outputVideoSize;

-    mFrameStr.pBuffer = M4OSA_NULL;

-    return M4NO_ERROR;

-}

-

-M4OSA_ERR VideoEditorPreviewController::setSurface(const sp<Surface> &surface) {

-    LOGV("setSurface");

-    Mutex::Autolock autoLock(mLock);

-

-    mSurface = surface;

-    return M4NO_ERROR;

-}

-

-M4OSA_ERR VideoEditorPreviewController::startPreview(

-    M4OSA_UInt32 fromMS, M4OSA_Int32 toMs, M4OSA_UInt16 callBackAfterFrameCount,

-    M4OSA_Bool loop) {

-

-    M4OSA_ERR err = M4NO_ERROR;

-    M4OSA_UInt32 i = 0, iIncrementedDuration = 0;

-    LOGV("startPreview");

-

-    if(fromMS > (M4OSA_UInt32)toMs) {

-        LOGE("startPreview: fromMS > toMs");

-        return M4ERR_PARAMETER;

-    }

-

-    if(toMs == 0) {

-        LOGE("startPreview: toMs is 0");

-        return M4ERR_PARAMETER;

-    }

-

-    // If already started, then stop preview first

-    for(int playerInst=0; playerInst<NBPLAYER_INSTANCES; playerInst++) {

-        if(mVePlayer[playerInst] != NULL) {

-            LOGV("startPreview: stopping previously started preview playback");

-            stopPreview();

-            break;

-        }

-    }

-

-    // If renderPreview was called previously, then delete Renderer object first

-    if(mTarget != NULL) {

-        LOGV("startPreview: delete previous PreviewRenderer");

-        delete mTarget;

-        mTarget = NULL;

-    }

-

-    // Create Audio player to be used for entire

-    // storyboard duration

-    mVEAudioSink = new VideoEditorPlayer::VeAudioOutput();

-    mVEAudioPlayer = new VideoEditorAudioPlayer(mVEAudioSink);

-    mVEAudioPlayer->setAudioMixSettings(mBackgroundAudioSetting);

-    mVEAudioPlayer->setAudioMixPCMFileHandle(mAudioMixPCMFileHandle);

-

-    LOGV("startPreview: loop = %d", loop);

-    mPreviewLooping = loop;

-

-    LOGV("startPreview: callBackAfterFrameCount = %d", callBackAfterFrameCount);

-    mCallBackAfterFrameCnt = callBackAfterFrameCount;

-

-    for (int playerInst=0; playerInst<NBPLAYER_INSTANCES; playerInst++) {

-        mVePlayer[playerInst] = new VideoEditorPlayer();

-        if(mVePlayer[playerInst] == NULL) {

-            LOGE("startPreview:Error creating VideoEditorPlayer %d",playerInst);

-            return M4ERR_ALLOC;

-        }

-        LOGV("startPreview: object created");

-

-        mVePlayer[playerInst]->setNotifyCallback(this,(notify_callback_f)notify);

-        LOGV("startPreview: notify callback set");

-

-        mVePlayer[playerInst]->loadEffectsSettings(mEffectsSettings,

-         mNumberEffects);

-        LOGV("startPreview: effects settings loaded");

-

-        mVePlayer[playerInst]->loadAudioMixSettings(mBackgroundAudioSetting);

-        LOGV("startPreview: AudioMixSettings settings loaded");

-

-        mVePlayer[playerInst]->setAudioMixPCMFileHandle(mAudioMixPCMFileHandle);

-        LOGV("startPreview: AudioMixPCMFileHandle set");

-

-        mVePlayer[playerInst]->setProgressCallbackInterval(

-         mCallBackAfterFrameCnt);

-        LOGV("startPreview: setProgressCallBackInterval");

-    }

-

-    mPlayerState = VePlayerIdle;

-    mPrepareReqest = M4OSA_FALSE;

-

-    if(fromMS == 0) {

-        mCurrentClipNumber = -1;

-        // Save original value

-        mFirstPreviewClipBeginTime = mClipList[0]->uiBeginCutTime;

-        mVideoStoryBoardTimeMsUptoFirstPreviewClip = 0;

-    }

-    else {

-        LOGV("startPreview: fromMS=%d", fromMS);

-        if(fromMS >= mClipTotalDuration) {

-            LOGE("startPreview: fromMS >= mClipTotalDuration");

-            return M4ERR_PARAMETER;

-        }

-        for(i=0;i<mNumberClipsInStoryBoard;i++) {

-            if(fromMS < (iIncrementedDuration + (mClipList[i]->uiEndCutTime -

-             mClipList[i]->uiBeginCutTime))) {

-                // Set to 1 index below,

-                // as threadProcess first increments the clip index

-                // and then processes clip in thread loop

-                mCurrentClipNumber = i-1;

-                LOGV("startPreview:mCurrentClipNumber = %d fromMS=%d",i,fromMS);

-

-                // Save original value

-                mFirstPreviewClipBeginTime = mClipList[i]->uiBeginCutTime;

-

-                // Set correct begin time to start playback

-                if((fromMS+mClipList[i]->uiBeginCutTime) >

-                (iIncrementedDuration+mClipList[i]->uiBeginCutTime)) {

-

-                    mClipList[i]->uiBeginCutTime =

-                     mClipList[i]->uiBeginCutTime +

-                     (fromMS - iIncrementedDuration);

-                }

-                break;

-            }

-            else {

-                iIncrementedDuration = iIncrementedDuration +

-                 (mClipList[i]->uiEndCutTime - mClipList[i]->uiBeginCutTime);

-            }

-        }

-        mVideoStoryBoardTimeMsUptoFirstPreviewClip = iIncrementedDuration;

-    }

-

-    for (int playerInst=0; playerInst<NBPLAYER_INSTANCES; playerInst++) {

-        mVePlayer[playerInst]->setAudioMixStoryBoardParam(fromMS,

-         mFirstPreviewClipBeginTime,

-         mClipList[i]->ClipProperties.uiClipAudioVolumePercentage);

-

-        LOGV("startPreview:setAudioMixStoryBoardSkimTimeStamp set %d cuttime \

-         %d", fromMS, mFirstPreviewClipBeginTime);

-    }

-

-    mStartingClipIndex = mCurrentClipNumber+1;

-

-    // Start playing with player instance 0

-    mCurrentPlayer = 0;

-    mActivePlayerIndex = 0;

-

-    if(toMs == -1) {

-        LOGV("startPreview: Preview till end of storyboard");

-        mNumberClipsToPreview = mNumberClipsInStoryBoard;

-        // Save original value

-        mLastPreviewClipEndTime =

-         mClipList[mNumberClipsToPreview-1]->uiEndCutTime;

-    }

-    else {

-        LOGV("startPreview: toMs=%d", toMs);

-        if((M4OSA_UInt32)toMs > mClipTotalDuration) {

-            LOGE("startPreview: toMs > mClipTotalDuration");

-            return M4ERR_PARAMETER;

-        }

-

-        iIncrementedDuration = 0;

-

-        for(i=0;i<mNumberClipsInStoryBoard;i++) {

-            if((M4OSA_UInt32)toMs <= (iIncrementedDuration +

-             (mClipList[i]->uiEndCutTime - mClipList[i]->uiBeginCutTime))) {

-                // Save original value

-                mLastPreviewClipEndTime = mClipList[i]->uiEndCutTime;

-                // Set the end cut time of clip index i to toMs

-                mClipList[i]->uiEndCutTime = toMs;

-

-                // Number of clips to be previewed is from index 0 to i

-                // increment by 1 as i starts from 0

-                mNumberClipsToPreview = i+1;

-                break;

-            }

-            else {

-                iIncrementedDuration = iIncrementedDuration +

-                 (mClipList[i]->uiEndCutTime - mClipList[i]->uiBeginCutTime);

-            }

-        }

-    }

-

-    // Open the thread semaphore

-    M4OSA_semaphoreOpen(&mSemThreadWait, 1);

-

-    // Open the preview process thread

-    err = M4OSA_threadSyncOpen(&mThreadContext, (M4OSA_ThreadDoIt)threadProc);

-    if (M4NO_ERROR != err) {

-        LOGE("VideoEditorPreviewController:M4OSA_threadSyncOpen error %d", (int) err);

-        return err;

-    }

-

-    // Set the stacksize

-    err = M4OSA_threadSyncSetOption(mThreadContext, M4OSA_ThreadStackSize,

-     (M4OSA_DataOption)PREVIEW_THREAD_STACK_SIZE);

-

-    if (M4NO_ERROR != err) {

-        LOGE("VideoEditorPreviewController: threadSyncSetOption error %d", (int) err);

-        M4OSA_threadSyncClose(mThreadContext);

-        mThreadContext = NULL;

-        return err;

-    }

-

-     // Start the thread

-     err = M4OSA_threadSyncStart(mThreadContext, (M4OSA_Void*)this);

-     if (M4NO_ERROR != err) {

-        LOGE("VideoEditorPreviewController: threadSyncStart error %d", (int) err);

-        M4OSA_threadSyncClose(mThreadContext);

-        mThreadContext = NULL;

-        return err;

-    }

-    bStopThreadInProgress = false;

-

-    LOGV("startPreview: process thread started");

-    return M4NO_ERROR;

-}

-

-M4OSA_UInt32 VideoEditorPreviewController::stopPreview() {

-    M4OSA_ERR err = M4NO_ERROR;

-    uint32_t lastRenderedFrameTimeMs = 0;

-    LOGV("stopPreview");

-

-    // Stop the thread

-    if(mThreadContext != NULL) {

-        bStopThreadInProgress = true;

-        {

-            Mutex::Autolock autoLock(mLockSem);

-            if (mSemThreadWait != NULL) {

-                err = M4OSA_semaphorePost(mSemThreadWait);

-            }

-        }

-

-        err = M4OSA_threadSyncStop(mThreadContext);

-        if(err != M4NO_ERROR) {

-            LOGV("stopPreview: error 0x%x in trying to stop thread", err);

-            // Continue even if error

-        }

-

-        err = M4OSA_threadSyncClose(mThreadContext);

-        if(err != M4NO_ERROR) {

-            LOGE("stopPreview: error 0x%x in trying to close thread", (unsigned int)err);

-            // Continue even if error

-        }

-

-        mThreadContext = NULL;

-    }

-

-    // Close the semaphore first

-    {

-        Mutex::Autolock autoLock(mLockSem);

-        if(mSemThreadWait != NULL) {

-            err = M4OSA_semaphoreClose(mSemThreadWait);

-            LOGV("stopPreview: close semaphore returns 0x%x", err);

-            mSemThreadWait = NULL;

-        }

-    }

-

-    for (int playerInst=0; playerInst<NBPLAYER_INSTANCES; playerInst++) {

-        if(mVePlayer[playerInst] != NULL) {

-            if(mVePlayer[playerInst]->isPlaying()) {

-                LOGV("stop the player first");

-                mVePlayer[playerInst]->stop();

-            }

-            if (playerInst == mActivePlayerIndex) {

-                // Return the last rendered frame time stamp

-                mVePlayer[mActivePlayerIndex]->getLastRenderedTimeMs(&lastRenderedFrameTimeMs);

-            }

+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_NDEBUG 1
+#define LOG_TAG "VideoEditorPreviewController"
+#include "VideoEditorPreviewController.h"
+
+namespace android {
+
+#define PREVIEW_THREAD_STACK_SIZE                           (65536)
+
+VideoEditorPreviewController::VideoEditorPreviewController()
+    : mCurrentPlayer(0),
+      mThreadContext(NULL),
+      mPlayerState(VePlayerIdle),
+      mPrepareReqest(M4OSA_FALSE),
+      mClipList(NULL),
+      mNumberClipsInStoryBoard(0),
+      mNumberClipsToPreview(0),
+      mStartingClipIndex(0),
+      mPreviewLooping(M4OSA_FALSE),
+      mCallBackAfterFrameCnt(0),
+      mEffectsSettings(NULL),
+      mNumberEffects(0),
+      mCurrentClipNumber(-1),
+      mClipTotalDuration(0),
+      mCurrentVideoEffect(VIDEO_EFFECT_NONE),
+      mBackgroundAudioSetting(NULL),
+      mAudioMixPCMFileHandle(NULL),
+      mTarget(NULL),
+      mJniCookie(NULL),
+      mJniCallback(NULL),
+      mCurrentPlayedDuration(0),
+      mCurrentClipDuration(0),
+      mVideoStoryBoardTimeMsUptoFirstPreviewClip(0),
+      mOverlayState(OVERLAY_CLEAR),
+      mActivePlayerIndex(0),
+      mOutputVideoWidth(0),
+      mOutputVideoHeight(0),
+      bStopThreadInProgress(false),
+      mSemThreadWait(NULL) {
+    LOGV("VideoEditorPreviewController");
+    mRenderingMode = M4xVSS_kBlackBorders;
+    mIsFiftiesEffectStarted = false;
+
+    for (int i=0; i<NBPLAYER_INSTANCES; i++) {
+        mVePlayer[i] = NULL;
+    }
+}
+
+VideoEditorPreviewController::~VideoEditorPreviewController() {
+    M4OSA_UInt32 i = 0;
+    M4OSA_ERR err = M4NO_ERROR;
+    LOGV("~VideoEditorPreviewController");
+
+    // Stop the thread if its still running
+    if(mThreadContext != NULL) {
+        err = M4OSA_threadSyncStop(mThreadContext);
+        if(err != M4NO_ERROR) {
+            LOGV("~VideoEditorPreviewController: error 0x%x \
+            in trying to stop thread", err);
+            // Continue even if error
+        }
+
+        err = M4OSA_threadSyncClose(mThreadContext);
+        if(err != M4NO_ERROR) {
+            LOGE("~VideoEditorPreviewController: error 0x%x \
+            in trying to close thread", (unsigned int) err);
+            // Continue even if error
+        }
+
+        mThreadContext = NULL;
+    }
+
+    for (int playerInst=0; playerInst<NBPLAYER_INSTANCES;
+         playerInst++) {
+        if(mVePlayer[playerInst] != NULL) {
+            LOGV("clearing mVePlayer %d", playerInst);
+            mVePlayer[playerInst].clear();
+        }
+    }
+
+    if(mClipList != NULL) {
+        // Clean up
+        for(i=0;i<mNumberClipsInStoryBoard;i++)
+        {
+            if(mClipList[i]->pFile != NULL) {
+                free(mClipList[i]->pFile);
+                mClipList[i]->pFile = NULL;
+            }
+
+            free(mClipList[i]);
+        }
+        free(mClipList);
+        mClipList = NULL;
+    }
+
+    if(mEffectsSettings) {
+        for(i=0;i<mNumberEffects;i++) {
+            if(mEffectsSettings[i].xVSS.pFramingBuffer != NULL) {
+                free(mEffectsSettings[i].xVSS.pFramingBuffer->pac_data);
+
+                free(mEffectsSettings[i].xVSS.pFramingBuffer);
+
+                mEffectsSettings[i].xVSS.pFramingBuffer = NULL;
+            }
+        }
+        free(mEffectsSettings);
+        mEffectsSettings = NULL;
+    }
+
+    if (mAudioMixPCMFileHandle) {
+        err = M4OSA_fileReadClose (mAudioMixPCMFileHandle);
+        mAudioMixPCMFileHandle = M4OSA_NULL;
+    }
+
+    if (mBackgroundAudioSetting != NULL) {
+        free(mBackgroundAudioSetting);
+        mBackgroundAudioSetting = NULL;
+    }
+
+    if(mTarget != NULL) {
+        delete mTarget;
+        mTarget = NULL;
+    }
+
+    mOverlayState = OVERLAY_CLEAR;
+
+    LOGV("~VideoEditorPreviewController returns");
+}
+
+M4OSA_ERR VideoEditorPreviewController::loadEditSettings(
+    M4VSS3GPP_EditSettings* pSettings,M4xVSS_AudioMixingSettings* bgmSettings) {
+
+    M4OSA_UInt32 i = 0, iClipDuration = 0, rgbSize = 0;
+    M4VIFI_UInt8 *tmp = NULL;
+    M4OSA_ERR err = M4NO_ERROR;
+
+    LOGV("loadEditSettings");
+    LOGV("loadEditSettings Channels = %d, sampling Freq %d",
+          bgmSettings->uiNbChannels, bgmSettings->uiSamplingFrequency  );
+          bgmSettings->uiSamplingFrequency = 32000;
+
+    LOGV("loadEditSettings Channels = %d, sampling Freq %d",
+          bgmSettings->uiNbChannels, bgmSettings->uiSamplingFrequency  );
+    Mutex::Autolock autoLock(mLock);
+
+    // Clean up any previous Edit settings before loading new ones
+    mCurrentVideoEffect = VIDEO_EFFECT_NONE;
+
+    if(mAudioMixPCMFileHandle) {
+        err = M4OSA_fileReadClose (mAudioMixPCMFileHandle);
+        mAudioMixPCMFileHandle = M4OSA_NULL;
+    }
+
+    if(mBackgroundAudioSetting != NULL) {
+        free(mBackgroundAudioSetting);
+        mBackgroundAudioSetting = NULL;
+    }
+
+    if(mClipList != NULL) {
+        // Clean up
+        for(i=0;i<mNumberClipsInStoryBoard;i++)
+        {
+            if(mClipList[i]->pFile != NULL) {
+                free(mClipList[i]->pFile);
+                mClipList[i]->pFile = NULL;
+            }
+
+            free(mClipList[i]);
+        }
+        free(mClipList);
+        mClipList = NULL;
+    }
+
+    if(mEffectsSettings) {
+        for(i=0;i<mNumberEffects;i++) {
+            if(mEffectsSettings[i].xVSS.pFramingBuffer != NULL) {
+                free(mEffectsSettings[i].xVSS.pFramingBuffer->pac_data);
+
+                free(mEffectsSettings[i].xVSS.pFramingBuffer);
+
+                mEffectsSettings[i].xVSS.pFramingBuffer = NULL;
+            }
+        }
+        free(mEffectsSettings);
+        mEffectsSettings = NULL;
+    }
+
+    if(mClipList == NULL) {
+        mNumberClipsInStoryBoard = pSettings->uiClipNumber;
+        LOGV("loadEditSettings: # of Clips = %d", mNumberClipsInStoryBoard);
+
+        mClipList = (M4VSS3GPP_ClipSettings**)M4OSA_32bitAlignedMalloc(
+         sizeof(M4VSS3GPP_ClipSettings*)*pSettings->uiClipNumber, M4VS,
+         (M4OSA_Char*)"LvPP, copy of pClipList");
+
+        if(NULL == mClipList) {
+            LOGE("loadEditSettings: Malloc error");
+            return M4ERR_ALLOC;
+        }
+        memset((void *)mClipList,0,
+         sizeof(M4VSS3GPP_ClipSettings*)*pSettings->uiClipNumber);
+
+        for(i=0;i<pSettings->uiClipNumber;i++) {
+
+            // Allocate current clip
+            mClipList[i] =
+             (M4VSS3GPP_ClipSettings*)M4OSA_32bitAlignedMalloc(
+              sizeof(M4VSS3GPP_ClipSettings),M4VS,(M4OSA_Char*)"clip settings");
+
+            if(mClipList[i] == NULL) {
+
+                LOGE("loadEditSettings: Allocation error for mClipList[%d]", (int)i);
+                return M4ERR_ALLOC;
+            }
+            // Copy plain structure
+            memcpy((void *)mClipList[i],
+             (void *)pSettings->pClipList[i],
+             sizeof(M4VSS3GPP_ClipSettings));
+
+            if(NULL != pSettings->pClipList[i]->pFile) {
+                mClipList[i]->pFile = (M4OSA_Char*)M4OSA_32bitAlignedMalloc(
+                pSettings->pClipList[i]->filePathSize, M4VS,
+                (M4OSA_Char*)"pClipSettingsDest->pFile");
+
+                if(NULL == mClipList[i]->pFile)
+                {
+                    LOGE("loadEditSettings : ERROR allocating filename");
+                    return M4ERR_ALLOC;
+                }
+
+                memcpy((void *)mClipList[i]->pFile,
+                 (void *)pSettings->pClipList[i]->pFile,
+                 pSettings->pClipList[i]->filePathSize);
+            }
+            else {
+                LOGE("NULL file path");
+                return M4ERR_PARAMETER;
+            }
+
+            // Calculate total duration of all clips
+            iClipDuration = pSettings->pClipList[i]->uiEndCutTime -
+             pSettings->pClipList[i]->uiBeginCutTime;
+
+            mClipTotalDuration = mClipTotalDuration+iClipDuration;
+        }
+    }
+
+    if(mEffectsSettings == NULL) {
+        mNumberEffects = pSettings->nbEffects;
+        LOGV("loadEditSettings: mNumberEffects = %d", mNumberEffects);
+
+        if(mNumberEffects != 0) {
+            mEffectsSettings = (M4VSS3GPP_EffectSettings*)M4OSA_32bitAlignedMalloc(
+             mNumberEffects*sizeof(M4VSS3GPP_EffectSettings),
+             M4VS, (M4OSA_Char*)"effects settings");
+
+            if(mEffectsSettings == NULL) {
+                LOGE("loadEffectsSettings: Allocation error");
+                return M4ERR_ALLOC;
+            }
+
+            memset((void *)mEffectsSettings,0,
+             mNumberEffects*sizeof(M4VSS3GPP_EffectSettings));
+
+            for(i=0;i<mNumberEffects;i++) {
+
+                mEffectsSettings[i].xVSS.pFramingFilePath = NULL;
+                mEffectsSettings[i].xVSS.pFramingBuffer = NULL;
+                mEffectsSettings[i].xVSS.pTextBuffer = NULL;
+
+                memcpy((void *)&(mEffectsSettings[i]),
+                 (void *)&(pSettings->Effects[i]),
+                 sizeof(M4VSS3GPP_EffectSettings));
+
+                if(pSettings->Effects[i].VideoEffectType ==
+                 (M4VSS3GPP_VideoEffectType)M4xVSS_kVideoEffectType_Framing) {
+                    // Allocate the pFraming RGB buffer
+                    mEffectsSettings[i].xVSS.pFramingBuffer =
+                    (M4VIFI_ImagePlane *)M4OSA_32bitAlignedMalloc(sizeof(M4VIFI_ImagePlane),
+                     M4VS, (M4OSA_Char*)"lvpp framing buffer");
+
+                    if(mEffectsSettings[i].xVSS.pFramingBuffer == NULL) {
+                        LOGE("loadEffectsSettings:Alloc error for pFramingBuf");
+                        free(mEffectsSettings);
+                        mEffectsSettings = NULL;
+                        return M4ERR_ALLOC;
+                    }
+
+                    // Allocate the pac_data (RGB)
+                    if(pSettings->Effects[i].xVSS.rgbType == M4VSS3GPP_kRGB565){
+                        rgbSize =
+                         pSettings->Effects[i].xVSS.pFramingBuffer->u_width *
+                         pSettings->Effects[i].xVSS.pFramingBuffer->u_height*2;
+                    }
+                    else if(
+                     pSettings->Effects[i].xVSS.rgbType == M4VSS3GPP_kRGB888) {
+                        rgbSize =
+                         pSettings->Effects[i].xVSS.pFramingBuffer->u_width *
+                         pSettings->Effects[i].xVSS.pFramingBuffer->u_height*3;
+                    }
+                    else {
+                        LOGE("loadEffectsSettings: wrong RGB type");
+                        free(mEffectsSettings);
+                        mEffectsSettings = NULL;
+                        return M4ERR_PARAMETER;
+                    }
+
+                    tmp = (M4VIFI_UInt8 *)M4OSA_32bitAlignedMalloc(rgbSize, M4VS,
+                     (M4OSA_Char*)"framing buffer pac_data");
+
+                    if(tmp == NULL) {
+                        LOGE("loadEffectsSettings:Alloc error pFramingBuf pac");
+                        free(mEffectsSettings);
+                        mEffectsSettings = NULL;
+                        free(mEffectsSettings[i].xVSS.pFramingBuffer);
+
+                        mEffectsSettings[i].xVSS.pFramingBuffer = NULL;
+                        return M4ERR_ALLOC;
+                    }
+                    /* Initialize the pFramingBuffer*/
+                    mEffectsSettings[i].xVSS.pFramingBuffer->pac_data = tmp;
+                    mEffectsSettings[i].xVSS.pFramingBuffer->u_height =
+                     pSettings->Effects[i].xVSS.pFramingBuffer->u_height;
+
+                    mEffectsSettings[i].xVSS.pFramingBuffer->u_width =
+                     pSettings->Effects[i].xVSS.pFramingBuffer->u_width;
+
+                    mEffectsSettings[i].xVSS.pFramingBuffer->u_stride =
+                     pSettings->Effects[i].xVSS.pFramingBuffer->u_stride;
+
+                    mEffectsSettings[i].xVSS.pFramingBuffer->u_topleft =
+                     pSettings->Effects[i].xVSS.pFramingBuffer->u_topleft;
+
+                    mEffectsSettings[i].xVSS.uialphaBlendingStart =
+                     pSettings->Effects[i].xVSS.uialphaBlendingStart;
+
+                    mEffectsSettings[i].xVSS.uialphaBlendingMiddle =
+                     pSettings->Effects[i].xVSS.uialphaBlendingMiddle;
+
+                    mEffectsSettings[i].xVSS.uialphaBlendingEnd =
+                     pSettings->Effects[i].xVSS.uialphaBlendingEnd;
+
+                    mEffectsSettings[i].xVSS.uialphaBlendingFadeInTime =
+                     pSettings->Effects[i].xVSS.uialphaBlendingFadeInTime;
+                    mEffectsSettings[i].xVSS.uialphaBlendingFadeOutTime =
+                     pSettings->Effects[i].xVSS.uialphaBlendingFadeOutTime;
+
+                    // Copy the pFraming data
+                    memcpy((void *)
+                    mEffectsSettings[i].xVSS.pFramingBuffer->pac_data,
+                    (void *)pSettings->Effects[i].xVSS.pFramingBuffer->pac_data,
+                    rgbSize);
+
+                    mEffectsSettings[i].xVSS.rgbType =
+                     pSettings->Effects[i].xVSS.rgbType;
+                }
+            }
+        }
+    }
+
+    if (mBackgroundAudioSetting == NULL) {
+
+        mBackgroundAudioSetting = (M4xVSS_AudioMixingSettings*)M4OSA_32bitAlignedMalloc(
+        sizeof(M4xVSS_AudioMixingSettings), M4VS,
+        (M4OSA_Char*)"LvPP, copy of bgmSettings");
+
+        if(NULL == mBackgroundAudioSetting) {
+            LOGE("loadEditSettings: mBackgroundAudioSetting Malloc failed");
+            return M4ERR_ALLOC;
+        }
+
+        memset((void *)mBackgroundAudioSetting, 0,sizeof(M4xVSS_AudioMixingSettings*));
+        memcpy((void *)mBackgroundAudioSetting, (void *)bgmSettings, sizeof(M4xVSS_AudioMixingSettings));
+
+        if ( mBackgroundAudioSetting->pFile != M4OSA_NULL ) {
+
+            mBackgroundAudioSetting->pFile = (M4OSA_Void*) bgmSettings->pPCMFilePath;
+            mBackgroundAudioSetting->uiNbChannels = 2;
+            mBackgroundAudioSetting->uiSamplingFrequency = 32000;
+        }
+
+        // Open the BG file
+        if ( mBackgroundAudioSetting->pFile != M4OSA_NULL ) {
+            err = M4OSA_fileReadOpen(&mAudioMixPCMFileHandle,
+             mBackgroundAudioSetting->pFile, M4OSA_kFileRead);
+
+            if (err != M4NO_ERROR) {
+                LOGE("loadEditSettings: mBackgroundAudio PCM File open failed");
+                return M4ERR_PARAMETER;
+            }
+        }
+    }
+
+    mOutputVideoSize = pSettings->xVSS.outputVideoSize;
+    mFrameStr.pBuffer = M4OSA_NULL;
+    return M4NO_ERROR;
+}
+
+M4OSA_ERR VideoEditorPreviewController::setSurface(const sp<Surface> &surface) {
+    LOGV("setSurface");
+    Mutex::Autolock autoLock(mLock);
+
+    mSurface = surface;
+    return M4NO_ERROR;
+}
+
+M4OSA_ERR VideoEditorPreviewController::startPreview(
+    M4OSA_UInt32 fromMS, M4OSA_Int32 toMs, M4OSA_UInt16 callBackAfterFrameCount,
+    M4OSA_Bool loop) {
+
+    M4OSA_ERR err = M4NO_ERROR;
+    M4OSA_UInt32 i = 0, iIncrementedDuration = 0;
+    LOGV("startPreview");
+
+    if(fromMS > (M4OSA_UInt32)toMs) {
+        LOGE("startPreview: fromMS > toMs");
+        return M4ERR_PARAMETER;
+    }
+
+    if(toMs == 0) {
+        LOGE("startPreview: toMs is 0");
+        return M4ERR_PARAMETER;
+    }
+
+    // If already started, then stop preview first
+    for(int playerInst=0; playerInst<NBPLAYER_INSTANCES; playerInst++) {
+        if(mVePlayer[playerInst] != NULL) {
+            LOGV("startPreview: stopping previously started preview playback");
+            stopPreview();
+            break;
+        }
+    }
+
+    // If renderPreview was called previously, then delete Renderer object first
+    if(mTarget != NULL) {
+        LOGV("startPreview: delete previous PreviewRenderer");
+        delete mTarget;
+        mTarget = NULL;
+    }
+
+    // Create Audio player to be used for entire
+    // storyboard duration
+    mVEAudioSink = new VideoEditorPlayer::VeAudioOutput();
+    mVEAudioPlayer = new VideoEditorAudioPlayer(mVEAudioSink);
+    mVEAudioPlayer->setAudioMixSettings(mBackgroundAudioSetting);
+    mVEAudioPlayer->setAudioMixPCMFileHandle(mAudioMixPCMFileHandle);
+
+    LOGV("startPreview: loop = %d", loop);
+    mPreviewLooping = loop;
+
+    LOGV("startPreview: callBackAfterFrameCount = %d", callBackAfterFrameCount);
+    mCallBackAfterFrameCnt = callBackAfterFrameCount;
+
+    for (int playerInst=0; playerInst<NBPLAYER_INSTANCES; playerInst++) {
+        mVePlayer[playerInst] = new VideoEditorPlayer();
+        if(mVePlayer[playerInst] == NULL) {
+            LOGE("startPreview:Error creating VideoEditorPlayer %d",playerInst);
+            return M4ERR_ALLOC;
+        }
+        LOGV("startPreview: object created");
+
+        mVePlayer[playerInst]->setNotifyCallback(this,(notify_callback_f)notify);
+        LOGV("startPreview: notify callback set");
+
+        mVePlayer[playerInst]->loadEffectsSettings(mEffectsSettings,
+         mNumberEffects);
+        LOGV("startPreview: effects settings loaded");
+
+        mVePlayer[playerInst]->loadAudioMixSettings(mBackgroundAudioSetting);
+        LOGV("startPreview: AudioMixSettings settings loaded");
+
+        mVePlayer[playerInst]->setAudioMixPCMFileHandle(mAudioMixPCMFileHandle);
+        LOGV("startPreview: AudioMixPCMFileHandle set");
+
+        mVePlayer[playerInst]->setProgressCallbackInterval(
+         mCallBackAfterFrameCnt);
+        LOGV("startPreview: setProgressCallBackInterval");
+    }
+
+    mPlayerState = VePlayerIdle;
+    mPrepareReqest = M4OSA_FALSE;
+
+    if(fromMS == 0) {
+        mCurrentClipNumber = -1;
+        // Save original value
+        mFirstPreviewClipBeginTime = mClipList[0]->uiBeginCutTime;
+        mVideoStoryBoardTimeMsUptoFirstPreviewClip = 0;
+    }
+    else {
+        LOGV("startPreview: fromMS=%d", fromMS);
+        if(fromMS >= mClipTotalDuration) {
+            LOGE("startPreview: fromMS >= mClipTotalDuration");
+            return M4ERR_PARAMETER;
+        }
+        for(i=0;i<mNumberClipsInStoryBoard;i++) {
+            if(fromMS < (iIncrementedDuration + (mClipList[i]->uiEndCutTime -
+             mClipList[i]->uiBeginCutTime))) {
+                // Set to 1 index below,
+                // as threadProcess first increments the clip index
+                // and then processes clip in thread loop
+                mCurrentClipNumber = i-1;
+                LOGV("startPreview:mCurrentClipNumber = %d fromMS=%d",i,fromMS);
+
+                // Save original value
+                mFirstPreviewClipBeginTime = mClipList[i]->uiBeginCutTime;
+
+                // Set correct begin time to start playback
+                if((fromMS+mClipList[i]->uiBeginCutTime) >
+                (iIncrementedDuration+mClipList[i]->uiBeginCutTime)) {
+
+                    mClipList[i]->uiBeginCutTime =
+                     mClipList[i]->uiBeginCutTime +
+                     (fromMS - iIncrementedDuration);
+                }
+                break;
+            }
+            else {
+                iIncrementedDuration = iIncrementedDuration +
+                 (mClipList[i]->uiEndCutTime - mClipList[i]->uiBeginCutTime);
+            }
+        }
+        mVideoStoryBoardTimeMsUptoFirstPreviewClip = iIncrementedDuration;
+    }
+
+    for (int playerInst=0; playerInst<NBPLAYER_INSTANCES; playerInst++) {
+        mVePlayer[playerInst]->setAudioMixStoryBoardParam(fromMS,
+         mFirstPreviewClipBeginTime,
+         mClipList[i]->ClipProperties.uiClipAudioVolumePercentage);
+
+        LOGV("startPreview:setAudioMixStoryBoardSkimTimeStamp set %d cuttime \
+         %d", fromMS, mFirstPreviewClipBeginTime);
+    }
+
+    mStartingClipIndex = mCurrentClipNumber+1;
+
+    // Start playing with player instance 0
+    mCurrentPlayer = 0;
+    mActivePlayerIndex = 0;
+
+    if(toMs == -1) {
+        LOGV("startPreview: Preview till end of storyboard");
+        mNumberClipsToPreview = mNumberClipsInStoryBoard;
+        // Save original value
+        mLastPreviewClipEndTime =
+         mClipList[mNumberClipsToPreview-1]->uiEndCutTime;
+    }
+    else {
+        LOGV("startPreview: toMs=%d", toMs);
+        if((M4OSA_UInt32)toMs > mClipTotalDuration) {
+            LOGE("startPreview: toMs > mClipTotalDuration");
+            return M4ERR_PARAMETER;
+        }
+
+        iIncrementedDuration = 0;
+
+        for(i=0;i<mNumberClipsInStoryBoard;i++) {
+            if((M4OSA_UInt32)toMs <= (iIncrementedDuration +
+             (mClipList[i]->uiEndCutTime - mClipList[i]->uiBeginCutTime))) {
+                // Save original value
+                mLastPreviewClipEndTime = mClipList[i]->uiEndCutTime;
+                // Set the end cut time of clip index i to toMs
+                mClipList[i]->uiEndCutTime = toMs;
+
+                // Number of clips to be previewed is from index 0 to i
+                // increment by 1 as i starts from 0
+                mNumberClipsToPreview = i+1;
+                break;
+            }
+            else {
+                iIncrementedDuration = iIncrementedDuration +
+                 (mClipList[i]->uiEndCutTime - mClipList[i]->uiBeginCutTime);
+            }
+        }
+    }
+
+    // Open the thread semaphore
+    M4OSA_semaphoreOpen(&mSemThreadWait, 1);
+
+    // Open the preview process thread
+    err = M4OSA_threadSyncOpen(&mThreadContext, (M4OSA_ThreadDoIt)threadProc);
+    if (M4NO_ERROR != err) {
+        LOGE("VideoEditorPreviewController:M4OSA_threadSyncOpen error %d", (int) err);
+        return err;
+    }
+
+    // Set the stacksize
+    err = M4OSA_threadSyncSetOption(mThreadContext, M4OSA_ThreadStackSize,
+     (M4OSA_DataOption)PREVIEW_THREAD_STACK_SIZE);
+
+    if (M4NO_ERROR != err) {
+        LOGE("VideoEditorPreviewController: threadSyncSetOption error %d", (int) err);
+        M4OSA_threadSyncClose(mThreadContext);
+        mThreadContext = NULL;
+        return err;
+    }
+
+     // Start the thread
+     err = M4OSA_threadSyncStart(mThreadContext, (M4OSA_Void*)this);
+     if (M4NO_ERROR != err) {
+        LOGE("VideoEditorPreviewController: threadSyncStart error %d", (int) err);
+        M4OSA_threadSyncClose(mThreadContext);
+        mThreadContext = NULL;
+        return err;
+    }
+    bStopThreadInProgress = false;
+
+    LOGV("startPreview: process thread started");
+    return M4NO_ERROR;
+}
+
+M4OSA_UInt32 VideoEditorPreviewController::stopPreview() {
+    M4OSA_ERR err = M4NO_ERROR;
+    uint32_t lastRenderedFrameTimeMs = 0;
+    LOGV("stopPreview");
+
+    // Stop the thread
+    if(mThreadContext != NULL) {
+        bStopThreadInProgress = true;
+        {
+            Mutex::Autolock autoLock(mLockSem);
+            if (mSemThreadWait != NULL) {
+                err = M4OSA_semaphorePost(mSemThreadWait);
+            }
+        }
+
+        err = M4OSA_threadSyncStop(mThreadContext);
+        if(err != M4NO_ERROR) {
+            LOGV("stopPreview: error 0x%x in trying to stop thread", err);
+            // Continue even if error
+        }
+
+        err = M4OSA_threadSyncClose(mThreadContext);
+        if(err != M4NO_ERROR) {
+            LOGE("stopPreview: error 0x%x in trying to close thread", (unsigned int)err);
+            // Continue even if error
+        }
+
+        mThreadContext = NULL;
+    }
+
+    // Close the semaphore first
+    {
+        Mutex::Autolock autoLock(mLockSem);
+        if(mSemThreadWait != NULL) {
+            err = M4OSA_semaphoreClose(mSemThreadWait);
+            LOGV("stopPreview: close semaphore returns 0x%x", err);
+            mSemThreadWait = NULL;
+        }
+    }
+
+    for (int playerInst=0; playerInst<NBPLAYER_INSTANCES; playerInst++) {
+        if(mVePlayer[playerInst] != NULL) {
+            if(mVePlayer[playerInst]->isPlaying()) {
+                LOGV("stop the player first");
+                mVePlayer[playerInst]->stop();
+            }
+            if (playerInst == mActivePlayerIndex) {
+                // Return the last rendered frame time stamp
+                mVePlayer[mActivePlayerIndex]->getLastRenderedTimeMs(&lastRenderedFrameTimeMs);
+            }
 
             //This is used to syncronize onStreamDone() in PreviewPlayer and
-            //stopPreview() in PreviewController

+            //stopPreview() in PreviewController
             sp<VideoEditorPlayer> temp = mVePlayer[playerInst];
             temp->acquireLock();
             LOGV("stopPreview: clearing mVePlayer");
             mVePlayer[playerInst].clear();
-            mVePlayer[playerInst] = NULL;

+            mVePlayer[playerInst] = NULL;
             temp->releaseLock();
-        }

-    }

-    LOGV("stopPreview: clear audioSink and audioPlayer");

-    mVEAudioSink.clear();

-    if (mVEAudioPlayer) {

-        delete mVEAudioPlayer;

-        mVEAudioPlayer = NULL;

-    }

-

-    // If image file playing, then free the buffer pointer

-    if(mFrameStr.pBuffer != M4OSA_NULL) {

-        free(mFrameStr.pBuffer);

-        mFrameStr.pBuffer = M4OSA_NULL;

-    }

-

-    // Reset original begin cuttime of first previewed clip*/

-    mClipList[mStartingClipIndex]->uiBeginCutTime = mFirstPreviewClipBeginTime;

-    // Reset original end cuttime of last previewed clip*/

-    mClipList[mNumberClipsToPreview-1]->uiEndCutTime = mLastPreviewClipEndTime;

-

-    mPlayerState = VePlayerIdle;

-    mPrepareReqest = M4OSA_FALSE;

-

-    mCurrentPlayedDuration = 0;

-    mCurrentClipDuration = 0;

-    mRenderingMode = M4xVSS_kBlackBorders;

-    mOutputVideoWidth = 0;

-    mOutputVideoHeight = 0;

-

-    LOGV("stopPreview() lastRenderedFrameTimeMs %ld", lastRenderedFrameTimeMs);

-    return lastRenderedFrameTimeMs;

-}

-

-M4OSA_ERR VideoEditorPreviewController::clearSurface(

-    const sp<Surface> &surface, VideoEditor_renderPreviewFrameStr* pFrameInfo) {

-

-    M4OSA_ERR err = M4NO_ERROR;

-    VideoEditor_renderPreviewFrameStr* pFrameStr = pFrameInfo;

-    M4OSA_UInt32 outputBufferWidth =0, outputBufferHeight=0;

-    M4VIFI_ImagePlane planeOut[3];

-    LOGV("Inside preview clear frame");

-

-    Mutex::Autolock autoLock(mLock);

-

-    // Delete previous renderer instance

-    if(mTarget != NULL) {

-        delete mTarget;

-        mTarget = NULL;

-    }

-

-    outputBufferWidth = pFrameStr->uiFrameWidth;

-    outputBufferHeight = pFrameStr->uiFrameHeight;

-

-    // Initialize the renderer

-    if(mTarget == NULL) {

-

-        mTarget = PreviewRenderer::CreatePreviewRenderer(

-            OMX_COLOR_FormatYUV420Planar, surface, outputBufferWidth, outputBufferHeight,

-            outputBufferWidth, outputBufferHeight, 0);

-

-        if(mTarget == NULL) {

-            LOGE("renderPreviewFrame: cannot create PreviewRenderer");

-            return M4ERR_ALLOC;

-        }

-    }

-

-    // Out plane

-    uint8_t* outBuffer;

-    size_t outBufferStride = 0;

-

-    LOGV("doMediaRendering CALL getBuffer()");

-    mTarget->getBufferYV12(&outBuffer, &outBufferStride);

-

-    // Set the output YUV420 plane to be compatible with YV12 format

-    //In YV12 format, sizes must be even

-    M4OSA_UInt32 yv12PlaneWidth = ((outputBufferWidth +1)>>1)<<1;

-    M4OSA_UInt32 yv12PlaneHeight = ((outputBufferHeight+1)>>1)<<1;

-

-    prepareYV12ImagePlane(planeOut, yv12PlaneWidth, yv12PlaneHeight,

-     (M4OSA_UInt32)outBufferStride, (M4VIFI_UInt8 *)outBuffer);

-

-    /* Fill the surface with black frame */

-    memset((void *)planeOut[0].pac_data,0x00,planeOut[0].u_width *

-                            planeOut[0].u_height * 1.5);

-    memset((void *)planeOut[1].pac_data,128,planeOut[1].u_width *

-                            planeOut[1].u_height);

-    memset((void *)planeOut[2].pac_data,128,planeOut[2].u_width *

-                             planeOut[2].u_height);

-

-    mTarget->renderYV12();

-    return err;

-}

-

-M4OSA_ERR VideoEditorPreviewController::renderPreviewFrame(

-            const sp<Surface> &surface,

-            VideoEditor_renderPreviewFrameStr* pFrameInfo,

-            VideoEditorCurretEditInfo *pCurrEditInfo) {

-

-    M4OSA_ERR err = M4NO_ERROR;

-    M4OSA_UInt32 i = 0, iIncrementedDuration = 0, tnTimeMs=0, framesize =0;

-    VideoEditor_renderPreviewFrameStr* pFrameStr = pFrameInfo;

-    M4VIFI_UInt8 *pixelArray = NULL;

-    Mutex::Autolock autoLock(mLock);

-

-    if (pCurrEditInfo != NULL) {

-        pCurrEditInfo->overlaySettingsIndex = -1;

-    }

-    // Delete previous renderer instance

-    if(mTarget != NULL) {

-        delete mTarget;

-        mTarget = NULL;

-    }

-

-    if(mOutputVideoWidth == 0) {

-        mOutputVideoWidth = pFrameStr->uiFrameWidth;

-    }

-

-    if(mOutputVideoHeight == 0) {

-        mOutputVideoHeight = pFrameStr->uiFrameHeight;

-    }

-

-    // Initialize the renderer

-    if(mTarget == NULL) {

-        /*mTarget = new PreviewRenderer(

-            OMX_COLOR_FormatYUV420Planar, surface, mOutputVideoWidth, mOutputVideoHeight,

-            mOutputVideoWidth, mOutputVideoHeight, 0);*/

-

-         mTarget = PreviewRenderer::CreatePreviewRenderer(

-            OMX_COLOR_FormatYUV420Planar, surface, mOutputVideoWidth, mOutputVideoHeight,

-            mOutputVideoWidth, mOutputVideoHeight, 0);

-

-        if(mTarget == NULL) {

-            LOGE("renderPreviewFrame: cannot create PreviewRenderer");

-            return M4ERR_ALLOC;

-        }

-    }

-

-    pixelArray = NULL;

-

-    // Postprocessing (apply video effect)

-    if(pFrameStr->bApplyEffect == M4OSA_TRUE) {

-

-        for(i=0;i<mNumberEffects;i++) {

-            // First check if effect starttime matches the clip being previewed

-            if((mEffectsSettings[i].uiStartTime < pFrameStr->clipBeginCutTime)

-             ||(mEffectsSettings[i].uiStartTime >= pFrameStr->clipEndCutTime)) {

-                // This effect doesn't belong to this clip, check next one

-                continue;

-            }

-            if((mEffectsSettings[i].uiStartTime <= pFrameStr->timeMs) &&

-            ((mEffectsSettings[i].uiStartTime+mEffectsSettings[i].uiDuration) >=

-             pFrameStr->timeMs) && (mEffectsSettings[i].uiDuration != 0)) {

-                setVideoEffectType(mEffectsSettings[i].VideoEffectType, TRUE);

-            }

-            else {

-                setVideoEffectType(mEffectsSettings[i].VideoEffectType, FALSE);

-            }

-        }

-

-        //Provide the overlay Update indication when there is an overlay effect

-        if (mCurrentVideoEffect & VIDEO_EFFECT_FRAMING) {

-            M4OSA_UInt32 index;

-            mCurrentVideoEffect &= ~VIDEO_EFFECT_FRAMING; //never apply framing here.

-

-            // Find the effect in effectSettings array

-            for (index = 0; index < mNumberEffects; index++) {

-                if(mEffectsSettings[index].VideoEffectType ==

-                    (M4VSS3GPP_VideoEffectType)M4xVSS_kVideoEffectType_Framing) {

-

-                    if((mEffectsSettings[index].uiStartTime <= pFrameInfo->timeMs) &&

-                        ((mEffectsSettings[index].uiStartTime+

-                        mEffectsSettings[index].uiDuration) >= pFrameInfo->timeMs))

-                    {

-                        break;

-                    }

-                }

-            }

-            if ((index < mNumberEffects) && (pCurrEditInfo != NULL)) {

-                pCurrEditInfo->overlaySettingsIndex = index;

-                LOGV("Framing index = %d", index);

-            } else {

-                LOGV("No framing effects found");

-            }

-        }

-

-        if(mCurrentVideoEffect != VIDEO_EFFECT_NONE) {

-            err = applyVideoEffect((M4OSA_Void *)pFrameStr->pBuffer,

-             OMX_COLOR_FormatYUV420Planar, pFrameStr->uiFrameWidth,

-             pFrameStr->uiFrameHeight, pFrameStr->timeMs,

-             (M4OSA_Void *)pixelArray);

-

-            if(err != M4NO_ERROR) {

-                LOGE("renderPreviewFrame: applyVideoEffect error 0x%x", (unsigned int)err);

-                delete mTarget;

-                mTarget = NULL;

-                free(pixelArray);

-                pixelArray = NULL;

-                return err;

-           }

-           mCurrentVideoEffect = VIDEO_EFFECT_NONE;

-        }

-        else {

-            // Apply the rendering mode

-            err = doImageRenderingMode((M4OSA_Void *)pFrameStr->pBuffer,

-             OMX_COLOR_FormatYUV420Planar, pFrameStr->uiFrameWidth,

-             pFrameStr->uiFrameHeight, (M4OSA_Void *)pixelArray);

-

-            if(err != M4NO_ERROR) {

-                LOGE("renderPreviewFrame:doImageRenderingMode error 0x%x", (unsigned int)err);

-                delete mTarget;

-                mTarget = NULL;

-                free(pixelArray);

-                pixelArray = NULL;

-                return err;

-            }

-        }

-    }

-    else {

-        // Apply the rendering mode

-        err = doImageRenderingMode((M4OSA_Void *)pFrameStr->pBuffer,

-         OMX_COLOR_FormatYUV420Planar, pFrameStr->uiFrameWidth,

-         pFrameStr->uiFrameHeight, (M4OSA_Void *)pixelArray);

-

-        if(err != M4NO_ERROR) {

-            LOGE("renderPreviewFrame: doImageRenderingMode error 0x%x", (unsigned int)err);

-            delete mTarget;

-            mTarget = NULL;

-            free(pixelArray);

-            pixelArray = NULL;

-            return err;

-        }

-    }

-

-    mTarget->renderYV12();

-    return err;

-}

-

-M4OSA_Void VideoEditorPreviewController::setJniCallback(void* cookie,

-    jni_progress_callback_fct callbackFct) {

-    //LOGV("setJniCallback");

-    mJniCookie = cookie;

-    mJniCallback = callbackFct;

-}

-

-M4OSA_ERR VideoEditorPreviewController::preparePlayer(

-    void* param, int playerInstance, int index) {

-

-    M4OSA_ERR err = M4NO_ERROR;

-    VideoEditorPreviewController *pController =

-     (VideoEditorPreviewController *)param;

-

-    LOGV("preparePlayer: instance %d file %d", playerInstance, index);

-

-    pController->mVePlayer[playerInstance]->setDataSource(

-    (const char *)pController->mClipList[index]->pFile, NULL);

-    LOGV("preparePlayer: setDataSource instance %s",

-     (const char *)pController->mClipList[index]->pFile);

-

-    pController->mVePlayer[playerInstance]->setVideoSurface(

-     pController->mSurface);

-    LOGV("preparePlayer: setVideoSurface");

-

-    pController->mVePlayer[playerInstance]->setMediaRenderingMode(

-     pController->mClipList[index]->xVSS.MediaRendering,

-     pController->mOutputVideoSize);

-    LOGV("preparePlayer: setMediaRenderingMode");

-

-    if((M4OSA_UInt32)index == pController->mStartingClipIndex) {

-        pController->mVePlayer[playerInstance]->setPlaybackBeginTime(

-        pController->mFirstPreviewClipBeginTime);

-    }

-    else {

-        pController->mVePlayer[playerInstance]->setPlaybackBeginTime(

-        pController->mClipList[index]->uiBeginCutTime);

-    }

-    LOGV("preparePlayer: setPlaybackBeginTime(%d)",

-     pController->mClipList[index]->uiBeginCutTime);

-

-    pController->mVePlayer[playerInstance]->setPlaybackEndTime(

-     pController->mClipList[index]->uiEndCutTime);

-    LOGV("preparePlayer: setPlaybackEndTime(%d)",

-     pController->mClipList[index]->uiEndCutTime);

-

-    if(pController->mClipList[index]->FileType == M4VIDEOEDITING_kFileType_ARGB8888) {

-        pController->mVePlayer[playerInstance]->setImageClipProperties(

-                 pController->mClipList[index]->ClipProperties.uiVideoWidth,

-                 pController->mClipList[index]->ClipProperties.uiVideoHeight);

-        LOGV("preparePlayer: setImageClipProperties");

-    }

-

-    pController->mVePlayer[playerInstance]->prepare();

-    LOGV("preparePlayer: prepared");

-

-    if(pController->mClipList[index]->uiBeginCutTime > 0) {

-        pController->mVePlayer[playerInstance]->seekTo(

-         pController->mClipList[index]->uiBeginCutTime);

-

-        LOGV("preparePlayer: seekTo(%d)",

-         pController->mClipList[index]->uiBeginCutTime);

-    }

-    pController->mVePlayer[pController->mCurrentPlayer]->setAudioPlayer(pController->mVEAudioPlayer);

-

-    pController->mVePlayer[playerInstance]->readFirstVideoFrame();

-    LOGV("preparePlayer: readFirstVideoFrame of clip");

-

-    return err;

-}

-

-M4OSA_ERR VideoEditorPreviewController::threadProc(M4OSA_Void* param) {

-    M4OSA_ERR err = M4NO_ERROR;

-    M4OSA_Int32 index = 0;

-    VideoEditorPreviewController *pController =

-     (VideoEditorPreviewController *)param;

-

-    LOGV("inside threadProc");

-    if(pController->mPlayerState == VePlayerIdle) {

-        (pController->mCurrentClipNumber)++;

-

-        LOGV("threadProc: playing file index %d total clips %d",

-         pController->mCurrentClipNumber, pController->mNumberClipsToPreview);

-

-        if((M4OSA_UInt32)pController->mCurrentClipNumber >=

-         pController->mNumberClipsToPreview) {

-

-            LOGV("All clips previewed");

-

-            pController->mCurrentPlayedDuration = 0;

-            pController->mCurrentClipDuration = 0;

-            pController->mCurrentPlayer = 0;

-

-            if(pController->mPreviewLooping == M4OSA_TRUE) {

-                pController->mCurrentClipNumber =

-                 pController->mStartingClipIndex;

-

-                LOGV("Preview looping TRUE, restarting from clip index %d",

-                 pController->mCurrentClipNumber);

-

-                // Reset the story board timestamp inside the player

-                for (int playerInst=0; playerInst<NBPLAYER_INSTANCES;

-                 playerInst++) {

-                    pController->mVePlayer[playerInst]->resetJniCallbackTimeStamp();

-                }

-            }

-            else {

-                M4OSA_UInt32 endArgs = 0;

-                if(pController->mJniCallback != NULL) {

-                    pController->mJniCallback(

-                     pController->mJniCookie, MSG_TYPE_PREVIEW_END, &endArgs);

-                }

-                pController->mPlayerState = VePlayerAutoStop;

-

-                // Reset original begin cuttime of first previewed clip

-                pController->mClipList[pController->mStartingClipIndex]->uiBeginCutTime =

-                 pController->mFirstPreviewClipBeginTime;

-                // Reset original end cuttime of last previewed clip

-                pController->mClipList[pController->mNumberClipsToPreview-1]->uiEndCutTime =

-                 pController->mLastPreviewClipEndTime;

-

-                // Return a warning to M4OSA thread handler

-                // so that thread is moved from executing state to open state

-                return M4WAR_NO_MORE_STREAM;

-            }

-        }

-

-        index=pController->mCurrentClipNumber;

-        if((M4OSA_UInt32)pController->mCurrentClipNumber == pController->mStartingClipIndex) {

-            pController->mCurrentPlayedDuration +=

-             pController->mVideoStoryBoardTimeMsUptoFirstPreviewClip;

-

-            pController->mCurrentClipDuration =

-             pController->mClipList[pController->mCurrentClipNumber]->uiEndCutTime

-              - pController->mFirstPreviewClipBeginTime;

-

-            preparePlayer((void*)pController, pController->mCurrentPlayer, index);

-        }

-        else {

-            pController->mCurrentPlayedDuration +=

-             pController->mCurrentClipDuration;

-

-            pController->mCurrentClipDuration =

-             pController->mClipList[pController->mCurrentClipNumber]->uiEndCutTime -

-             pController->mClipList[pController->mCurrentClipNumber]->uiBeginCutTime;

-        }

-

-        pController->mVePlayer[pController->mCurrentPlayer]->setStoryboardStartTime(

-         pController->mCurrentPlayedDuration);

-        LOGV("threadProc: setStoryboardStartTime");

-

-        // Set the next clip duration for Audio mix here

-        if((M4OSA_UInt32)pController->mCurrentClipNumber != pController->mStartingClipIndex) {

-

-            pController->mVePlayer[pController->mCurrentPlayer]->setAudioMixStoryBoardParam(

-             pController->mCurrentPlayedDuration,

-             pController->mClipList[index]->uiBeginCutTime,

-             pController->mClipList[index]->ClipProperties.uiClipAudioVolumePercentage);

-

-            LOGV("threadProc: setAudioMixStoryBoardParam fromMS %d \

-             ClipBeginTime %d", pController->mCurrentPlayedDuration +

-             pController->mClipList[index]->uiBeginCutTime,

-             pController->mClipList[index]->uiBeginCutTime,

-             pController->mClipList[index]->ClipProperties.uiClipAudioVolumePercentage);

-        }

-        // Capture the active player being used

-        pController->mActivePlayerIndex = pController->mCurrentPlayer;

-

-        pController->mVePlayer[pController->mCurrentPlayer]->start();

-        LOGV("threadProc: started");

-

-        pController->mPlayerState = VePlayerBusy;

-

-    } else if(pController->mPlayerState == VePlayerAutoStop) {

-        LOGV("Preview completed..auto stop the player");

-    } else if ((pController->mPlayerState == VePlayerBusy) && (pController->mPrepareReqest)) {

-        // Prepare the player here

-        pController->mPrepareReqest = M4OSA_FALSE;

-        preparePlayer((void*)pController, pController->mCurrentPlayer,

-            pController->mCurrentClipNumber+1);

-        if (pController->mSemThreadWait != NULL) {

-            err = M4OSA_semaphoreWait(pController->mSemThreadWait,

-                M4OSA_WAIT_FOREVER);

-        }

-    } else {

-        if (!pController->bStopThreadInProgress) {

-            LOGV("threadProc: state busy...wait for sem");

-            if (pController->mSemThreadWait != NULL) {

-                err = M4OSA_semaphoreWait(pController->mSemThreadWait,

-                 M4OSA_WAIT_FOREVER);

-             }

-        }

-        LOGV("threadProc: sem wait returned err = 0x%x", err);

+        }
     }
-

-    //Always return M4NO_ERROR to ensure the thread keeps running

-    return M4NO_ERROR;

-}

-

-void VideoEditorPreviewController::notify(

-    void* cookie, int msg, int ext1, int ext2)

-{

-    VideoEditorPreviewController *pController =

-     (VideoEditorPreviewController *)cookie;

-

-    M4OSA_ERR err = M4NO_ERROR;

-    uint32_t clipDuration = 0;

-    switch (msg) {

-        case MEDIA_NOP: // interface test message

-            LOGV("MEDIA_NOP");

-            break;

-        case MEDIA_PREPARED:

-            LOGV("MEDIA_PREPARED");

-            break;

-        case MEDIA_PLAYBACK_COMPLETE:

-        {

-            LOGV("notify:MEDIA_PLAYBACK_COMPLETE");

-            pController->mPlayerState = VePlayerIdle;

-

-            //send progress callback with last frame timestamp

-            if((M4OSA_UInt32)pController->mCurrentClipNumber ==

-             pController->mStartingClipIndex) {

-                clipDuration =

-                 pController->mClipList[pController->mCurrentClipNumber]->uiEndCutTime

-                  - pController->mFirstPreviewClipBeginTime;

-            }

-            else {

-                clipDuration =

-                 pController->mClipList[pController->mCurrentClipNumber]->uiEndCutTime

-                  - pController->mClipList[pController->mCurrentClipNumber]->uiBeginCutTime;

-            }

-

-            M4OSA_UInt32 playedDuration = clipDuration+pController->mCurrentPlayedDuration;

-            pController->mJniCallback(

-                 pController->mJniCookie, MSG_TYPE_PROGRESS_INDICATION,

-                 &playedDuration);

-

-            if ((pController->mOverlayState == OVERLAY_UPDATE) &&

-                ((M4OSA_UInt32)pController->mCurrentClipNumber !=

-                (pController->mNumberClipsToPreview-1))) {

-                VideoEditorCurretEditInfo *pEditInfo =

-                    (VideoEditorCurretEditInfo*)M4OSA_32bitAlignedMalloc(sizeof(VideoEditorCurretEditInfo),

-                    M4VS, (M4OSA_Char*)"Current Edit info");

-                pEditInfo->overlaySettingsIndex = ext2;

-                pEditInfo->clipIndex = pController->mCurrentClipNumber;

-                pController->mOverlayState == OVERLAY_CLEAR;

-                if (pController->mJniCallback != NULL) {

-                        pController->mJniCallback(pController->mJniCookie,

-                            MSG_TYPE_OVERLAY_CLEAR, pEditInfo);

-                }

-                free(pEditInfo);

-            }

-            {

-                Mutex::Autolock autoLock(pController->mLockSem);

-                if (pController->mSemThreadWait != NULL) {

+    LOGV("stopPreview: clear audioSink and audioPlayer");
+    mVEAudioSink.clear();
+    if (mVEAudioPlayer) {
+        delete mVEAudioPlayer;
+        mVEAudioPlayer = NULL;
+    }
+
+    // If image file playing, then free the buffer pointer
+    if(mFrameStr.pBuffer != M4OSA_NULL) {
+        free(mFrameStr.pBuffer);
+        mFrameStr.pBuffer = M4OSA_NULL;
+    }
+
+    // Reset original begin cuttime of first previewed clip*/
+    mClipList[mStartingClipIndex]->uiBeginCutTime = mFirstPreviewClipBeginTime;
+    // Reset original end cuttime of last previewed clip*/
+    mClipList[mNumberClipsToPreview-1]->uiEndCutTime = mLastPreviewClipEndTime;
+
+    mPlayerState = VePlayerIdle;
+    mPrepareReqest = M4OSA_FALSE;
+
+    mCurrentPlayedDuration = 0;
+    mCurrentClipDuration = 0;
+    mRenderingMode = M4xVSS_kBlackBorders;
+    mOutputVideoWidth = 0;
+    mOutputVideoHeight = 0;
+
+    LOGV("stopPreview() lastRenderedFrameTimeMs %ld", lastRenderedFrameTimeMs);
+    return lastRenderedFrameTimeMs;
+}
+
+M4OSA_ERR VideoEditorPreviewController::clearSurface(
+    const sp<Surface> &surface, VideoEditor_renderPreviewFrameStr* pFrameInfo) {
+
+    M4OSA_ERR err = M4NO_ERROR;
+    VideoEditor_renderPreviewFrameStr* pFrameStr = pFrameInfo;
+    M4OSA_UInt32 outputBufferWidth =0, outputBufferHeight=0;
+    M4VIFI_ImagePlane planeOut[3];
+    LOGV("Inside preview clear frame");
+
+    Mutex::Autolock autoLock(mLock);
+
+    // Delete previous renderer instance
+    if(mTarget != NULL) {
+        delete mTarget;
+        mTarget = NULL;
+    }
+
+    outputBufferWidth = pFrameStr->uiFrameWidth;
+    outputBufferHeight = pFrameStr->uiFrameHeight;
+
+    // Initialize the renderer
+    if(mTarget == NULL) {
+
+        mTarget = PreviewRenderer::CreatePreviewRenderer(
+            OMX_COLOR_FormatYUV420Planar, surface, outputBufferWidth, outputBufferHeight,
+            outputBufferWidth, outputBufferHeight, 0);
+
+        if(mTarget == NULL) {
+            LOGE("renderPreviewFrame: cannot create PreviewRenderer");
+            return M4ERR_ALLOC;
+        }
+    }
+
+    // Out plane
+    uint8_t* outBuffer;
+    size_t outBufferStride = 0;
+
+    LOGV("doMediaRendering CALL getBuffer()");
+    mTarget->getBufferYV12(&outBuffer, &outBufferStride);
+
+    // Set the output YUV420 plane to be compatible with YV12 format
+    //In YV12 format, sizes must be even
+    M4OSA_UInt32 yv12PlaneWidth = ((outputBufferWidth +1)>>1)<<1;
+    M4OSA_UInt32 yv12PlaneHeight = ((outputBufferHeight+1)>>1)<<1;
+
+    prepareYV12ImagePlane(planeOut, yv12PlaneWidth, yv12PlaneHeight,
+     (M4OSA_UInt32)outBufferStride, (M4VIFI_UInt8 *)outBuffer);
+
+    /* Fill the surface with black frame */
+    memset((void *)planeOut[0].pac_data,0x00,planeOut[0].u_width *
+                            planeOut[0].u_height * 1.5);
+    memset((void *)planeOut[1].pac_data,128,planeOut[1].u_width *
+                            planeOut[1].u_height);
+    memset((void *)planeOut[2].pac_data,128,planeOut[2].u_width *
+                             planeOut[2].u_height);
+
+    mTarget->renderYV12();
+    return err;
+}
+
+M4OSA_ERR VideoEditorPreviewController::renderPreviewFrame(
+            const sp<Surface> &surface,
+            VideoEditor_renderPreviewFrameStr* pFrameInfo,
+            VideoEditorCurretEditInfo *pCurrEditInfo) {
+
+    M4OSA_ERR err = M4NO_ERROR;
+    M4OSA_UInt32 i = 0, iIncrementedDuration = 0, tnTimeMs=0, framesize =0;
+    VideoEditor_renderPreviewFrameStr* pFrameStr = pFrameInfo;
+    M4VIFI_UInt8 *pixelArray = NULL;
+    Mutex::Autolock autoLock(mLock);
+
+    if (pCurrEditInfo != NULL) {
+        pCurrEditInfo->overlaySettingsIndex = -1;
+    }
+    // Delete previous renderer instance
+    if(mTarget != NULL) {
+        delete mTarget;
+        mTarget = NULL;
+    }
+
+    if(mOutputVideoWidth == 0) {
+        mOutputVideoWidth = pFrameStr->uiFrameWidth;
+    }
+
+    if(mOutputVideoHeight == 0) {
+        mOutputVideoHeight = pFrameStr->uiFrameHeight;
+    }
+
+    // Initialize the renderer
+    if(mTarget == NULL) {
+        /*mTarget = new PreviewRenderer(
+            OMX_COLOR_FormatYUV420Planar, surface, mOutputVideoWidth, mOutputVideoHeight,
+            mOutputVideoWidth, mOutputVideoHeight, 0);*/
+
+         mTarget = PreviewRenderer::CreatePreviewRenderer(
+            OMX_COLOR_FormatYUV420Planar, surface, mOutputVideoWidth, mOutputVideoHeight,
+            mOutputVideoWidth, mOutputVideoHeight, 0);
+
+        if(mTarget == NULL) {
+            LOGE("renderPreviewFrame: cannot create PreviewRenderer");
+            return M4ERR_ALLOC;
+        }
+    }
+
+    pixelArray = NULL;
+
+    // Postprocessing (apply video effect)
+    if(pFrameStr->bApplyEffect == M4OSA_TRUE) {
+
+        for(i=0;i<mNumberEffects;i++) {
+            // First check if effect starttime matches the clip being previewed
+            if((mEffectsSettings[i].uiStartTime < pFrameStr->clipBeginCutTime)
+             ||(mEffectsSettings[i].uiStartTime >= pFrameStr->clipEndCutTime)) {
+                // This effect doesn't belong to this clip, check next one
+                continue;
+            }
+            if((mEffectsSettings[i].uiStartTime <= pFrameStr->timeMs) &&
+            ((mEffectsSettings[i].uiStartTime+mEffectsSettings[i].uiDuration) >=
+             pFrameStr->timeMs) && (mEffectsSettings[i].uiDuration != 0)) {
+                setVideoEffectType(mEffectsSettings[i].VideoEffectType, TRUE);
+            }
+            else {
+                setVideoEffectType(mEffectsSettings[i].VideoEffectType, FALSE);
+            }
+        }
+
+        //Provide the overlay Update indication when there is an overlay effect
+        if (mCurrentVideoEffect & VIDEO_EFFECT_FRAMING) {
+            M4OSA_UInt32 index;
+            mCurrentVideoEffect &= ~VIDEO_EFFECT_FRAMING; //never apply framing here.
+
+            // Find the effect in effectSettings array
+            for (index = 0; index < mNumberEffects; index++) {
+                if(mEffectsSettings[index].VideoEffectType ==
+                    (M4VSS3GPP_VideoEffectType)M4xVSS_kVideoEffectType_Framing) {
+
+                    if((mEffectsSettings[index].uiStartTime <= pFrameInfo->timeMs) &&
+                        ((mEffectsSettings[index].uiStartTime+
+                        mEffectsSettings[index].uiDuration) >= pFrameInfo->timeMs))
+                    {
+                        break;
+                    }
+                }
+            }
+            if ((index < mNumberEffects) && (pCurrEditInfo != NULL)) {
+                pCurrEditInfo->overlaySettingsIndex = index;
+                LOGV("Framing index = %d", index);
+            } else {
+                LOGV("No framing effects found");
+            }
+        }
+
+        if(mCurrentVideoEffect != VIDEO_EFFECT_NONE) {
+            err = applyVideoEffect((M4OSA_Void *)pFrameStr->pBuffer,
+             OMX_COLOR_FormatYUV420Planar, pFrameStr->uiFrameWidth,
+             pFrameStr->uiFrameHeight, pFrameStr->timeMs,
+             (M4OSA_Void *)pixelArray);
+
+            if(err != M4NO_ERROR) {
+                LOGE("renderPreviewFrame: applyVideoEffect error 0x%x", (unsigned int)err);
+                delete mTarget;
+                mTarget = NULL;
+                free(pixelArray);
+                pixelArray = NULL;
+                return err;
+           }
+           mCurrentVideoEffect = VIDEO_EFFECT_NONE;
+        }
+        else {
+            // Apply the rendering mode
+            err = doImageRenderingMode((M4OSA_Void *)pFrameStr->pBuffer,
+             OMX_COLOR_FormatYUV420Planar, pFrameStr->uiFrameWidth,
+             pFrameStr->uiFrameHeight, (M4OSA_Void *)pixelArray);
+
+            if(err != M4NO_ERROR) {
+                LOGE("renderPreviewFrame:doImageRenderingMode error 0x%x", (unsigned int)err);
+                delete mTarget;
+                mTarget = NULL;
+                free(pixelArray);
+                pixelArray = NULL;
+                return err;
+            }
+        }
+    }
+    else {
+        // Apply the rendering mode
+        err = doImageRenderingMode((M4OSA_Void *)pFrameStr->pBuffer,
+         OMX_COLOR_FormatYUV420Planar, pFrameStr->uiFrameWidth,
+         pFrameStr->uiFrameHeight, (M4OSA_Void *)pixelArray);
+
+        if(err != M4NO_ERROR) {
+            LOGE("renderPreviewFrame: doImageRenderingMode error 0x%x", (unsigned int)err);
+            delete mTarget;
+            mTarget = NULL;
+            free(pixelArray);
+            pixelArray = NULL;
+            return err;
+        }
+    }
+
+    mTarget->renderYV12();
+    return err;
+}
+
+M4OSA_Void VideoEditorPreviewController::setJniCallback(void* cookie,
+    jni_progress_callback_fct callbackFct) {
+    //LOGV("setJniCallback");
+    mJniCookie = cookie;
+    mJniCallback = callbackFct;
+}
+
+M4OSA_ERR VideoEditorPreviewController::preparePlayer(
+    void* param, int playerInstance, int index) {
+
+    M4OSA_ERR err = M4NO_ERROR;
+    VideoEditorPreviewController *pController =
+     (VideoEditorPreviewController *)param;
+
+    LOGV("preparePlayer: instance %d file %d", playerInstance, index);
+
+    pController->mVePlayer[playerInstance]->setDataSource(
+    (const char *)pController->mClipList[index]->pFile, NULL);
+    LOGV("preparePlayer: setDataSource instance %s",
+     (const char *)pController->mClipList[index]->pFile);
+
+    pController->mVePlayer[playerInstance]->setVideoSurface(
+     pController->mSurface);
+    LOGV("preparePlayer: setVideoSurface");
+
+    pController->mVePlayer[playerInstance]->setMediaRenderingMode(
+     pController->mClipList[index]->xVSS.MediaRendering,
+     pController->mOutputVideoSize);
+    LOGV("preparePlayer: setMediaRenderingMode");
+
+    if((M4OSA_UInt32)index == pController->mStartingClipIndex) {
+        pController->mVePlayer[playerInstance]->setPlaybackBeginTime(
+        pController->mFirstPreviewClipBeginTime);
+    }
+    else {
+        pController->mVePlayer[playerInstance]->setPlaybackBeginTime(
+        pController->mClipList[index]->uiBeginCutTime);
+    }
+    LOGV("preparePlayer: setPlaybackBeginTime(%d)",
+     pController->mClipList[index]->uiBeginCutTime);
+
+    pController->mVePlayer[playerInstance]->setPlaybackEndTime(
+     pController->mClipList[index]->uiEndCutTime);
+    LOGV("preparePlayer: setPlaybackEndTime(%d)",
+     pController->mClipList[index]->uiEndCutTime);
+
+    if(pController->mClipList[index]->FileType == M4VIDEOEDITING_kFileType_ARGB8888) {
+        pController->mVePlayer[playerInstance]->setImageClipProperties(
+                 pController->mClipList[index]->ClipProperties.uiVideoWidth,
+                 pController->mClipList[index]->ClipProperties.uiVideoHeight);
+        LOGV("preparePlayer: setImageClipProperties");
+    }
+
+    pController->mVePlayer[playerInstance]->prepare();
+    LOGV("preparePlayer: prepared");
+
+    if(pController->mClipList[index]->uiBeginCutTime > 0) {
+        pController->mVePlayer[playerInstance]->seekTo(
+         pController->mClipList[index]->uiBeginCutTime);
+
+        LOGV("preparePlayer: seekTo(%d)",
+         pController->mClipList[index]->uiBeginCutTime);
+    }
+    pController->mVePlayer[pController->mCurrentPlayer]->setAudioPlayer(pController->mVEAudioPlayer);
+
+    pController->mVePlayer[playerInstance]->readFirstVideoFrame();
+    LOGV("preparePlayer: readFirstVideoFrame of clip");
+
+    return err;
+}
+
+M4OSA_ERR VideoEditorPreviewController::threadProc(M4OSA_Void* param) {
+    M4OSA_ERR err = M4NO_ERROR;
+    M4OSA_Int32 index = 0;
+    VideoEditorPreviewController *pController =
+     (VideoEditorPreviewController *)param;
+
+    LOGV("inside threadProc");
+    if(pController->mPlayerState == VePlayerIdle) {
+        (pController->mCurrentClipNumber)++;
+
+        LOGV("threadProc: playing file index %d total clips %d",
+         pController->mCurrentClipNumber, pController->mNumberClipsToPreview);
+
+        if((M4OSA_UInt32)pController->mCurrentClipNumber >=
+         pController->mNumberClipsToPreview) {
+
+            LOGV("All clips previewed");
+
+            pController->mCurrentPlayedDuration = 0;
+            pController->mCurrentClipDuration = 0;
+            pController->mCurrentPlayer = 0;
+
+            if(pController->mPreviewLooping == M4OSA_TRUE) {
+                pController->mCurrentClipNumber =
+                 pController->mStartingClipIndex;
+
+                LOGV("Preview looping TRUE, restarting from clip index %d",
+                 pController->mCurrentClipNumber);
+
+                // Reset the story board timestamp inside the player
+                for (int playerInst=0; playerInst<NBPLAYER_INSTANCES;
+                 playerInst++) {
+                    pController->mVePlayer[playerInst]->resetJniCallbackTimeStamp();
+                }
+            }
+            else {
+                M4OSA_UInt32 endArgs = 0;
+                if(pController->mJniCallback != NULL) {
+                    pController->mJniCallback(
+                     pController->mJniCookie, MSG_TYPE_PREVIEW_END, &endArgs);
+                }
+                pController->mPlayerState = VePlayerAutoStop;
+
+                // Reset original begin cuttime of first previewed clip
+                pController->mClipList[pController->mStartingClipIndex]->uiBeginCutTime =
+                 pController->mFirstPreviewClipBeginTime;
+                // Reset original end cuttime of last previewed clip
+                pController->mClipList[pController->mNumberClipsToPreview-1]->uiEndCutTime =
+                 pController->mLastPreviewClipEndTime;
+
+                // Return a warning to M4OSA thread handler
+                // so that thread is moved from executing state to open state
+                return M4WAR_NO_MORE_STREAM;
+            }
+        }
+
+        index=pController->mCurrentClipNumber;
+        if((M4OSA_UInt32)pController->mCurrentClipNumber == pController->mStartingClipIndex) {
+            pController->mCurrentPlayedDuration +=
+             pController->mVideoStoryBoardTimeMsUptoFirstPreviewClip;
+
+            pController->mCurrentClipDuration =
+             pController->mClipList[pController->mCurrentClipNumber]->uiEndCutTime
+              - pController->mFirstPreviewClipBeginTime;
+
+            preparePlayer((void*)pController, pController->mCurrentPlayer, index);
+        }
+        else {
+            pController->mCurrentPlayedDuration +=
+             pController->mCurrentClipDuration;
+
+            pController->mCurrentClipDuration =
+             pController->mClipList[pController->mCurrentClipNumber]->uiEndCutTime -
+             pController->mClipList[pController->mCurrentClipNumber]->uiBeginCutTime;
+        }
+
+        pController->mVePlayer[pController->mCurrentPlayer]->setStoryboardStartTime(
+         pController->mCurrentPlayedDuration);
+        LOGV("threadProc: setStoryboardStartTime");
+
+        // Set the next clip duration for Audio mix here
+        if((M4OSA_UInt32)pController->mCurrentClipNumber != pController->mStartingClipIndex) {
+
+            pController->mVePlayer[pController->mCurrentPlayer]->setAudioMixStoryBoardParam(
+             pController->mCurrentPlayedDuration,
+             pController->mClipList[index]->uiBeginCutTime,
+             pController->mClipList[index]->ClipProperties.uiClipAudioVolumePercentage);
+
+            LOGV("threadProc: setAudioMixStoryBoardParam fromMS %d \
+             ClipBeginTime %d", pController->mCurrentPlayedDuration +
+             pController->mClipList[index]->uiBeginCutTime,
+             pController->mClipList[index]->uiBeginCutTime,
+             pController->mClipList[index]->ClipProperties.uiClipAudioVolumePercentage);
+        }
+        // Capture the active player being used
+        pController->mActivePlayerIndex = pController->mCurrentPlayer;
+
+        pController->mVePlayer[pController->mCurrentPlayer]->start();
+        LOGV("threadProc: started");
+
+        pController->mPlayerState = VePlayerBusy;
+
+    } else if(pController->mPlayerState == VePlayerAutoStop) {
+        LOGV("Preview completed..auto stop the player");
+    } else if ((pController->mPlayerState == VePlayerBusy) && (pController->mPrepareReqest)) {
+        // Prepare the player here
+        pController->mPrepareReqest = M4OSA_FALSE;
+        preparePlayer((void*)pController, pController->mCurrentPlayer,
+            pController->mCurrentClipNumber+1);
+        if (pController->mSemThreadWait != NULL) {
+            err = M4OSA_semaphoreWait(pController->mSemThreadWait,
+                M4OSA_WAIT_FOREVER);
+        }
+    } else {
+        if (!pController->bStopThreadInProgress) {
+            LOGV("threadProc: state busy...wait for sem");
+            if (pController->mSemThreadWait != NULL) {
+                err = M4OSA_semaphoreWait(pController->mSemThreadWait,
+                 M4OSA_WAIT_FOREVER);
+             }
+        }
+        LOGV("threadProc: sem wait returned err = 0x%x", err);
+    }
+
+    //Always return M4NO_ERROR to ensure the thread keeps running
+    return M4NO_ERROR;
+}
+
+void VideoEditorPreviewController::notify(
+    void* cookie, int msg, int ext1, int ext2)
+{
+    VideoEditorPreviewController *pController =
+     (VideoEditorPreviewController *)cookie;
+
+    M4OSA_ERR err = M4NO_ERROR;
+    uint32_t clipDuration = 0;
+    switch (msg) {
+        case MEDIA_NOP: // interface test message
+            LOGV("MEDIA_NOP");
+            break;
+        case MEDIA_PREPARED:
+            LOGV("MEDIA_PREPARED");
+            break;
+        case MEDIA_PLAYBACK_COMPLETE:
+        {
+            LOGV("notify:MEDIA_PLAYBACK_COMPLETE");
+            pController->mPlayerState = VePlayerIdle;
+
+            //send progress callback with last frame timestamp
+            if((M4OSA_UInt32)pController->mCurrentClipNumber ==
+             pController->mStartingClipIndex) {
+                clipDuration =
+                 pController->mClipList[pController->mCurrentClipNumber]->uiEndCutTime
+                  - pController->mFirstPreviewClipBeginTime;
+            }
+            else {
+                clipDuration =
+                 pController->mClipList[pController->mCurrentClipNumber]->uiEndCutTime
+                  - pController->mClipList[pController->mCurrentClipNumber]->uiBeginCutTime;
+            }
+
+            M4OSA_UInt32 playedDuration = clipDuration+pController->mCurrentPlayedDuration;
+            pController->mJniCallback(
+                 pController->mJniCookie, MSG_TYPE_PROGRESS_INDICATION,
+                 &playedDuration);
+
+            if ((pController->mOverlayState == OVERLAY_UPDATE) &&
+                ((M4OSA_UInt32)pController->mCurrentClipNumber !=
+                (pController->mNumberClipsToPreview-1))) {
+                VideoEditorCurretEditInfo *pEditInfo =
+                    (VideoEditorCurretEditInfo*)M4OSA_32bitAlignedMalloc(sizeof(VideoEditorCurretEditInfo),
+                    M4VS, (M4OSA_Char*)"Current Edit info");
+                pEditInfo->overlaySettingsIndex = ext2;
+                pEditInfo->clipIndex = pController->mCurrentClipNumber;
+                pController->mOverlayState == OVERLAY_CLEAR;
+                if (pController->mJniCallback != NULL) {
+                        pController->mJniCallback(pController->mJniCookie,
+                            MSG_TYPE_OVERLAY_CLEAR, pEditInfo);
+                }
+                free(pEditInfo);
+            }
+            {
+                Mutex::Autolock autoLock(pController->mLockSem);
+                if (pController->mSemThreadWait != NULL) {
                     M4OSA_semaphorePost(pController->mSemThreadWait);
                     return;
-                }

-            }

-

-            break;

-        }

-        case MEDIA_ERROR:

-        {

-            int err_val = ext1;

-          // Always log errors.

-          // ext1: Media framework error code.

-          // ext2: Implementation dependant error code.

-            LOGE("MEDIA_ERROR; error (%d, %d)", ext1, ext2);

-            if(pController->mJniCallback != NULL) {

-                pController->mJniCallback(pController->mJniCookie,

-                 MSG_TYPE_PLAYER_ERROR, &err_val);

-            }

-            break;

-        }

-        case MEDIA_INFO:

-        {

-            int info_val = ext2;

-            // ext1: Media framework error code.

-            // ext2: Implementation dependant error code.

-            //LOGW("MEDIA_INFO; info/warning (%d, %d)", ext1, ext2);

-            if(pController->mJniCallback != NULL) {

-                pController->mJniCallback(pController->mJniCookie,

-                 MSG_TYPE_PROGRESS_INDICATION, &info_val);

-            }

-            break;

-        }

-        case MEDIA_SEEK_COMPLETE:

-            LOGV("MEDIA_SEEK_COMPLETE; Received seek complete");

-            break;

-        case MEDIA_BUFFERING_UPDATE:

-            LOGV("MEDIA_BUFFERING_UPDATE; buffering %d", ext1);

-            break;

-        case MEDIA_SET_VIDEO_SIZE:

-            LOGV("MEDIA_SET_VIDEO_SIZE; New video size %d x %d", ext1, ext2);

-            break;

-        case 0xAAAAAAAA:

-            LOGV("VIDEO PLAYBACK ALMOST over, prepare next player");

-            // Select next player and prepare it

-            // If there is a clip after this one

-            if ((M4OSA_UInt32)(pController->mCurrentClipNumber+1) <

-             pController->mNumberClipsToPreview) {

-                pController->mPrepareReqest = M4OSA_TRUE;

-                pController->mCurrentPlayer++;

-                if (pController->mCurrentPlayer >= NBPLAYER_INSTANCES) {

-                    pController->mCurrentPlayer = 0;

-                }

-                // Prepare the first clip to be played

-                {

-                    Mutex::Autolock autoLock(pController->mLockSem);

-                    if (pController->mSemThreadWait != NULL) {

-                        M4OSA_semaphorePost(pController->mSemThreadWait);

-                    }

-                }

-            }

-            break;

-        case 0xBBBBBBBB:

-        {

-            LOGV("VIDEO PLAYBACK, Update Overlay");

-            int overlayIndex = ext2;

-            VideoEditorCurretEditInfo *pEditInfo =

-                    (VideoEditorCurretEditInfo*)M4OSA_32bitAlignedMalloc(sizeof(VideoEditorCurretEditInfo),

-                    M4VS, (M4OSA_Char*)"Current Edit info");

-            //ext1 = 1; start the overlay display

-            //     = 2; Clear the overlay.

-            pEditInfo->overlaySettingsIndex = ext2;

-            pEditInfo->clipIndex = pController->mCurrentClipNumber;

-            LOGV("pController->mCurrentClipNumber = %d",pController->mCurrentClipNumber);

-            if (pController->mJniCallback != NULL) {

-                if (ext1 == 1) {

-                    pController->mOverlayState = OVERLAY_UPDATE;

-                    pController->mJniCallback(pController->mJniCookie,

-                        MSG_TYPE_OVERLAY_UPDATE, pEditInfo);

-                } else {

-                    pController->mOverlayState = OVERLAY_CLEAR;

-                    pController->mJniCallback(pController->mJniCookie,

-                        MSG_TYPE_OVERLAY_CLEAR, pEditInfo);

-                }

-            }

-            free(pEditInfo);

-            break;

-        }

-        default:

-            LOGV("unrecognized message: (%d, %d, %d)", msg, ext1, ext2);

-            break;

-    }

-}

-

-void VideoEditorPreviewController::setVideoEffectType(

-    M4VSS3GPP_VideoEffectType type, M4OSA_Bool enable) {

-

-    M4OSA_UInt32 effect = VIDEO_EFFECT_NONE;

-

-    // map M4VSS3GPP_VideoEffectType to local enum

-    switch(type) {

-        case M4VSS3GPP_kVideoEffectType_FadeFromBlack:

-            effect = VIDEO_EFFECT_FADEFROMBLACK;

-            break;

-

-        case M4VSS3GPP_kVideoEffectType_FadeToBlack:

-            effect = VIDEO_EFFECT_FADETOBLACK;

-            break;

-

-        case M4VSS3GPP_kVideoEffectType_CurtainOpening:

-            effect = VIDEO_EFFECT_CURTAINOPEN;

-            break;

-

-        case M4VSS3GPP_kVideoEffectType_CurtainClosing:

-            effect = VIDEO_EFFECT_CURTAINCLOSE;

-            break;

-

-        case M4xVSS_kVideoEffectType_BlackAndWhite:

-            effect = VIDEO_EFFECT_BLACKANDWHITE;

-            break;

-

-        case M4xVSS_kVideoEffectType_Pink:

-            effect = VIDEO_EFFECT_PINK;

-            break;

-

-        case M4xVSS_kVideoEffectType_Green:

-            effect = VIDEO_EFFECT_GREEN;

-            break;

-

-        case M4xVSS_kVideoEffectType_Sepia:

-            effect = VIDEO_EFFECT_SEPIA;

-            break;

-

-        case M4xVSS_kVideoEffectType_Negative:

-            effect = VIDEO_EFFECT_NEGATIVE;

-            break;

-

-        case M4xVSS_kVideoEffectType_Framing:

-            effect = VIDEO_EFFECT_FRAMING;

-            break;

-

-        case M4xVSS_kVideoEffectType_Fifties:

-            effect = VIDEO_EFFECT_FIFTIES;

-            break;

-

-        case M4xVSS_kVideoEffectType_ColorRGB16:

-            effect = VIDEO_EFFECT_COLOR_RGB16;

-            break;

-

-        case M4xVSS_kVideoEffectType_Gradient:

-            effect = VIDEO_EFFECT_GRADIENT;

-            break;

-

-        default:

-            effect = VIDEO_EFFECT_NONE;

-            break;

-    }

-

-    if(enable == M4OSA_TRUE) {

-        // If already set, then no need to set again

-        if(!(mCurrentVideoEffect & effect))

-            mCurrentVideoEffect |= effect;

-            if(effect == VIDEO_EFFECT_FIFTIES) {

-                mIsFiftiesEffectStarted = true;

-            }

-    }

-    else  {

-        // Reset only if already set

-        if(mCurrentVideoEffect & effect)

-            mCurrentVideoEffect &= ~effect;

-    }

-

-    return;

-}

-

-

-M4OSA_ERR VideoEditorPreviewController::applyVideoEffect(

-    M4OSA_Void * dataPtr, M4OSA_UInt32 colorFormat, M4OSA_UInt32 videoWidth,

-    M4OSA_UInt32 videoHeight, M4OSA_UInt32 timeMs, M4OSA_Void* outPtr) {

-

-    M4OSA_ERR err = M4NO_ERROR;

-    vePostProcessParams postProcessParams;

-

-    postProcessParams.vidBuffer = (M4VIFI_UInt8*)dataPtr;

-    postProcessParams.videoWidth = videoWidth;

-    postProcessParams.videoHeight = videoHeight;

-    postProcessParams.timeMs = timeMs;

-    postProcessParams.timeOffset = 0; //Since timeMS already takes care of offset in this case

-    postProcessParams.effectsSettings = mEffectsSettings;

-    postProcessParams.numberEffects = mNumberEffects;

-    postProcessParams.outVideoWidth = mOutputVideoWidth;

-    postProcessParams.outVideoHeight = mOutputVideoHeight;

-    postProcessParams.currentVideoEffect = mCurrentVideoEffect;

-    postProcessParams.renderingMode = mRenderingMode;

-    if(mIsFiftiesEffectStarted == M4OSA_TRUE) {

-        postProcessParams.isFiftiesEffectStarted = M4OSA_TRUE;

-        mIsFiftiesEffectStarted = M4OSA_FALSE;

-    }

-    else {

-       postProcessParams.isFiftiesEffectStarted = M4OSA_FALSE;

-    }

-    //postProcessParams.renderer = mTarget;

-    postProcessParams.overlayFrameRGBBuffer = NULL;

-    postProcessParams.overlayFrameYUVBuffer = NULL;

-

-    mTarget->getBufferYV12(&(postProcessParams.pOutBuffer), &(postProcessParams.outBufferStride));

-

-    err = applyEffectsAndRenderingMode(&postProcessParams, videoWidth, videoHeight);

-    return err;

-}

-

-M4OSA_ERR VideoEditorPreviewController::setPreviewFrameRenderingMode(

-    M4xVSS_MediaRendering mode, M4VIDEOEDITING_VideoFrameSize outputVideoSize) {

-

-    LOGV("setMediaRenderingMode: outputVideoSize = %d", outputVideoSize);

-    mRenderingMode = mode;

-

-    switch(outputVideoSize) {

-        case M4VIDEOEDITING_kSQCIF:

-            mOutputVideoWidth = 128;

-            mOutputVideoHeight = 96;

-            break;

-

-        case M4VIDEOEDITING_kQQVGA:

-            mOutputVideoWidth = 160;

-            mOutputVideoHeight = 120;

-            break;

-

-        case M4VIDEOEDITING_kQCIF:

-            mOutputVideoWidth = 176;

-            mOutputVideoHeight = 144;

-            break;

-

-        case M4VIDEOEDITING_kQVGA:

-            mOutputVideoWidth = 320;

-            mOutputVideoHeight = 240;

-            break;

-

-        case M4VIDEOEDITING_kCIF:

-            mOutputVideoWidth = 352;

-            mOutputVideoHeight = 288;

-            break;

-

-        case M4VIDEOEDITING_kVGA:

-            mOutputVideoWidth = 640;

-            mOutputVideoHeight = 480;

-            break;

-

-        case M4VIDEOEDITING_kWVGA:

-            mOutputVideoWidth = 800;

-            mOutputVideoHeight = 480;

-            break;

-

-        case M4VIDEOEDITING_kNTSC:

-            mOutputVideoWidth = 720;

-            mOutputVideoHeight = 480;

-            break;

-

-        case M4VIDEOEDITING_k640_360:

-            mOutputVideoWidth = 640;

-            mOutputVideoHeight = 360;

-            break;

-

-        case M4VIDEOEDITING_k854_480:

-            mOutputVideoWidth = 854;

-            mOutputVideoHeight = 480;

-            break;

-

-        case M4VIDEOEDITING_kHD1280:

-            mOutputVideoWidth = 1280;

-            mOutputVideoHeight = 720;

-            break;

-

-        case M4VIDEOEDITING_kHD1080:

-            mOutputVideoWidth = 1080;

-            mOutputVideoHeight = 720;

-            break;

-

-        case M4VIDEOEDITING_kHD960:

-            mOutputVideoWidth = 960;

-            mOutputVideoHeight = 720;

-            break;

-

-        default:

-            mOutputVideoWidth = 0;

-            mOutputVideoHeight = 0;

-            break;

-    }

-

-    return OK;

-}

-

-M4OSA_ERR VideoEditorPreviewController::doImageRenderingMode(

-    M4OSA_Void * dataPtr, M4OSA_UInt32 colorFormat, M4OSA_UInt32 videoWidth,

-    M4OSA_UInt32 videoHeight, M4OSA_Void* outPtr) {

-

-    M4OSA_ERR err = M4NO_ERROR;

-    M4VIFI_ImagePlane planeIn[3], planeOut[3];

-    M4VIFI_UInt8 *inBuffer = M4OSA_NULL;

-    M4OSA_UInt32 outputBufferWidth =0, outputBufferHeight=0;

-

-    //frameSize = (videoWidth*videoHeight*3) >> 1;

-    inBuffer = (M4OSA_UInt8 *)dataPtr;

-

-    // In plane

-    prepareYUV420ImagePlane(planeIn, videoWidth,

-      videoHeight, (M4VIFI_UInt8 *)inBuffer, videoWidth, videoHeight);

-

-    outputBufferWidth = mOutputVideoWidth;

-    outputBufferHeight = mOutputVideoHeight;

-

-    // Out plane

-    uint8_t* outBuffer;

-    size_t outBufferStride = 0;

-

-    LOGV("doMediaRendering CALL getBuffer()");

-    mTarget->getBufferYV12(&outBuffer, &outBufferStride);

-

-    // Set the output YUV420 plane to be compatible with YV12 format

-    //In YV12 format, sizes must be even

-    M4OSA_UInt32 yv12PlaneWidth = ((mOutputVideoWidth +1)>>1)<<1;

-    M4OSA_UInt32 yv12PlaneHeight = ((mOutputVideoHeight+1)>>1)<<1;

-

-    prepareYV12ImagePlane(planeOut, yv12PlaneWidth, yv12PlaneHeight,

-     (M4OSA_UInt32)outBufferStride, (M4VIFI_UInt8 *)outBuffer);

-

-    err = applyRenderingMode(planeIn, planeOut, mRenderingMode);

-    if(err != M4NO_ERROR) {

-        LOGE("doImageRenderingMode: applyRenderingMode returned err=0x%x", (unsigned int)err);

-    }

-    return err;

-}

-

-} //namespace android

+                }
+            }
+
+            break;
+        }
+        case MEDIA_ERROR:
+        {
+            int err_val = ext1;
+          // Always log errors.
+          // ext1: Media framework error code.
+          // ext2: Implementation dependant error code.
+            LOGE("MEDIA_ERROR; error (%d, %d)", ext1, ext2);
+            if(pController->mJniCallback != NULL) {
+                pController->mJniCallback(pController->mJniCookie,
+                 MSG_TYPE_PLAYER_ERROR, &err_val);
+            }
+            break;
+        }
+        case MEDIA_INFO:
+        {
+            int info_val = ext2;
+            // ext1: Media framework error code.
+            // ext2: Implementation dependant error code.
+            //LOGW("MEDIA_INFO; info/warning (%d, %d)", ext1, ext2);
+            if(pController->mJniCallback != NULL) {
+                pController->mJniCallback(pController->mJniCookie,
+                 MSG_TYPE_PROGRESS_INDICATION, &info_val);
+            }
+            break;
+        }
+        case MEDIA_SEEK_COMPLETE:
+            LOGV("MEDIA_SEEK_COMPLETE; Received seek complete");
+            break;
+        case MEDIA_BUFFERING_UPDATE:
+            LOGV("MEDIA_BUFFERING_UPDATE; buffering %d", ext1);
+            break;
+        case MEDIA_SET_VIDEO_SIZE:
+            LOGV("MEDIA_SET_VIDEO_SIZE; New video size %d x %d", ext1, ext2);
+            break;
+        case 0xAAAAAAAA:
+            LOGV("VIDEO PLAYBACK ALMOST over, prepare next player");
+            // Select next player and prepare it
+            // If there is a clip after this one
+            if ((M4OSA_UInt32)(pController->mCurrentClipNumber+1) <
+             pController->mNumberClipsToPreview) {
+                pController->mPrepareReqest = M4OSA_TRUE;
+                pController->mCurrentPlayer++;
+                if (pController->mCurrentPlayer >= NBPLAYER_INSTANCES) {
+                    pController->mCurrentPlayer = 0;
+                }
+                // Prepare the first clip to be played
+                {
+                    Mutex::Autolock autoLock(pController->mLockSem);
+                    if (pController->mSemThreadWait != NULL) {
+                        M4OSA_semaphorePost(pController->mSemThreadWait);
+                    }
+                }
+            }
+            break;
+        case 0xBBBBBBBB:
+        {
+            LOGV("VIDEO PLAYBACK, Update Overlay");
+            int overlayIndex = ext2;
+            VideoEditorCurretEditInfo *pEditInfo =
+                    (VideoEditorCurretEditInfo*)M4OSA_32bitAlignedMalloc(sizeof(VideoEditorCurretEditInfo),
+                    M4VS, (M4OSA_Char*)"Current Edit info");
+            //ext1 = 1; start the overlay display
+            //     = 2; Clear the overlay.
+            pEditInfo->overlaySettingsIndex = ext2;
+            pEditInfo->clipIndex = pController->mCurrentClipNumber;
+            LOGV("pController->mCurrentClipNumber = %d",pController->mCurrentClipNumber);
+            if (pController->mJniCallback != NULL) {
+                if (ext1 == 1) {
+                    pController->mOverlayState = OVERLAY_UPDATE;
+                    pController->mJniCallback(pController->mJniCookie,
+                        MSG_TYPE_OVERLAY_UPDATE, pEditInfo);
+                } else {
+                    pController->mOverlayState = OVERLAY_CLEAR;
+                    pController->mJniCallback(pController->mJniCookie,
+                        MSG_TYPE_OVERLAY_CLEAR, pEditInfo);
+                }
+            }
+            free(pEditInfo);
+            break;
+        }
+        default:
+            LOGV("unrecognized message: (%d, %d, %d)", msg, ext1, ext2);
+            break;
+    }
+}
+
+void VideoEditorPreviewController::setVideoEffectType(
+    M4VSS3GPP_VideoEffectType type, M4OSA_Bool enable) {
+
+    M4OSA_UInt32 effect = VIDEO_EFFECT_NONE;
+
+    // map M4VSS3GPP_VideoEffectType to local enum
+    switch(type) {
+        case M4VSS3GPP_kVideoEffectType_FadeFromBlack:
+            effect = VIDEO_EFFECT_FADEFROMBLACK;
+            break;
+
+        case M4VSS3GPP_kVideoEffectType_FadeToBlack:
+            effect = VIDEO_EFFECT_FADETOBLACK;
+            break;
+
+        case M4VSS3GPP_kVideoEffectType_CurtainOpening:
+            effect = VIDEO_EFFECT_CURTAINOPEN;
+            break;
+
+        case M4VSS3GPP_kVideoEffectType_CurtainClosing:
+            effect = VIDEO_EFFECT_CURTAINCLOSE;
+            break;
+
+        case M4xVSS_kVideoEffectType_BlackAndWhite:
+            effect = VIDEO_EFFECT_BLACKANDWHITE;
+            break;
+
+        case M4xVSS_kVideoEffectType_Pink:
+            effect = VIDEO_EFFECT_PINK;
+            break;
+
+        case M4xVSS_kVideoEffectType_Green:
+            effect = VIDEO_EFFECT_GREEN;
+            break;
+
+        case M4xVSS_kVideoEffectType_Sepia:
+            effect = VIDEO_EFFECT_SEPIA;
+            break;
+
+        case M4xVSS_kVideoEffectType_Negative:
+            effect = VIDEO_EFFECT_NEGATIVE;
+            break;
+
+        case M4xVSS_kVideoEffectType_Framing:
+            effect = VIDEO_EFFECT_FRAMING;
+            break;
+
+        case M4xVSS_kVideoEffectType_Fifties:
+            effect = VIDEO_EFFECT_FIFTIES;
+            break;
+
+        case M4xVSS_kVideoEffectType_ColorRGB16:
+            effect = VIDEO_EFFECT_COLOR_RGB16;
+            break;
+
+        case M4xVSS_kVideoEffectType_Gradient:
+            effect = VIDEO_EFFECT_GRADIENT;
+            break;
+
+        default:
+            effect = VIDEO_EFFECT_NONE;
+            break;
+    }
+
+    if(enable == M4OSA_TRUE) {
+        // If already set, then no need to set again
+        if(!(mCurrentVideoEffect & effect))
+            mCurrentVideoEffect |= effect;
+            if(effect == VIDEO_EFFECT_FIFTIES) {
+                mIsFiftiesEffectStarted = true;
+            }
+    }
+    else  {
+        // Reset only if already set
+        if(mCurrentVideoEffect & effect)
+            mCurrentVideoEffect &= ~effect;
+    }
+
+    return;
+}
+
+
+M4OSA_ERR VideoEditorPreviewController::applyVideoEffect(
+    M4OSA_Void * dataPtr, M4OSA_UInt32 colorFormat, M4OSA_UInt32 videoWidth,
+    M4OSA_UInt32 videoHeight, M4OSA_UInt32 timeMs, M4OSA_Void* outPtr) {
+
+    M4OSA_ERR err = M4NO_ERROR;
+    vePostProcessParams postProcessParams;
+
+    postProcessParams.vidBuffer = (M4VIFI_UInt8*)dataPtr;
+    postProcessParams.videoWidth = videoWidth;
+    postProcessParams.videoHeight = videoHeight;
+    postProcessParams.timeMs = timeMs;
+    postProcessParams.timeOffset = 0; //Since timeMS already takes care of offset in this case
+    postProcessParams.effectsSettings = mEffectsSettings;
+    postProcessParams.numberEffects = mNumberEffects;
+    postProcessParams.outVideoWidth = mOutputVideoWidth;
+    postProcessParams.outVideoHeight = mOutputVideoHeight;
+    postProcessParams.currentVideoEffect = mCurrentVideoEffect;
+    postProcessParams.renderingMode = mRenderingMode;
+    if(mIsFiftiesEffectStarted == M4OSA_TRUE) {
+        postProcessParams.isFiftiesEffectStarted = M4OSA_TRUE;
+        mIsFiftiesEffectStarted = M4OSA_FALSE;
+    }
+    else {
+       postProcessParams.isFiftiesEffectStarted = M4OSA_FALSE;
+    }
+    //postProcessParams.renderer = mTarget;
+    postProcessParams.overlayFrameRGBBuffer = NULL;
+    postProcessParams.overlayFrameYUVBuffer = NULL;
+
+    mTarget->getBufferYV12(&(postProcessParams.pOutBuffer), &(postProcessParams.outBufferStride));
+
+    err = applyEffectsAndRenderingMode(&postProcessParams, videoWidth, videoHeight);
+    return err;
+}
+
+M4OSA_ERR VideoEditorPreviewController::setPreviewFrameRenderingMode(
+    M4xVSS_MediaRendering mode, M4VIDEOEDITING_VideoFrameSize outputVideoSize) {
+
+    LOGV("setMediaRenderingMode: outputVideoSize = %d", outputVideoSize);
+    mRenderingMode = mode;
+
+    switch(outputVideoSize) {
+        case M4VIDEOEDITING_kSQCIF:
+            mOutputVideoWidth = 128;
+            mOutputVideoHeight = 96;
+            break;
+
+        case M4VIDEOEDITING_kQQVGA:
+            mOutputVideoWidth = 160;
+            mOutputVideoHeight = 120;
+            break;
+
+        case M4VIDEOEDITING_kQCIF:
+            mOutputVideoWidth = 176;
+            mOutputVideoHeight = 144;
+            break;
+
+        case M4VIDEOEDITING_kQVGA:
+            mOutputVideoWidth = 320;
+            mOutputVideoHeight = 240;
+            break;
+
+        case M4VIDEOEDITING_kCIF:
+            mOutputVideoWidth = 352;
+            mOutputVideoHeight = 288;
+            break;
+
+        case M4VIDEOEDITING_kVGA:
+            mOutputVideoWidth = 640;
+            mOutputVideoHeight = 480;
+            break;
+
+        case M4VIDEOEDITING_kWVGA:
+            mOutputVideoWidth = 800;
+            mOutputVideoHeight = 480;
+            break;
+
+        case M4VIDEOEDITING_kNTSC:
+            mOutputVideoWidth = 720;
+            mOutputVideoHeight = 480;
+            break;
+
+        case M4VIDEOEDITING_k640_360:
+            mOutputVideoWidth = 640;
+            mOutputVideoHeight = 360;
+            break;
+
+        case M4VIDEOEDITING_k854_480:
+            mOutputVideoWidth = 854;
+            mOutputVideoHeight = 480;
+            break;
+
+        case M4VIDEOEDITING_kHD1280:
+            mOutputVideoWidth = 1280;
+            mOutputVideoHeight = 720;
+            break;
+
+        case M4VIDEOEDITING_kHD1080:
+            mOutputVideoWidth = 1080;
+            mOutputVideoHeight = 720;
+            break;
+
+        case M4VIDEOEDITING_kHD960:
+            mOutputVideoWidth = 960;
+            mOutputVideoHeight = 720;
+            break;
+
+        default:
+            mOutputVideoWidth = 0;
+            mOutputVideoHeight = 0;
+            break;
+    }
+
+    return OK;
+}
+
+M4OSA_ERR VideoEditorPreviewController::doImageRenderingMode(
+    M4OSA_Void * dataPtr, M4OSA_UInt32 colorFormat, M4OSA_UInt32 videoWidth,
+    M4OSA_UInt32 videoHeight, M4OSA_Void* outPtr) {
+
+    M4OSA_ERR err = M4NO_ERROR;
+    M4VIFI_ImagePlane planeIn[3], planeOut[3];
+    M4VIFI_UInt8 *inBuffer = M4OSA_NULL;
+    M4OSA_UInt32 outputBufferWidth =0, outputBufferHeight=0;
+
+    //frameSize = (videoWidth*videoHeight*3) >> 1;
+    inBuffer = (M4OSA_UInt8 *)dataPtr;
+
+    // In plane
+    prepareYUV420ImagePlane(planeIn, videoWidth,
+      videoHeight, (M4VIFI_UInt8 *)inBuffer, videoWidth, videoHeight);
+
+    outputBufferWidth = mOutputVideoWidth;
+    outputBufferHeight = mOutputVideoHeight;
+
+    // Out plane
+    uint8_t* outBuffer;
+    size_t outBufferStride = 0;
+
+    LOGV("doMediaRendering CALL getBuffer()");
+    mTarget->getBufferYV12(&outBuffer, &outBufferStride);
+
+    // Set the output YUV420 plane to be compatible with YV12 format
+    //In YV12 format, sizes must be even
+    M4OSA_UInt32 yv12PlaneWidth = ((mOutputVideoWidth +1)>>1)<<1;
+    M4OSA_UInt32 yv12PlaneHeight = ((mOutputVideoHeight+1)>>1)<<1;
+
+    prepareYV12ImagePlane(planeOut, yv12PlaneWidth, yv12PlaneHeight,
+     (M4OSA_UInt32)outBufferStride, (M4VIFI_UInt8 *)outBuffer);
+
+    err = applyRenderingMode(planeIn, planeOut, mRenderingMode);
+    if(err != M4NO_ERROR) {
+        LOGE("doImageRenderingMode: applyRenderingMode returned err=0x%x", (unsigned int)err);
+    }
+    return err;
+}
+
+} //namespace android
diff --git a/libvideoeditor/lvpp/VideoEditorPreviewController.h b/libvideoeditor/lvpp/VideoEditorPreviewController.h
index 772aaff..18b61b5 100755
--- a/libvideoeditor/lvpp/VideoEditorPreviewController.h
+++ b/libvideoeditor/lvpp/VideoEditorPreviewController.h
@@ -1,159 +1,159 @@
-/*

- * Copyright (C) 2011 The Android Open Source Project

- *

- * Licensed under the Apache License, Version 2.0 (the "License");

- * you may not use this file except in compliance with the License.

- * You may obtain a copy of the License at

- *

- *      http://www.apache.org/licenses/LICENSE-2.0

- *

- * Unless required by applicable law or agreed to in writing, software

- * distributed under the License is distributed on an "AS IS" BASIS,

- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

- * See the License for the specific language governing permissions and

- * limitations under the License.

- */

-

-#ifndef ANDROID_VE_PREVIEWCONTROLLER_H

-#define ANDROID_VE_PREVIEWCONTROLLER_H

-

-#include <utils/Log.h>

-#include "VideoEditorPlayer.h"

-#include "VideoEditorAudioPlayer.h"

-#include "M4OSA_Semaphore.h"

-#include "M4OSA_Thread.h"

-#include "M4OSA_Clock.h"

-#include "M4OSA_Time.h"

-#include "M4xVSS_API.h"

-#include "M4xVSS_Internal.h"

-#include "M4VFL_transition.h"

-#include "VideoEditorTools.h"

-#include "VideoEditorThumbnailMain.h"

-#include "VideoEditorMain.h"

-

-#include "OMX_IVCommon.h"

-#include "mediaplayer.h"

-#include <surfaceflinger/Surface.h>

-#include <surfaceflinger/ISurface.h>

-#include <surfaceflinger/ISurfaceComposer.h>

-#include <surfaceflinger/SurfaceComposerClient.h>

-#include <media/stagefright/MediaBuffer.h>

-#include "PreviewRenderer.h"

-

-

-#define NBPLAYER_INSTANCES 2

-

-namespace android {

-

-typedef enum {

-    VePlayerIdle=0,

-    VePlayerBusy,

-    VePlayerAutoStop

-} VePlayerState;

-

-typedef enum {

-    OVERLAY_UPDATE = 0,

-    OVERLAY_CLEAR

-} OverlayState;

-

-// Callback mechanism from PreviewController to Jni  */

-typedef void (*jni_progress_callback_fct)(void* cookie, M4OSA_UInt32 msgType, void *argc);

-

-

-class VideoEditorPreviewController {

-

-public:

-    VideoEditorPreviewController();

-    virtual ~VideoEditorPreviewController();

-

-    M4OSA_ERR loadEditSettings(M4VSS3GPP_EditSettings* pSettings,

-        M4xVSS_AudioMixingSettings* bgmSettings);

-

-    M4OSA_ERR setSurface(const sp<Surface> &surface);

-

-    M4OSA_ERR startPreview(M4OSA_UInt32 fromMS, M4OSA_Int32 toMs,

-        M4OSA_UInt16 callBackAfterFrameCount, M4OSA_Bool loop) ;

-

-    M4OSA_UInt32 stopPreview();

-

-    M4OSA_ERR renderPreviewFrame(const sp<Surface> &surface,

-        VideoEditor_renderPreviewFrameStr* pFrameInfo,

-        VideoEditorCurretEditInfo *pCurrEditInfo);

-

-    M4OSA_ERR clearSurface(const sp<Surface> &surface,

-     VideoEditor_renderPreviewFrameStr* pFrameInfo);

-

-    M4OSA_Void setJniCallback(void* cookie,

-        jni_progress_callback_fct callbackFct);

-

-    M4OSA_ERR setPreviewFrameRenderingMode(M4xVSS_MediaRendering mode,

-        M4VIDEOEDITING_VideoFrameSize outputVideoSize);

-

-private:

-    sp<VideoEditorPlayer> mVePlayer[NBPLAYER_INSTANCES];

-    int mCurrentPlayer; //Instance of the player currently being used

-    sp<Surface>  mSurface;

-    mutable Mutex mLock;

-    M4OSA_Context mThreadContext;

-    VePlayerState mPlayerState;

-    M4OSA_Bool    mPrepareReqest;

-    M4VSS3GPP_ClipSettings **mClipList; //Pointer to an array of clip settings

-    M4OSA_UInt32 mNumberClipsInStoryBoard;

-    M4OSA_UInt32 mNumberClipsToPreview;

-    M4OSA_UInt32 mStartingClipIndex;

-    M4OSA_Bool mPreviewLooping;

-    M4OSA_UInt32 mCallBackAfterFrameCnt;

-    M4VSS3GPP_EffectSettings* mEffectsSettings;

-    M4OSA_UInt32 mNumberEffects;

-    M4OSA_Int32 mCurrentClipNumber;

-    M4OSA_UInt32 mClipTotalDuration;

-    M4OSA_UInt32 mCurrentVideoEffect;

-    M4xVSS_AudioMixingSettings* mBackgroundAudioSetting;

-    M4OSA_Context mAudioMixPCMFileHandle;

-    PreviewRenderer *mTarget;

-    M4OSA_Context mJniCookie;

-    jni_progress_callback_fct mJniCallback;

-    VideoEditor_renderPreviewFrameStr mFrameStr;

-    M4OSA_UInt32 mCurrentPlayedDuration;

-    M4OSA_UInt32 mCurrentClipDuration;

-    M4VIDEOEDITING_VideoFrameSize mOutputVideoSize;

-    M4OSA_UInt32 mFirstPreviewClipBeginTime;

-    M4OSA_UInt32 mLastPreviewClipEndTime;

-    M4OSA_UInt32 mVideoStoryBoardTimeMsUptoFirstPreviewClip;

-    OverlayState mOverlayState;

-    int mActivePlayerIndex;

-

-    M4xVSS_MediaRendering mRenderingMode;

-    uint32_t mOutputVideoWidth;

-    uint32_t mOutputVideoHeight;

-    bool bStopThreadInProgress;

-    M4OSA_Context mSemThreadWait;

-    bool mIsFiftiesEffectStarted;

-

-    sp<VideoEditorPlayer::VeAudioOutput> mVEAudioSink;

-    VideoEditorAudioPlayer *mVEAudioPlayer;

-

-    M4VIFI_UInt8*  mFrameRGBBuffer;

-    M4VIFI_UInt8*  mFrameYUVBuffer;

-    mutable Mutex mLockSem;

-    static M4OSA_ERR preparePlayer(void* param, int playerInstance, int index);

-    static M4OSA_ERR threadProc(M4OSA_Void* param);

-    static void notify(void* cookie, int msg, int ext1, int ext2);

-

-    void setVideoEffectType(M4VSS3GPP_VideoEffectType type, M4OSA_Bool enable);

-

-    M4OSA_ERR applyVideoEffect(M4OSA_Void * dataPtr, M4OSA_UInt32 colorFormat,

-     M4OSA_UInt32 videoWidth, M4OSA_UInt32 videoHeight,

-     M4OSA_UInt32 timeMs, M4OSA_Void* outPtr);

-

-    M4OSA_ERR doImageRenderingMode(M4OSA_Void * dataPtr,

-     M4OSA_UInt32 colorFormat, M4OSA_UInt32 videoWidth,

-     M4OSA_UInt32 videoHeight, M4OSA_Void* outPtr);

-

-    VideoEditorPreviewController(const VideoEditorPreviewController &);

-    VideoEditorPreviewController &operator=(const VideoEditorPreviewController &);

-};

-

-}

-

-#endif //ANDROID_VE_PREVIEWCONTROLLER_H

+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_VE_PREVIEWCONTROLLER_H
+#define ANDROID_VE_PREVIEWCONTROLLER_H
+
+#include <utils/Log.h>
+#include "VideoEditorPlayer.h"
+#include "VideoEditorAudioPlayer.h"
+#include "M4OSA_Semaphore.h"
+#include "M4OSA_Thread.h"
+#include "M4OSA_Clock.h"
+#include "M4OSA_Time.h"
+#include "M4xVSS_API.h"
+#include "M4xVSS_Internal.h"
+#include "M4VFL_transition.h"
+#include "VideoEditorTools.h"
+#include "VideoEditorThumbnailMain.h"
+#include "VideoEditorMain.h"
+
+#include "OMX_IVCommon.h"
+#include "mediaplayer.h"
+#include <surfaceflinger/Surface.h>
+#include <surfaceflinger/ISurface.h>
+#include <surfaceflinger/ISurfaceComposer.h>
+#include <surfaceflinger/SurfaceComposerClient.h>
+#include <media/stagefright/MediaBuffer.h>
+#include "PreviewRenderer.h"
+
+
+#define NBPLAYER_INSTANCES 2
+
+namespace android {
+
+typedef enum {
+    VePlayerIdle=0,
+    VePlayerBusy,
+    VePlayerAutoStop
+} VePlayerState;
+
+typedef enum {
+    OVERLAY_UPDATE = 0,
+    OVERLAY_CLEAR
+} OverlayState;
+
+// Callback mechanism from PreviewController to Jni  */
+typedef void (*jni_progress_callback_fct)(void* cookie, M4OSA_UInt32 msgType, void *argc);
+
+
+class VideoEditorPreviewController {
+
+public:
+    VideoEditorPreviewController();
+    virtual ~VideoEditorPreviewController();
+
+    M4OSA_ERR loadEditSettings(M4VSS3GPP_EditSettings* pSettings,
+        M4xVSS_AudioMixingSettings* bgmSettings);
+
+    M4OSA_ERR setSurface(const sp<Surface> &surface);
+
+    M4OSA_ERR startPreview(M4OSA_UInt32 fromMS, M4OSA_Int32 toMs,
+        M4OSA_UInt16 callBackAfterFrameCount, M4OSA_Bool loop) ;
+
+    M4OSA_UInt32 stopPreview();
+
+    M4OSA_ERR renderPreviewFrame(const sp<Surface> &surface,
+        VideoEditor_renderPreviewFrameStr* pFrameInfo,
+        VideoEditorCurretEditInfo *pCurrEditInfo);
+
+    M4OSA_ERR clearSurface(const sp<Surface> &surface,
+     VideoEditor_renderPreviewFrameStr* pFrameInfo);
+
+    M4OSA_Void setJniCallback(void* cookie,
+        jni_progress_callback_fct callbackFct);
+
+    M4OSA_ERR setPreviewFrameRenderingMode(M4xVSS_MediaRendering mode,
+        M4VIDEOEDITING_VideoFrameSize outputVideoSize);
+
+private:
+    sp<VideoEditorPlayer> mVePlayer[NBPLAYER_INSTANCES];
+    int mCurrentPlayer; //Instance of the player currently being used
+    sp<Surface>  mSurface;
+    mutable Mutex mLock;
+    M4OSA_Context mThreadContext;
+    VePlayerState mPlayerState;
+    M4OSA_Bool    mPrepareReqest;
+    M4VSS3GPP_ClipSettings **mClipList; //Pointer to an array of clip settings
+    M4OSA_UInt32 mNumberClipsInStoryBoard;
+    M4OSA_UInt32 mNumberClipsToPreview;
+    M4OSA_UInt32 mStartingClipIndex;
+    M4OSA_Bool mPreviewLooping;
+    M4OSA_UInt32 mCallBackAfterFrameCnt;
+    M4VSS3GPP_EffectSettings* mEffectsSettings;
+    M4OSA_UInt32 mNumberEffects;
+    M4OSA_Int32 mCurrentClipNumber;
+    M4OSA_UInt32 mClipTotalDuration;
+    M4OSA_UInt32 mCurrentVideoEffect;
+    M4xVSS_AudioMixingSettings* mBackgroundAudioSetting;
+    M4OSA_Context mAudioMixPCMFileHandle;
+    PreviewRenderer *mTarget;
+    M4OSA_Context mJniCookie;
+    jni_progress_callback_fct mJniCallback;
+    VideoEditor_renderPreviewFrameStr mFrameStr;
+    M4OSA_UInt32 mCurrentPlayedDuration;
+    M4OSA_UInt32 mCurrentClipDuration;
+    M4VIDEOEDITING_VideoFrameSize mOutputVideoSize;
+    M4OSA_UInt32 mFirstPreviewClipBeginTime;
+    M4OSA_UInt32 mLastPreviewClipEndTime;
+    M4OSA_UInt32 mVideoStoryBoardTimeMsUptoFirstPreviewClip;
+    OverlayState mOverlayState;
+    int mActivePlayerIndex;
+
+    M4xVSS_MediaRendering mRenderingMode;
+    uint32_t mOutputVideoWidth;
+    uint32_t mOutputVideoHeight;
+    bool bStopThreadInProgress;
+    M4OSA_Context mSemThreadWait;
+    bool mIsFiftiesEffectStarted;
+
+    sp<VideoEditorPlayer::VeAudioOutput> mVEAudioSink;
+    VideoEditorAudioPlayer *mVEAudioPlayer;
+
+    M4VIFI_UInt8*  mFrameRGBBuffer;
+    M4VIFI_UInt8*  mFrameYUVBuffer;
+    mutable Mutex mLockSem;
+    static M4OSA_ERR preparePlayer(void* param, int playerInstance, int index);
+    static M4OSA_ERR threadProc(M4OSA_Void* param);
+    static void notify(void* cookie, int msg, int ext1, int ext2);
+
+    void setVideoEffectType(M4VSS3GPP_VideoEffectType type, M4OSA_Bool enable);
+
+    M4OSA_ERR applyVideoEffect(M4OSA_Void * dataPtr, M4OSA_UInt32 colorFormat,
+     M4OSA_UInt32 videoWidth, M4OSA_UInt32 videoHeight,
+     M4OSA_UInt32 timeMs, M4OSA_Void* outPtr);
+
+    M4OSA_ERR doImageRenderingMode(M4OSA_Void * dataPtr,
+     M4OSA_UInt32 colorFormat, M4OSA_UInt32 videoWidth,
+     M4OSA_UInt32 videoHeight, M4OSA_Void* outPtr);
+
+    VideoEditorPreviewController(const VideoEditorPreviewController &);
+    VideoEditorPreviewController &operator=(const VideoEditorPreviewController &);
+};
+
+}
+
+#endif //ANDROID_VE_PREVIEWCONTROLLER_H
diff --git a/libvideoeditor/lvpp/VideoEditorSRC.cpp b/libvideoeditor/lvpp/VideoEditorSRC.cpp
index 17eb55a..9c50299 100755
--- a/libvideoeditor/lvpp/VideoEditorSRC.cpp
+++ b/libvideoeditor/lvpp/VideoEditorSRC.cpp
@@ -1,369 +1,369 @@
-/*

- * Copyright (C) 2011 The Android Open Source Project

- *

- * Licensed under the Apache License, Version 2.0 (the "License");

- * you may not use this file except in compliance with the License.

- * You may obtain a copy of the License at

- *

- *      http://www.apache.org/licenses/LICENSE-2.0

- *

- * Unless required by applicable law or agreed to in writing, software

- * distributed under the License is distributed on an "AS IS" BASIS,

- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

- * See the License for the specific language governing permissions and

- * limitations under the License.

- */

-

-#define LOG_NDEBUG 1

-#define LOG_TAG "VEAudioSource"

-#include <utils/Log.h>

-

-

-#include "VideoEditorSRC.h"

-#include <media/stagefright/MetaData.h>

-#include <media/stagefright/MediaDebug.h>

-#include <media/stagefright/MediaBuffer.h>

-#include <media/stagefright/MediaDefs.h>

-#include "AudioMixer.h"

-

-

-namespace android {

-

-VideoEditorSRC::VideoEditorSRC(

-        const sp<MediaSource> &source) {

-

-    LOGV("VideoEditorSRC::Create");

-    mSource = source;

-    mResampler = NULL;

-    mBitDepth = 16;

-    mChannelCnt = 0;

-    mSampleRate = 0;

-    mOutputSampleRate = DEFAULT_SAMPLING_FREQ;

-    mStarted = false;

-    mIsResamplingRequired = false;

-    mIsChannelConvertionRequired = false;

-    mInitialTimeStampUs = -1;

-    mAccuOutBufferSize  = 0;

-    mSeekTimeUs = -1;

-    mLeftover = 0;

-    mLastReadSize = 0;

-    mReSampledBuffer = NULL;

-    mSeekMode =  ReadOptions::SEEK_PREVIOUS_SYNC;

-

-    mOutputFormat = new MetaData;

-

-    // Input Source validation

-    sp<MetaData> format = mSource->getFormat();

-    const char *mime;

-    bool success = format->findCString(kKeyMIMEType, &mime);

-    CHECK(success);

-    CHECK(!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_RAW));

-

-    //set the meta data of the output after convertion.

-    if(mOutputFormat != NULL) {

-        mOutputFormat->setCString(kKeyMIMEType, MEDIA_MIMETYPE_AUDIO_RAW);

-        mOutputFormat->setInt32(kKeySampleRate, DEFAULT_SAMPLING_FREQ);

-

-        //by default we convert all data to stereo

-        mOutputFormat->setInt32(kKeyChannelCount, 2);

-    } else {

-        LOGE("Meta data was not allocated.");

-    }

-

-    // Allocate a  1 sec buffer (test only, to be refined)

-    mInterframeBufferPosition = 0;

-    mInterframeBuffer = new uint8_t[DEFAULT_SAMPLING_FREQ * 2 * 2]; //stereo=2 * bytespersample=2

-

-

-}

-

-VideoEditorSRC::~VideoEditorSRC(){

-    if (mStarted == true)

-        stop();

-

-    if(mOutputFormat != NULL) {

-        mOutputFormat.clear();

-        mOutputFormat = NULL;

-    }

-

-    if (mInterframeBuffer != NULL){

-        delete mInterframeBuffer;

-        mInterframeBuffer = NULL;

-    }

-}

-

-void VideoEditorSRC::setResampling(int32_t sampleRate) {

-    Mutex::Autolock autoLock(mLock);

-    LOGV("VideoEditorSRC::setResampling called with samplreRate = %d", sampleRate);

-    if(sampleRate != DEFAULT_SAMPLING_FREQ) { //default case

-        LOGV("VideoEditor Audio resampler, freq set is other than default");

-        CHECK(mOutputFormat->setInt32(kKeySampleRate, DEFAULT_SAMPLING_FREQ));

-    }

-    mOutputSampleRate = sampleRate;

-    return;

-}

-

-status_t  VideoEditorSRC::start (MetaData *params) {

-    Mutex::Autolock autoLock(mLock);

-

-    CHECK(!mStarted);

-    LOGV(" VideoEditorSRC:start() called");

-

-    sp<MetaData> format = mSource->getFormat();

-    const char *mime;

-    bool success = format->findCString(kKeyMIMEType, &mime);

-    CHECK(success);

-    CHECK(!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_RAW));

-

-    success = format->findInt32(kKeySampleRate, &mSampleRate);

-    CHECK(success);

-

-    int32_t numChannels;

-    success = format->findInt32(kKeyChannelCount, &mChannelCnt);

-    CHECK(success);

-

-    if(mSampleRate != mOutputSampleRate) {

-        LOGV("Resampling required (%d != %d)", mSampleRate, mOutputSampleRate);

-        mIsResamplingRequired = true;

-        LOGV("Create resampler %d %d %d", mBitDepth, mChannelCnt, mOutputSampleRate);

-

-        mResampler = AudioResampler::create(

-                        mBitDepth, mChannelCnt, mOutputSampleRate, AudioResampler::DEFAULT);

-

-        if(mResampler == NULL) {

-            return NO_MEMORY;

-        }

-        LOGV("Set input rate %d", mSampleRate);

-        mResampler->setSampleRate(mSampleRate);

-        mResampler->setVolume(UNITY_GAIN, UNITY_GAIN);

-

-    } else {

-        if(mChannelCnt != 2) { //we always make sure to provide stereo

-            LOGV("Only Channel convertion required");

-            mIsChannelConvertionRequired = true;

-        }

-    }

-    mSeekTimeUs = -1;

-    mSeekMode =  ReadOptions::SEEK_PREVIOUS_SYNC;

-    mStarted = true;

-    mSource->start();

-

-    return OK;

-}

-

-status_t VideoEditorSRC::stop() {

-

-    Mutex::Autolock autoLock(mLock);

-    LOGV("VideoEditorSRC::stop()");

-    mSource->stop();

-    if(mResampler != NULL) {

-        delete mResampler;

-        mResampler = NULL;

-    }

-    mStarted = false;

-    mInitialTimeStampUs = -1;

-    mAccuOutBufferSize  = 0;

-    mLeftover = 0;

-    mLastReadSize = 0;

-    if (mReSampledBuffer != NULL) {

-        free(mReSampledBuffer);

-        mReSampledBuffer = NULL;

-    }

-

-    return OK;

-}

-

-sp<MetaData> VideoEditorSRC::getFormat() {

-    LOGV("AudioSRC getFormat");

-    //Mutex::Autolock autoLock(mLock);

-    return mOutputFormat;

-}

-

-status_t VideoEditorSRC::read (

-        MediaBuffer **buffer_out, const ReadOptions *options) {

-    Mutex::Autolock autoLock(mLock);

-    *buffer_out = NULL;

-    int32_t leftover = 0;

-

-    LOGV("VideoEditorSRC::read");

-

-    if (!mStarted) {

-        return ERROR_END_OF_STREAM;

-    }

-

-    if(mIsResamplingRequired == true) {

-

-        LOGV("mIsResamplingRequired = true");

-

-        // Store the seek parameters

-        int64_t seekTimeUs;

-        ReadOptions::SeekMode mode = ReadOptions::SEEK_PREVIOUS_SYNC;

-        if (options && options->getSeekTo(&seekTimeUs, &mode)) {

-            LOGV("read Seek %lld", seekTimeUs);

-            mInitialTimeStampUs = -1;

-            mSeekTimeUs = seekTimeUs;

-            mSeekMode = mode;

-        }

-

-        // We ask for 1024 frames in output

-        size_t outFrameCnt = 1024;

-        int32_t outBufferSize = (outFrameCnt) * 2 * sizeof(int16_t); //out is always 2 channels & 16 bits

-        int64_t outDurationUs = (outBufferSize * 1000000) /(mOutputSampleRate * 2 * sizeof(int16_t)); //2 channels out * 2 bytes per sample

-        LOGV("outBufferSize            %d", outBufferSize);

-        LOGV("outFrameCnt              %d", outFrameCnt);

-

-        int32_t *pTmpBuffer = (int32_t*)malloc(outFrameCnt * 2 * sizeof(int32_t)); //out is always 2 channels and resampler out is 32 bits

-        memset(pTmpBuffer, 0x00, outFrameCnt * 2 * sizeof(int32_t));

-        // Resample to target quality

-        mResampler->resample(pTmpBuffer, outFrameCnt, this);

-

-        // Free previous allocation

-        if (mReSampledBuffer != NULL) {

-            free(mReSampledBuffer);

-            mReSampledBuffer = NULL;

-        }

-        mReSampledBuffer = (int16_t*)malloc(outBufferSize);

-        memset(mReSampledBuffer, 0x00, outBufferSize);

-

-        // Convert back to 16 bits

-        AudioMixer::ditherAndClamp((int32_t*)mReSampledBuffer, pTmpBuffer, outFrameCnt);

-        LOGV("Resampled buffer size %d", outFrameCnt* 2 * sizeof(int16_t));

-

-        // Create new MediaBuffer

-        mCopyBuffer = new MediaBuffer((void*)mReSampledBuffer, outBufferSize);

-

-        // Compute and set the new timestamp

-        sp<MetaData> to = mCopyBuffer->meta_data();

-        int64_t totalOutDurationUs = (mAccuOutBufferSize * 1000000) /(mOutputSampleRate * 2 * 2); //2 channels out * 2 bytes per sample

-        int64_t timeUs = mInitialTimeStampUs + totalOutDurationUs;

-        to->setInt64(kKeyTime, timeUs);

-        LOGV("buffer duration %lld   timestamp %lld   init %lld", outDurationUs, timeUs, mInitialTimeStampUs);

-

-        // update the accumulate size

-        mAccuOutBufferSize += outBufferSize;

-

-        mCopyBuffer->set_range(0, outBufferSize);

-        *buffer_out = mCopyBuffer;

-

-        free(pTmpBuffer);

-

-    } else if(mIsChannelConvertionRequired == true) {

-        //TODO convert to stereo here.

-    } else {

-        //LOGI("Resampling not required");

-        MediaBuffer *aBuffer;

-        status_t err = mSource->read(&aBuffer, options);

-        LOGV("mSource->read returned %d", err);

-        if(err != OK) {

-            *buffer_out = NULL;

-            mStarted = false;

-            return err;

-        }

-        *buffer_out = aBuffer;

-    }

-

-    return OK;

-}

-

-status_t VideoEditorSRC::getNextBuffer(AudioBufferProvider::Buffer *pBuffer) {

-    LOGV("Requesting        %d", pBuffer->frameCount);

-    uint32_t availableFrames;

-    bool lastBuffer = false;

-    MediaBuffer *aBuffer;

-

-

-    //update the internal buffer

-    // Store the leftover at the beginning of the local buffer

-    if (mLeftover > 0) {

-        LOGV("Moving mLeftover =%d  from  %d", mLeftover, mLastReadSize);

-        if (mLastReadSize > 0) {

-            memcpy(mInterframeBuffer, (uint8_t*) (mInterframeBuffer + mLastReadSize), mLeftover);

-        }

-        mInterframeBufferPosition = mLeftover;

-    }

-    else {

-        mInterframeBufferPosition = 0;

-    }

-

-    availableFrames = mInterframeBufferPosition / (mChannelCnt*2);

-

-    while ((availableFrames < pBuffer->frameCount)&&(mStarted)) {

-        // if we seek, reset the initial time stamp and accumulated time

-        ReadOptions options;

-        if (mSeekTimeUs >= 0) {

-            LOGV("%p cacheMore_l Seek requested = %lld", this, mSeekTimeUs);

-            ReadOptions::SeekMode mode = mSeekMode;

-            options.setSeekTo(mSeekTimeUs, mode);

-            mSeekTimeUs = -1;

-        }

-        /* The first call to read() will require to buffer twice as much data */

-        /* This will be needed by the resampler */

-        status_t err = mSource->read(&aBuffer, &options);

-        LOGV("mSource->read returned %d", err);

-        if(err != OK) {

-            if (mInterframeBufferPosition == 0) {

-                mStarted = false;

-            }

-            //Empty the internal buffer if there is no more data left in the source

-            else {

-                lastBuffer = true;

-                //clear the end of the buffer, just in case

-                memset(mInterframeBuffer+mInterframeBufferPosition, 0x00, DEFAULT_SAMPLING_FREQ * 2 * 2 - mInterframeBufferPosition);

-                mStarted = false;

-            }

-        }

-        else {

-            //copy the buffer

-            memcpy(((uint8_t*) mInterframeBuffer) + mInterframeBufferPosition,

-                    ((uint8_t*) aBuffer->data()) + aBuffer->range_offset(),

-                    aBuffer->range_length());

-            LOGV("Read from buffer  %d", aBuffer->range_length());

-

-            mInterframeBufferPosition += aBuffer->range_length();

-            LOGV("Stored            %d", mInterframeBufferPosition);

-

-            // Get the time stamp of the first buffer

-            if (mInitialTimeStampUs == -1) {

-                int64_t curTS;

-                sp<MetaData> from = aBuffer->meta_data();

-                from->findInt64(kKeyTime, &curTS);

-                LOGV("setting mInitialTimeStampUs to %lld", mInitialTimeStampUs);

-                mInitialTimeStampUs = curTS;

-            }

-

-            // release the buffer

-            aBuffer->release();

-        }

-        availableFrames = mInterframeBufferPosition / (mChannelCnt*2);

-        LOGV("availableFrames   %d", availableFrames);

-    }

-

-    if (lastBuffer) {

-        pBuffer->frameCount = availableFrames;

-    }

-

-    //update the input buffer

-    pBuffer->raw        = (void*)(mInterframeBuffer);

-

-    // Update how many bytes are left

-    // (actualReadSize is updated in getNextBuffer() called from resample())

-    int32_t actualReadSize = pBuffer->frameCount * mChannelCnt * 2;

-    mLeftover = mInterframeBufferPosition - actualReadSize;

-    LOGV("mLeftover         %d", mLeftover);

-

-    mLastReadSize = actualReadSize;

-

-    LOGV("inFrameCount     %d", pBuffer->frameCount);

-

-    return OK;

-}

-

-

-void VideoEditorSRC::releaseBuffer(AudioBufferProvider::Buffer *pBuffer) {

-    if(pBuffer->raw != NULL) {

-        pBuffer->raw = NULL;

-    }

-    pBuffer->frameCount = 0;

-}

-

-} //namespce android

+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_NDEBUG 1
+#define LOG_TAG "VEAudioSource"
+#include <utils/Log.h>
+
+
+#include "VideoEditorSRC.h"
+#include <media/stagefright/MetaData.h>
+#include <media/stagefright/MediaDebug.h>
+#include <media/stagefright/MediaBuffer.h>
+#include <media/stagefright/MediaDefs.h>
+#include "AudioMixer.h"
+
+
+namespace android {
+
+VideoEditorSRC::VideoEditorSRC(
+        const sp<MediaSource> &source) {
+
+    LOGV("VideoEditorSRC::Create");
+    mSource = source;
+    mResampler = NULL;
+    mBitDepth = 16;
+    mChannelCnt = 0;
+    mSampleRate = 0;
+    mOutputSampleRate = DEFAULT_SAMPLING_FREQ;
+    mStarted = false;
+    mIsResamplingRequired = false;
+    mIsChannelConvertionRequired = false;
+    mInitialTimeStampUs = -1;
+    mAccuOutBufferSize  = 0;
+    mSeekTimeUs = -1;
+    mLeftover = 0;
+    mLastReadSize = 0;
+    mReSampledBuffer = NULL;
+    mSeekMode =  ReadOptions::SEEK_PREVIOUS_SYNC;
+
+    mOutputFormat = new MetaData;
+
+    // Input Source validation
+    sp<MetaData> format = mSource->getFormat();
+    const char *mime;
+    bool success = format->findCString(kKeyMIMEType, &mime);
+    CHECK(success);
+    CHECK(!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_RAW));
+
+    //set the meta data of the output after convertion.
+    if(mOutputFormat != NULL) {
+        mOutputFormat->setCString(kKeyMIMEType, MEDIA_MIMETYPE_AUDIO_RAW);
+        mOutputFormat->setInt32(kKeySampleRate, DEFAULT_SAMPLING_FREQ);
+
+        //by default we convert all data to stereo
+        mOutputFormat->setInt32(kKeyChannelCount, 2);
+    } else {
+        LOGE("Meta data was not allocated.");
+    }
+
+    // Allocate a  1 sec buffer (test only, to be refined)
+    mInterframeBufferPosition = 0;
+    mInterframeBuffer = new uint8_t[DEFAULT_SAMPLING_FREQ * 2 * 2]; //stereo=2 * bytespersample=2
+
+
+}
+
+VideoEditorSRC::~VideoEditorSRC(){
+    if (mStarted == true)
+        stop();
+
+    if(mOutputFormat != NULL) {
+        mOutputFormat.clear();
+        mOutputFormat = NULL;
+    }
+
+    if (mInterframeBuffer != NULL){
+        delete mInterframeBuffer;
+        mInterframeBuffer = NULL;
+    }
+}
+
+void VideoEditorSRC::setResampling(int32_t sampleRate) {
+    Mutex::Autolock autoLock(mLock);
+    LOGV("VideoEditorSRC::setResampling called with samplreRate = %d", sampleRate);
+    if(sampleRate != DEFAULT_SAMPLING_FREQ) { //default case
+        LOGV("VideoEditor Audio resampler, freq set is other than default");
+        CHECK(mOutputFormat->setInt32(kKeySampleRate, DEFAULT_SAMPLING_FREQ));
+    }
+    mOutputSampleRate = sampleRate;
+    return;
+}
+
+status_t  VideoEditorSRC::start (MetaData *params) {
+    Mutex::Autolock autoLock(mLock);
+
+    CHECK(!mStarted);
+    LOGV(" VideoEditorSRC:start() called");
+
+    sp<MetaData> format = mSource->getFormat();
+    const char *mime;
+    bool success = format->findCString(kKeyMIMEType, &mime);
+    CHECK(success);
+    CHECK(!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_RAW));
+
+    success = format->findInt32(kKeySampleRate, &mSampleRate);
+    CHECK(success);
+
+    int32_t numChannels;
+    success = format->findInt32(kKeyChannelCount, &mChannelCnt);
+    CHECK(success);
+
+    if(mSampleRate != mOutputSampleRate) {
+        LOGV("Resampling required (%d != %d)", mSampleRate, mOutputSampleRate);
+        mIsResamplingRequired = true;
+        LOGV("Create resampler %d %d %d", mBitDepth, mChannelCnt, mOutputSampleRate);
+
+        mResampler = AudioResampler::create(
+                        mBitDepth, mChannelCnt, mOutputSampleRate, AudioResampler::DEFAULT);
+
+        if(mResampler == NULL) {
+            return NO_MEMORY;
+        }
+        LOGV("Set input rate %d", mSampleRate);
+        mResampler->setSampleRate(mSampleRate);
+        mResampler->setVolume(UNITY_GAIN, UNITY_GAIN);
+
+    } else {
+        if(mChannelCnt != 2) { //we always make sure to provide stereo
+            LOGV("Only Channel convertion required");
+            mIsChannelConvertionRequired = true;
+        }
+    }
+    mSeekTimeUs = -1;
+    mSeekMode =  ReadOptions::SEEK_PREVIOUS_SYNC;
+    mStarted = true;
+    mSource->start();
+
+    return OK;
+}
+
+status_t VideoEditorSRC::stop() {
+
+    Mutex::Autolock autoLock(mLock);
+    LOGV("VideoEditorSRC::stop()");
+    mSource->stop();
+    if(mResampler != NULL) {
+        delete mResampler;
+        mResampler = NULL;
+    }
+    mStarted = false;
+    mInitialTimeStampUs = -1;
+    mAccuOutBufferSize  = 0;
+    mLeftover = 0;
+    mLastReadSize = 0;
+    if (mReSampledBuffer != NULL) {
+        free(mReSampledBuffer);
+        mReSampledBuffer = NULL;
+    }
+
+    return OK;
+}
+
+sp<MetaData> VideoEditorSRC::getFormat() {
+    LOGV("AudioSRC getFormat");
+    //Mutex::Autolock autoLock(mLock);
+    return mOutputFormat;
+}
+
+status_t VideoEditorSRC::read (
+        MediaBuffer **buffer_out, const ReadOptions *options) {
+    Mutex::Autolock autoLock(mLock);
+    *buffer_out = NULL;
+    int32_t leftover = 0;
+
+    LOGV("VideoEditorSRC::read");
+
+    if (!mStarted) {
+        return ERROR_END_OF_STREAM;
+    }
+
+    if(mIsResamplingRequired == true) {
+
+        LOGV("mIsResamplingRequired = true");
+
+        // Store the seek parameters
+        int64_t seekTimeUs;
+        ReadOptions::SeekMode mode = ReadOptions::SEEK_PREVIOUS_SYNC;
+        if (options && options->getSeekTo(&seekTimeUs, &mode)) {
+            LOGV("read Seek %lld", seekTimeUs);
+            mInitialTimeStampUs = -1;
+            mSeekTimeUs = seekTimeUs;
+            mSeekMode = mode;
+        }
+
+        // We ask for 1024 frames in output
+        size_t outFrameCnt = 1024;
+        int32_t outBufferSize = (outFrameCnt) * 2 * sizeof(int16_t); //out is always 2 channels & 16 bits
+        int64_t outDurationUs = (outBufferSize * 1000000) /(mOutputSampleRate * 2 * sizeof(int16_t)); //2 channels out * 2 bytes per sample
+        LOGV("outBufferSize            %d", outBufferSize);
+        LOGV("outFrameCnt              %d", outFrameCnt);
+
+        int32_t *pTmpBuffer = (int32_t*)malloc(outFrameCnt * 2 * sizeof(int32_t)); //out is always 2 channels and resampler out is 32 bits
+        memset(pTmpBuffer, 0x00, outFrameCnt * 2 * sizeof(int32_t));
+        // Resample to target quality
+        mResampler->resample(pTmpBuffer, outFrameCnt, this);
+
+        // Free previous allocation
+        if (mReSampledBuffer != NULL) {
+            free(mReSampledBuffer);
+            mReSampledBuffer = NULL;
+        }
+        mReSampledBuffer = (int16_t*)malloc(outBufferSize);
+        memset(mReSampledBuffer, 0x00, outBufferSize);
+
+        // Convert back to 16 bits
+        AudioMixer::ditherAndClamp((int32_t*)mReSampledBuffer, pTmpBuffer, outFrameCnt);
+        LOGV("Resampled buffer size %d", outFrameCnt* 2 * sizeof(int16_t));
+
+        // Create new MediaBuffer
+        mCopyBuffer = new MediaBuffer((void*)mReSampledBuffer, outBufferSize);
+
+        // Compute and set the new timestamp
+        sp<MetaData> to = mCopyBuffer->meta_data();
+        int64_t totalOutDurationUs = (mAccuOutBufferSize * 1000000) /(mOutputSampleRate * 2 * 2); //2 channels out * 2 bytes per sample
+        int64_t timeUs = mInitialTimeStampUs + totalOutDurationUs;
+        to->setInt64(kKeyTime, timeUs);
+        LOGV("buffer duration %lld   timestamp %lld   init %lld", outDurationUs, timeUs, mInitialTimeStampUs);
+
+        // update the accumulate size
+        mAccuOutBufferSize += outBufferSize;
+
+        mCopyBuffer->set_range(0, outBufferSize);
+        *buffer_out = mCopyBuffer;
+
+        free(pTmpBuffer);
+
+    } else if(mIsChannelConvertionRequired == true) {
+        //TODO convert to stereo here.
+    } else {
+        //LOGI("Resampling not required");
+        MediaBuffer *aBuffer;
+        status_t err = mSource->read(&aBuffer, options);
+        LOGV("mSource->read returned %d", err);
+        if(err != OK) {
+            *buffer_out = NULL;
+            mStarted = false;
+            return err;
+        }
+        *buffer_out = aBuffer;
+    }
+
+    return OK;
+}
+
+status_t VideoEditorSRC::getNextBuffer(AudioBufferProvider::Buffer *pBuffer) {
+    LOGV("Requesting        %d", pBuffer->frameCount);
+    uint32_t availableFrames;
+    bool lastBuffer = false;
+    MediaBuffer *aBuffer;
+
+
+    //update the internal buffer
+    // Store the leftover at the beginning of the local buffer
+    if (mLeftover > 0) {
+        LOGV("Moving mLeftover =%d  from  %d", mLeftover, mLastReadSize);
+        if (mLastReadSize > 0) {
+            memcpy(mInterframeBuffer, (uint8_t*) (mInterframeBuffer + mLastReadSize), mLeftover);
+        }
+        mInterframeBufferPosition = mLeftover;
+    }
+    else {
+        mInterframeBufferPosition = 0;
+    }
+
+    availableFrames = mInterframeBufferPosition / (mChannelCnt*2);
+
+    while ((availableFrames < pBuffer->frameCount)&&(mStarted)) {
+        // if we seek, reset the initial time stamp and accumulated time
+        ReadOptions options;
+        if (mSeekTimeUs >= 0) {
+            LOGV("%p cacheMore_l Seek requested = %lld", this, mSeekTimeUs);
+            ReadOptions::SeekMode mode = mSeekMode;
+            options.setSeekTo(mSeekTimeUs, mode);
+            mSeekTimeUs = -1;
+        }
+        /* The first call to read() will require to buffer twice as much data */
+        /* This will be needed by the resampler */
+        status_t err = mSource->read(&aBuffer, &options);
+        LOGV("mSource->read returned %d", err);
+        if(err != OK) {
+            if (mInterframeBufferPosition == 0) {
+                mStarted = false;
+            }
+            //Empty the internal buffer if there is no more data left in the source
+            else {
+                lastBuffer = true;
+                //clear the end of the buffer, just in case
+                memset(mInterframeBuffer+mInterframeBufferPosition, 0x00, DEFAULT_SAMPLING_FREQ * 2 * 2 - mInterframeBufferPosition);
+                mStarted = false;
+            }
+        }
+        else {
+            //copy the buffer
+            memcpy(((uint8_t*) mInterframeBuffer) + mInterframeBufferPosition,
+                    ((uint8_t*) aBuffer->data()) + aBuffer->range_offset(),
+                    aBuffer->range_length());
+            LOGV("Read from buffer  %d", aBuffer->range_length());
+
+            mInterframeBufferPosition += aBuffer->range_length();
+            LOGV("Stored            %d", mInterframeBufferPosition);
+
+            // Get the time stamp of the first buffer
+            if (mInitialTimeStampUs == -1) {
+                int64_t curTS;
+                sp<MetaData> from = aBuffer->meta_data();
+                from->findInt64(kKeyTime, &curTS);
+                LOGV("setting mInitialTimeStampUs to %lld", mInitialTimeStampUs);
+                mInitialTimeStampUs = curTS;
+            }
+
+            // release the buffer
+            aBuffer->release();
+        }
+        availableFrames = mInterframeBufferPosition / (mChannelCnt*2);
+        LOGV("availableFrames   %d", availableFrames);
+    }
+
+    if (lastBuffer) {
+        pBuffer->frameCount = availableFrames;
+    }
+
+    //update the input buffer
+    pBuffer->raw        = (void*)(mInterframeBuffer);
+
+    // Update how many bytes are left
+    // (actualReadSize is updated in getNextBuffer() called from resample())
+    int32_t actualReadSize = pBuffer->frameCount * mChannelCnt * 2;
+    mLeftover = mInterframeBufferPosition - actualReadSize;
+    LOGV("mLeftover         %d", mLeftover);
+
+    mLastReadSize = actualReadSize;
+
+    LOGV("inFrameCount     %d", pBuffer->frameCount);
+
+    return OK;
+}
+
+
+void VideoEditorSRC::releaseBuffer(AudioBufferProvider::Buffer *pBuffer) {
+    if(pBuffer->raw != NULL) {
+        pBuffer->raw = NULL;
+    }
+    pBuffer->frameCount = 0;
+}
+
+} //namespce android
diff --git a/libvideoeditor/lvpp/VideoEditorSRC.h b/libvideoeditor/lvpp/VideoEditorSRC.h
index 85829e7..069665a 100755
--- a/libvideoeditor/lvpp/VideoEditorSRC.h
+++ b/libvideoeditor/lvpp/VideoEditorSRC.h
@@ -1,98 +1,98 @@
-/*

- * Copyright (C) 2011 The Android Open Source Project

- *

- * Licensed under the Apache License, Version 2.0 (the "License");

- * you may not use this file except in compliance with the License.

- * You may obtain a copy of the License at

- *

- *      http://www.apache.org/licenses/LICENSE-2.0

- *

- * Unless required by applicable law or agreed to in writing, software

- * distributed under the License is distributed on an "AS IS" BASIS,

- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

- * See the License for the specific language governing permissions and

- * limitations under the License.

- */

-

-#include <stdint.h>

-

-#include <utils/RefBase.h>

-#include <utils/threads.h>

-

-#include <media/stagefright/MediaSource.h>

-

-#include "AudioBufferProvider.h"

-#include "AudioResampler.h"

-

-namespace android {

-

-struct MediaBuffer;

-

-class VideoEditorSRC : public MediaSource , public AudioBufferProvider {

-

-    public:

-        VideoEditorSRC(

-            const sp<MediaSource> &source);

-

-        virtual status_t start (MetaData *params = NULL);

-        virtual status_t stop();

-        virtual sp<MetaData> getFormat();

-        virtual status_t read (

-            MediaBuffer **buffer, const ReadOptions *options = NULL);

-

-        virtual status_t getNextBuffer(Buffer* buffer);

-        virtual void releaseBuffer(Buffer* buffer);

-

-        void setResampling(int32_t sampleRate=kFreq32000Hz);

-

-    enum { //Sampling freq

-        kFreq8000Hz = 8000,

-        kFreq11025Hz = 11025,

-        kFreq12000Hz = 12000,

-        kFreq16000Hz = 16000,

-        kFreq22050Hz = 22050,

-        kFreq240000Hz = 24000,

-        kFreq32000Hz = 32000,

-        kFreq44100 = 44100,

-        kFreq48000 = 48000,

-    };

-

-    static const uint16_t UNITY_GAIN = 0x1000;

-    static const int32_t DEFAULT_SAMPLING_FREQ = (int32_t)kFreq32000Hz; // kFreq44100;

-

-    protected :

-        virtual ~VideoEditorSRC();

-    private:

-

-        VideoEditorSRC();

-        VideoEditorSRC &operator=(const VideoEditorSRC &);

-

-        AudioResampler        *mResampler;

-        sp<MediaSource>      mSource;

-        MediaBuffer      *mCopyBuffer;

-        int mBitDepth;

-        int mChannelCnt;

-        int mSampleRate;

-        int32_t mOutputSampleRate;

-        bool mStarted;

-        bool mIsResamplingRequired;

-        bool mIsChannelConvertionRequired; // for mono to stereo

-        sp<MetaData> mOutputFormat;

-        Mutex mLock;

-

-        uint8_t* mInterframeBuffer;

-        int32_t mInterframeBufferPosition;

-        int32_t mLeftover;

-        int32_t mLastReadSize ;

-

-        int64_t mInitialTimeStampUs;

-        int64_t mAccuOutBufferSize;

-

-        int64_t mSeekTimeUs;

-        ReadOptions::SeekMode mSeekMode;

-        int16_t *mReSampledBuffer;

-

-};

-

-} //namespce android

-

+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <stdint.h>
+
+#include <utils/RefBase.h>
+#include <utils/threads.h>
+
+#include <media/stagefright/MediaSource.h>
+
+#include "AudioBufferProvider.h"
+#include "AudioResampler.h"
+
+namespace android {
+
+struct MediaBuffer;
+
+class VideoEditorSRC : public MediaSource , public AudioBufferProvider {
+
+    public:
+        VideoEditorSRC(
+            const sp<MediaSource> &source);
+
+        virtual status_t start (MetaData *params = NULL);
+        virtual status_t stop();
+        virtual sp<MetaData> getFormat();
+        virtual status_t read (
+            MediaBuffer **buffer, const ReadOptions *options = NULL);
+
+        virtual status_t getNextBuffer(Buffer* buffer);
+        virtual void releaseBuffer(Buffer* buffer);
+
+        void setResampling(int32_t sampleRate=kFreq32000Hz);
+
+    enum { //Sampling freq
+        kFreq8000Hz = 8000,
+        kFreq11025Hz = 11025,
+        kFreq12000Hz = 12000,
+        kFreq16000Hz = 16000,
+        kFreq22050Hz = 22050,
+        kFreq240000Hz = 24000,
+        kFreq32000Hz = 32000,
+        kFreq44100 = 44100,
+        kFreq48000 = 48000,
+    };
+
+    static const uint16_t UNITY_GAIN = 0x1000;
+    static const int32_t DEFAULT_SAMPLING_FREQ = (int32_t)kFreq32000Hz; // kFreq44100;
+
+    protected :
+        virtual ~VideoEditorSRC();
+    private:
+
+        VideoEditorSRC();
+        VideoEditorSRC &operator=(const VideoEditorSRC &);
+
+        AudioResampler        *mResampler;
+        sp<MediaSource>      mSource;
+        MediaBuffer      *mCopyBuffer;
+        int mBitDepth;
+        int mChannelCnt;
+        int mSampleRate;
+        int32_t mOutputSampleRate;
+        bool mStarted;
+        bool mIsResamplingRequired;
+        bool mIsChannelConvertionRequired; // for mono to stereo
+        sp<MetaData> mOutputFormat;
+        Mutex mLock;
+
+        uint8_t* mInterframeBuffer;
+        int32_t mInterframeBufferPosition;
+        int32_t mLeftover;
+        int32_t mLastReadSize ;
+
+        int64_t mInitialTimeStampUs;
+        int64_t mAccuOutBufferSize;
+
+        int64_t mSeekTimeUs;
+        ReadOptions::SeekMode mSeekMode;
+        int16_t *mReSampledBuffer;
+
+};
+
+} //namespce android
+
diff --git a/libvideoeditor/lvpp/VideoEditorTools.cpp b/libvideoeditor/lvpp/VideoEditorTools.cpp
index 3f5f6f8..73f81a8 100755
--- a/libvideoeditor/lvpp/VideoEditorTools.cpp
+++ b/libvideoeditor/lvpp/VideoEditorTools.cpp
@@ -1,3787 +1,3787 @@
-/*

- * Copyright (C) 2011 The Android Open Source Project

- *

- * Licensed under the Apache License, Version 2.0 (the "License");

- * you may not use this file except in compliance with the License.

- * You may obtain a copy of the License at

- *

- *      http://www.apache.org/licenses/LICENSE-2.0

- *

- * Unless required by applicable law or agreed to in writing, software

- * distributed under the License is distributed on an "AS IS" BASIS,

- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

- * See the License for the specific language governing permissions and

- * limitations under the License.

- */

-

-#include "VideoEditorTools.h"

-#include "PreviewRenderer.h"

-/*+ Handle the image files here */

-#include <utils/Log.h>

-/*- Handle the image files here */

-

-const M4VIFI_UInt8   M4VIFI_ClipTable[1256]

-= {

-0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,

-0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,

-0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,

-0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,

-0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,

-0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,

-0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,

-0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,

-0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,

-0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,

-0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,

-0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,

-0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,

-0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,

-0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,

-0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,

-0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,

-0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,

-0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,

-0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,

-0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,

-0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,

-0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,

-0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,

-0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,

-0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,

-0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,

-0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,

-0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,

-0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,

-0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,

-0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,

-0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,

-0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,

-0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,

-0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,

-0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,

-0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,

-0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,

-0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,

-0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,

-0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,

-0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,

-0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,

-0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,

-0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,

-0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,

-0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,

-0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,

-0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,

-0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,

-0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,

-0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,

-0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,

-0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,

-0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,

-0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,

-0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,

-0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,

-0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,

-0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,

-0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,

-0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x02, 0x03,

-0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0a, 0x0b,

-0x0c, 0x0d, 0x0e, 0x0f, 0x10, 0x11, 0x12, 0x13,

-0x14, 0x15, 0x16, 0x17, 0x18, 0x19, 0x1a, 0x1b,

-0x1c, 0x1d, 0x1e, 0x1f, 0x20, 0x21, 0x22, 0x23,

-0x24, 0x25, 0x26, 0x27, 0x28, 0x29, 0x2a, 0x2b,

-0x2c, 0x2d, 0x2e, 0x2f, 0x30, 0x31, 0x32, 0x33,

-0x34, 0x35, 0x36, 0x37, 0x38, 0x39, 0x3a, 0x3b,

-0x3c, 0x3d, 0x3e, 0x3f, 0x40, 0x41, 0x42, 0x43,

-0x44, 0x45, 0x46, 0x47, 0x48, 0x49, 0x4a, 0x4b,

-0x4c, 0x4d, 0x4e, 0x4f, 0x50, 0x51, 0x52, 0x53,

-0x54, 0x55, 0x56, 0x57, 0x58, 0x59, 0x5a, 0x5b,

-0x5c, 0x5d, 0x5e, 0x5f, 0x60, 0x61, 0x62, 0x63,

-0x64, 0x65, 0x66, 0x67, 0x68, 0x69, 0x6a, 0x6b,

-0x6c, 0x6d, 0x6e, 0x6f, 0x70, 0x71, 0x72, 0x73,

-0x74, 0x75, 0x76, 0x77, 0x78, 0x79, 0x7a, 0x7b,

-0x7c, 0x7d, 0x7e, 0x7f, 0x80, 0x81, 0x82, 0x83,

-0x84, 0x85, 0x86, 0x87, 0x88, 0x89, 0x8a, 0x8b,

-0x8c, 0x8d, 0x8e, 0x8f, 0x90, 0x91, 0x92, 0x93,

-0x94, 0x95, 0x96, 0x97, 0x98, 0x99, 0x9a, 0x9b,

-0x9c, 0x9d, 0x9e, 0x9f, 0xa0, 0xa1, 0xa2, 0xa3,

-0xa4, 0xa5, 0xa6, 0xa7, 0xa8, 0xa9, 0xaa, 0xab,

-0xac, 0xad, 0xae, 0xaf, 0xb0, 0xb1, 0xb2, 0xb3,

-0xb4, 0xb5, 0xb6, 0xb7, 0xb8, 0xb9, 0xba, 0xbb,

-0xbc, 0xbd, 0xbe, 0xbf, 0xc0, 0xc1, 0xc2, 0xc3,

-0xc4, 0xc5, 0xc6, 0xc7, 0xc8, 0xc9, 0xca, 0xcb,

-0xcc, 0xcd, 0xce, 0xcf, 0xd0, 0xd1, 0xd2, 0xd3,

-0xd4, 0xd5, 0xd6, 0xd7, 0xd8, 0xd9, 0xda, 0xdb,

-0xdc, 0xdd, 0xde, 0xdf, 0xe0, 0xe1, 0xe2, 0xe3,

-0xe4, 0xe5, 0xe6, 0xe7, 0xe8, 0xe9, 0xea, 0xeb,

-0xec, 0xed, 0xee, 0xef, 0xf0, 0xf1, 0xf2, 0xf3,

-0xf4, 0xf5, 0xf6, 0xf7, 0xf8, 0xf9, 0xfa, 0xfb,

-0xfc, 0xfd, 0xfe, 0xff, 0xff, 0xff, 0xff, 0xff,

-0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,

-0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,

-0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,

-0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,

-0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,

-0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,

-0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,

-0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,

-0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,

-0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,

-0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,

-0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,

-0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,

-0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,

-0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,

-0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,

-0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,

-0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,

-0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,

-0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,

-0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,

-0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,

-0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,

-0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,

-0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,

-0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,

-0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,

-0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,

-0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,

-0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,

-0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,

-0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,

-0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,

-0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,

-0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,

-0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,

-0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,

-0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,

-0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,

-0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,

-0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,

-0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,

-0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,

-0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,

-0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,

-0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,

-0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,

-0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,

-0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,

-0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,

-0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,

-0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,

-0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,

-0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,

-0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,

-0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,

-0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,

-0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,

-0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,

-0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,

-0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,

-0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff

-};

-

-/* Division table for ( 65535/x ); x = 0 to 512 */

-const M4VIFI_UInt16  M4VIFI_DivTable[512]

-= {

-0, 65535, 32768, 21845, 16384, 13107, 10922, 9362,

-8192, 7281, 6553, 5957, 5461, 5041, 4681, 4369,

-4096, 3855, 3640, 3449, 3276, 3120, 2978, 2849,

-2730, 2621, 2520, 2427, 2340, 2259, 2184, 2114,

-2048, 1985, 1927, 1872, 1820, 1771, 1724, 1680,

-1638, 1598, 1560, 1524, 1489, 1456, 1424, 1394,

-1365, 1337, 1310, 1285, 1260, 1236, 1213, 1191,

-1170, 1149, 1129, 1110, 1092, 1074, 1057, 1040,

-1024, 1008, 992, 978, 963, 949, 936, 923,

-910, 897, 885, 873, 862, 851, 840, 829,

-819, 809, 799, 789, 780, 771, 762, 753,

-744, 736, 728, 720, 712, 704, 697, 689,

-682, 675, 668, 661, 655, 648, 642, 636,

-630, 624, 618, 612, 606, 601, 595, 590,

-585, 579, 574, 569, 564, 560, 555, 550,

-546, 541, 537, 532, 528, 524, 520, 516,

-512, 508, 504, 500, 496, 492, 489, 485,

-481, 478, 474, 471, 468, 464, 461, 458,

-455, 451, 448, 445, 442, 439, 436, 434,

-431, 428, 425, 422, 420, 417, 414, 412,

-409, 407, 404, 402, 399, 397, 394, 392,

-390, 387, 385, 383, 381, 378, 376, 374,

-372, 370, 368, 366, 364, 362, 360, 358,

-356, 354, 352, 350, 348, 346, 344, 343,

-341, 339, 337, 336, 334, 332, 330, 329,

-327, 326, 324, 322, 321, 319, 318, 316,

-315, 313, 312, 310, 309, 307, 306, 304,

-303, 302, 300, 299, 297, 296, 295, 293,

-292, 291, 289, 288, 287, 286, 284, 283,

-282, 281, 280, 278, 277, 276, 275, 274,

-273, 271, 270, 269, 268, 267, 266, 265,

-264, 263, 262, 261, 260, 259, 258, 257,

-256, 255, 254, 253, 252, 251, 250, 249,

-248, 247, 246, 245, 244, 243, 242, 241,

-240, 240, 239, 238, 237, 236, 235, 234,

-234, 233, 232, 231, 230, 229, 229, 228,

-227, 226, 225, 225, 224, 223, 222, 222,

-221, 220, 219, 219, 218, 217, 217, 216,

-215, 214, 214, 213, 212, 212, 211, 210,

-210, 209, 208, 208, 207, 206, 206, 205,

-204, 204, 203, 202, 202, 201, 201, 200,

-199, 199, 198, 197, 197, 196, 196, 195,

-195, 194, 193, 193, 192, 192, 191, 191,

-190, 189, 189, 188, 188, 187, 187, 186,

-186, 185, 185, 184, 184, 183, 183, 182,

-182, 181, 181, 180, 180, 179, 179, 178,

-178, 177, 177, 176, 176, 175, 175, 174,

-174, 173, 173, 172, 172, 172, 171, 171,

-170, 170, 169, 169, 168, 168, 168, 167,

-167, 166, 166, 165, 165, 165, 164, 164,

-163, 163, 163, 162, 162, 161, 161, 161,

-160, 160, 159, 159, 159, 158, 158, 157,

-157, 157, 156, 156, 156, 155, 155, 154,

-154, 154, 153, 153, 153, 152, 152, 152,

-151, 151, 151, 150, 150, 149, 149, 149,

-148, 148, 148, 147, 147, 147, 146, 146,

-146, 145, 145, 145, 144, 144, 144, 144,

-143, 143, 143, 142, 142, 142, 141, 141,

-141, 140, 140, 140, 140, 139, 139, 139,

-138, 138, 138, 137, 137, 137, 137, 136,

-136, 136, 135, 135, 135, 135, 134, 134,

-134, 134, 133, 133, 133, 132, 132, 132,

-132, 131, 131, 131, 131, 130, 130, 130,

-130, 129, 129, 129, 129, 128, 128, 128

-};

-

-const M4VIFI_Int32  const_storage1[8]

-= {

-0x00002568, 0x00003343,0x00000649,0x00000d0f, 0x0000D86C, 0x0000D83B, 0x00010000, 0x00010000

-};

-

-const M4VIFI_Int32  const_storage[8]

-= {

-0x00002568, 0x00003343, 0x1BF800, 0x00000649, 0x00000d0f, 0x110180, 0x40cf, 0x22BE00

-};

-

-

-const M4VIFI_UInt16  *M4VIFI_DivTable_zero

- = &M4VIFI_DivTable[0];

-

-const M4VIFI_UInt8   *M4VIFI_ClipTable_zero

- = &M4VIFI_ClipTable[500];

-

-M4VIFI_UInt8 M4VIFI_YUV420PlanarToYUV420Semiplanar(void *user_data,

-    M4VIFI_ImagePlane *PlaneIn, M4VIFI_ImagePlane *PlaneOut ) {

-

-    M4VIFI_UInt32 i;

-    M4VIFI_UInt8 *p_buf_src, *p_buf_dest, *p_buf_src_u, *p_buf_src_v;

-    M4VIFI_UInt8    return_code = M4VIFI_OK;

-

-    /* the filter is implemented with the assumption that the width is equal to stride */

-    if(PlaneIn[0].u_width != PlaneIn[0].u_stride)

-        return M4VIFI_INVALID_PARAM;

-

-    /* The input Y Plane is the same as the output Y Plane */

-    p_buf_src = &(PlaneIn[0].pac_data[PlaneIn[0].u_topleft]);

-    p_buf_dest = &(PlaneOut[0].pac_data[PlaneOut[0].u_topleft]);

-    memcpy((void *)p_buf_dest,(void *)p_buf_src ,

-        PlaneOut[0].u_width * PlaneOut[0].u_height);

-

-    /* The U and V components are planar. The need to be made interleaved */

-    p_buf_src_u = &(PlaneIn[1].pac_data[PlaneIn[1].u_topleft]);

-    p_buf_src_v = &(PlaneIn[2].pac_data[PlaneIn[2].u_topleft]);

-    p_buf_dest  = &(PlaneOut[1].pac_data[PlaneOut[1].u_topleft]);

-

-    for(i = 0; i < PlaneOut[1].u_width*PlaneOut[1].u_height; i++)

-    {

-        *p_buf_dest++ = *p_buf_src_u++;

-        *p_buf_dest++ = *p_buf_src_v++;

-    }

-    return return_code;

-}

-

-M4VIFI_UInt8 M4VIFI_SemiplanarYUV420toYUV420(void *user_data,

-    M4VIFI_ImagePlane *PlaneIn, M4VIFI_ImagePlane *PlaneOut ) {

-

-     M4VIFI_UInt32 i;

-     M4VIFI_UInt8 *p_buf_src, *p_buf_dest, *p_buf_src_u, *p_buf_src_v;

-     M4VIFI_UInt8 *p_buf_dest_u,*p_buf_dest_v,*p_buf_src_uv;

-     M4VIFI_UInt8     return_code = M4VIFI_OK;

-

-     /* the filter is implemented with the assumption that the width is equal to stride */

-     if(PlaneIn[0].u_width != PlaneIn[0].u_stride)

-        return M4VIFI_INVALID_PARAM;

-

-     /* The input Y Plane is the same as the output Y Plane */

-     p_buf_src = &(PlaneIn[0].pac_data[PlaneIn[0].u_topleft]);

-     p_buf_dest = &(PlaneOut[0].pac_data[PlaneOut[0].u_topleft]);

-     memcpy((void *)p_buf_dest,(void *)p_buf_src ,

-         PlaneOut[0].u_width * PlaneOut[0].u_height);

-

-     /* The U and V components are planar. The need to be made interleaved */

-     p_buf_src_uv = &(PlaneIn[1].pac_data[PlaneIn[1].u_topleft]);

-     p_buf_dest_u  = &(PlaneOut[1].pac_data[PlaneOut[1].u_topleft]);

-     p_buf_dest_v  = &(PlaneOut[2].pac_data[PlaneOut[2].u_topleft]);

-

-     for(i = 0; i < PlaneOut[1].u_width*PlaneOut[1].u_height; i++)

-     {

-        *p_buf_dest_u++ = *p_buf_src_uv++;

-        *p_buf_dest_v++ = *p_buf_src_uv++;

-     }

-     return return_code;

-}

-

-

-/**

- ******************************************************************************

- * prototype    M4VSS3GPP_externalVideoEffectColor(M4OSA_Void *pFunctionContext,

- *                                                  M4VIFI_ImagePlane *PlaneIn,

- *                                                  M4VIFI_ImagePlane *PlaneOut,

- *                                                  M4VSS3GPP_ExternalProgress *pProgress,

- *                                                  M4OSA_UInt32 uiEffectKind)

- *

- * @brief   This function apply a color effect on an input YUV420 planar frame

- * @note

- * @param   pFunctionContext(IN) Contains which color to apply (not very clean ...)

- * @param   PlaneIn         (IN) Input YUV420 planar

- * @param   PlaneOut        (IN/OUT) Output YUV420 planar

- * @param   pProgress       (IN/OUT) Progress indication (0-100)

- * @param   uiEffectKind    (IN) Unused

- *

- * @return  M4VIFI_OK:  No error

- ******************************************************************************

-*/

-M4OSA_ERR M4VSS3GPP_externalVideoEffectColor(M4OSA_Void *pFunctionContext,

-            M4VIFI_ImagePlane *PlaneIn, M4VIFI_ImagePlane *PlaneOut,

-            M4VSS3GPP_ExternalProgress *pProgress, M4OSA_UInt32 uiEffectKind) {

-

-    M4VIFI_Int32 plane_number;

-    M4VIFI_UInt32 i,j;

-    M4VIFI_UInt8 *p_buf_src, *p_buf_dest;

-    M4xVSS_ColorStruct* ColorContext = (M4xVSS_ColorStruct*)pFunctionContext;

-

-    for (plane_number = 0; plane_number < 3; plane_number++)

-    {

-        p_buf_src =

-         &(PlaneIn[plane_number].pac_data[PlaneIn[plane_number].u_topleft]);

-

-        p_buf_dest =

-         &(PlaneOut[plane_number].pac_data[PlaneOut[plane_number].u_topleft]);

-        for (i = 0; i < PlaneOut[plane_number].u_height; i++)

-        {

-            /**

-             * Chrominance */

-            if(plane_number==1 || plane_number==2)

-            {

-                //switch ((M4OSA_UInt32)pFunctionContext) // commented because a structure for the effects context exist

-                switch (ColorContext->colorEffectType)

-                {

-                case M4xVSS_kVideoEffectType_BlackAndWhite:

-                    memset((void *)p_buf_dest,128,

-                     PlaneIn[plane_number].u_width);

-                    break;

-                case M4xVSS_kVideoEffectType_Pink:

-                    memset((void *)p_buf_dest,255,

-                     PlaneIn[plane_number].u_width);

-                    break;

-                case M4xVSS_kVideoEffectType_Green:

-                    memset((void *)p_buf_dest,0,

-                     PlaneIn[plane_number].u_width);

-                    break;

-                case M4xVSS_kVideoEffectType_Sepia:

-                    if(plane_number==1)

-                    {

-                        memset((void *)p_buf_dest,117,

-                         PlaneIn[plane_number].u_width);

-                    }

-                    else

-                    {

-                        memset((void *)p_buf_dest,139,

-                         PlaneIn[plane_number].u_width);

-                    }

-                    break;

-                case M4xVSS_kVideoEffectType_Negative:

-                    memcpy((void *)p_buf_dest,

-                     (void *)p_buf_src ,PlaneOut[plane_number].u_width);

-                    break;

-

-                case M4xVSS_kVideoEffectType_ColorRGB16:

-                    {

-                        M4OSA_UInt16 r = 0,g = 0,b = 0,y = 0,u = 0,v = 0;

-

-                        /*first get the r, g, b*/

-                        b = (ColorContext->rgb16ColorData &  0x001f);

-                        g = (ColorContext->rgb16ColorData &  0x07e0)>>5;

-                        r = (ColorContext->rgb16ColorData &  0xf800)>>11;

-

-                        /*keep y, but replace u and v*/

-                        if(plane_number==1)

-                        {

-                            /*then convert to u*/

-                            u = U16(r, g, b);

-                            memset((void *)p_buf_dest,(M4OSA_UInt8)u,

-                             PlaneIn[plane_number].u_width);

-                        }

-                        if(plane_number==2)

-                        {

-                            /*then convert to v*/

-                            v = V16(r, g, b);

-                            memset((void *)p_buf_dest,(M4OSA_UInt8)v,

-                             PlaneIn[plane_number].u_width);

-                        }

-                    }

-                    break;

-                case M4xVSS_kVideoEffectType_Gradient:

-                    {

-                        M4OSA_UInt16 r = 0,g = 0,b = 0,y = 0,u = 0,v = 0;

-

-                        /*first get the r, g, b*/

-                        b = (ColorContext->rgb16ColorData &  0x001f);

-                        g = (ColorContext->rgb16ColorData &  0x07e0)>>5;

-                        r = (ColorContext->rgb16ColorData &  0xf800)>>11;

-

-                        /*for color gradation*/

-                        b = (M4OSA_UInt16)( b - ((b*i)/PlaneIn[plane_number].u_height));

-                        g = (M4OSA_UInt16)(g - ((g*i)/PlaneIn[plane_number].u_height));

-                        r = (M4OSA_UInt16)(r - ((r*i)/PlaneIn[plane_number].u_height));

-

-                        /*keep y, but replace u and v*/

-                        if(plane_number==1)

-                        {

-                            /*then convert to u*/

-                            u = U16(r, g, b);

-                            memset((void *)p_buf_dest,(M4OSA_UInt8)u,

-                             PlaneIn[plane_number].u_width);

-                        }

-                        if(plane_number==2)

-                        {

-                            /*then convert to v*/

-                            v = V16(r, g, b);

-                            memset((void *)p_buf_dest,(M4OSA_UInt8)v,

-                             PlaneIn[plane_number].u_width);

-                        }

-                    }

-                    break;

-                default:

-                    return M4VIFI_INVALID_PARAM;

-                }

-            }

-            /**

-             * Luminance */

-            else

-            {

-                //switch ((M4OSA_UInt32)pFunctionContext)// commented because a structure for the effects context exist

-                switch (ColorContext->colorEffectType)

-                {

-                case M4xVSS_kVideoEffectType_Negative:

-                    for(j=0;j<PlaneOut[plane_number].u_width;j++)

-                    {

-                            p_buf_dest[j] = 255 - p_buf_src[j];

-                    }

-                    break;

-                default:

-                    memcpy((void *)p_buf_dest,

-                     (void *)p_buf_src ,PlaneOut[plane_number].u_width);

-                    break;

-                }

-            }

-            p_buf_src += PlaneIn[plane_number].u_stride;

-            p_buf_dest += PlaneOut[plane_number].u_stride;

-        }

-    }

-

-    return M4VIFI_OK;

-}

-

-/**

- ******************************************************************************

- * prototype    M4VSS3GPP_externalVideoEffectFraming(M4OSA_Void *pFunctionContext,

- *                                                  M4VIFI_ImagePlane *PlaneIn,

- *                                                  M4VIFI_ImagePlane *PlaneOut,

- *                                                  M4VSS3GPP_ExternalProgress *pProgress,

- *                                                  M4OSA_UInt32 uiEffectKind)

- *

- * @brief   This function add a fixed or animated image on an input YUV420 planar frame

- * @note

- * @param   pFunctionContext(IN) Contains which color to apply (not very clean ...)

- * @param   PlaneIn         (IN) Input YUV420 planar

- * @param   PlaneOut        (IN/OUT) Output YUV420 planar

- * @param   pProgress       (IN/OUT) Progress indication (0-100)

- * @param   uiEffectKind    (IN) Unused

- *

- * @return  M4VIFI_OK:  No error

- ******************************************************************************

-*/

-M4OSA_ERR M4VSS3GPP_externalVideoEffectFraming(

-            M4OSA_Void *userData, M4VIFI_ImagePlane PlaneIn[3],

-            M4VIFI_ImagePlane *PlaneOut, M4VSS3GPP_ExternalProgress *pProgress,

-            M4OSA_UInt32 uiEffectKind ) {

-

-    M4VIFI_UInt32 x,y;

-

-    M4VIFI_UInt8 *p_in_Y = PlaneIn[0].pac_data;

-    M4VIFI_UInt8 *p_in_U = PlaneIn[1].pac_data;

-    M4VIFI_UInt8 *p_in_V = PlaneIn[2].pac_data;

-

-    M4xVSS_FramingStruct* Framing = M4OSA_NULL;

-    M4xVSS_FramingStruct* currentFraming = M4OSA_NULL;

-    M4VIFI_UInt8 *FramingRGB = M4OSA_NULL;

-

-    M4VIFI_UInt8 *p_out0;

-    M4VIFI_UInt8 *p_out1;

-    M4VIFI_UInt8 *p_out2;

-

-    M4VIFI_UInt32 topleft[2];

-

-    M4OSA_UInt8 transparent1 =

-     (M4OSA_UInt8)((TRANSPARENT_COLOR & 0xFF00)>>8);

-    M4OSA_UInt8 transparent2 = (M4OSA_UInt8)TRANSPARENT_COLOR;

-

-#ifndef DECODE_GIF_ON_SAVING

-    Framing = (M4xVSS_FramingStruct *)userData;

-    currentFraming = (M4xVSS_FramingStruct *)Framing->pCurrent;

-    FramingRGB = Framing->FramingRgb->pac_data;

-#endif /*DECODE_GIF_ON_SAVING*/

-

-#ifdef DECODE_GIF_ON_SAVING

-    M4OSA_ERR err;

-    Framing =

-     (M4xVSS_FramingStruct *)((M4xVSS_FramingContext*)userData)->aFramingCtx;

-    if(Framing == M4OSA_NULL)

-    {

-        ((M4xVSS_FramingContext*)userData)->clipTime = pProgress->uiOutputTime;

-        err = M4xVSS_internalDecodeGIF(userData);

-        if(M4NO_ERROR != err)

-        {

-            M4OSA_TRACE1_1("M4VSS3GPP_externalVideoEffectFraming: \

-                Error in M4xVSS_internalDecodeGIF: 0x%x", err);

-            return err;

-        }

-        Framing =

-         (M4xVSS_FramingStruct *)((M4xVSS_FramingContext*)userData)->aFramingCtx;

-        /* Initializes first GIF time */

-        ((M4xVSS_FramingContext*)userData)->current_gif_time =

-          pProgress->uiOutputTime;

-    }

-    currentFraming = (M4xVSS_FramingStruct *)Framing;

-    FramingRGB = Framing->FramingRgb->pac_data;

-#endif /*DECODE_GIF_ON_SAVING*/

-

-    /**

-     * Initialize input / output plane pointers */

-    p_in_Y += PlaneIn[0].u_topleft;

-    p_in_U += PlaneIn[1].u_topleft;

-    p_in_V += PlaneIn[2].u_topleft;

-

-    p_out0 = PlaneOut[0].pac_data;

-    p_out1 = PlaneOut[1].pac_data;

-    p_out2 = PlaneOut[2].pac_data;

-

-    /**

-     * Depending on time, initialize Framing frame to use */

-    if(Framing->previousClipTime == -1)

-    {

-        Framing->previousClipTime = pProgress->uiOutputTime;

-    }

-

-    /**

-     * If the current clip time has reach the duration of one frame of the framing picture

-     * we need to step to next framing picture */

-#ifdef DECODE_GIF_ON_SAVING

-    if(((M4xVSS_FramingContext*)userData)->b_animated == M4OSA_TRUE)

-    {

-        while((((M4xVSS_FramingContext*)userData)->current_gif_time + currentFraming->duration) < pProgress->uiOutputTime)

-        {

-            ((M4xVSS_FramingContext*)userData)->clipTime =

-             pProgress->uiOutputTime;

-

-            err = M4xVSS_internalDecodeGIF(userData);

-            if(M4NO_ERROR != err)

-            {

-                M4OSA_TRACE1_1("M4VSS3GPP_externalVideoEffectFraming: Error in M4xVSS_internalDecodeGIF: 0x%x", err);

-                return err;

-            }

-            if(currentFraming->duration != 0)

-            {

-                ((M4xVSS_FramingContext*)userData)->current_gif_time += currentFraming->duration;

-            }

-            else

-            {

-                ((M4xVSS_FramingContext*)userData)->current_gif_time +=

-                 pProgress->uiOutputTime - Framing->previousClipTime;

-            }

-            Framing = (M4xVSS_FramingStruct *)((M4xVSS_FramingContext*)userData)->aFramingCtx;

-            currentFraming = (M4xVSS_FramingStruct *)Framing;

-            FramingRGB = Framing->FramingRgb->pac_data;

-        }

-    }

-#else

-            Framing->pCurrent = currentFraming->pNext;

-            currentFraming = (M4xVSS_FramingStruct*)Framing->pCurrent;

-#endif /*DECODE_GIF_ON_SAVING*/

-

-    Framing->previousClipTime = pProgress->uiOutputTime;

-    FramingRGB = currentFraming->FramingRgb->pac_data;

-    topleft[0] = currentFraming->topleft_x;

-    topleft[1] = currentFraming->topleft_y;

-

-    for( x=0 ;x < PlaneIn[0].u_height ; x++)

-    {

-        for( y=0 ;y < PlaneIn[0].u_width ; y++)

-        {

-            /**

-             * To handle framing with input size != output size

-             * Framing is applyed if coordinates matches between framing/topleft and input plane */

-            if( y < (topleft[0] + currentFraming->FramingYuv[0].u_width)  &&

-                y >= topleft[0] &&

-                x < (topleft[1] + currentFraming->FramingYuv[0].u_height) &&

-                x >= topleft[1])

-            {

-

-                /*Alpha blending support*/

-                M4OSA_Float alphaBlending = 1;

-#ifdef DECODE_GIF_ON_SAVING

-                M4xVSS_internalEffectsAlphaBlending* alphaBlendingStruct =

-                 (M4xVSS_internalEffectsAlphaBlending*)((M4xVSS_FramingContext*)userData)->alphaBlendingStruct;

-#else

-                M4xVSS_internalEffectsAlphaBlending* alphaBlendingStruct =

-                 (M4xVSS_internalEffectsAlphaBlending*)((M4xVSS_FramingStruct*)userData)->alphaBlendingStruct;

-#endif //#ifdef DECODE_GIF_ON_SAVING

-

-                if(alphaBlendingStruct != M4OSA_NULL)

-                {

-                    if(pProgress->uiProgress < (M4OSA_UInt32)(alphaBlendingStruct->m_fadeInTime*10))

-                    {

-                        alphaBlending = ((M4OSA_Float)(alphaBlendingStruct->m_middle - alphaBlendingStruct->m_start)*pProgress->uiProgress/(alphaBlendingStruct->m_fadeInTime*10));

-                        alphaBlending += alphaBlendingStruct->m_start;

-                        alphaBlending /= 100;

-                    }

-                    else if(pProgress->uiProgress >= (M4OSA_UInt32)(alphaBlendingStruct->m_fadeInTime*10) && pProgress->uiProgress < 1000 - (M4OSA_UInt32)(alphaBlendingStruct->m_fadeOutTime*10))

-                    {

-                        alphaBlending = (M4OSA_Float)((M4OSA_Float)alphaBlendingStruct->m_middle/100);

-                    }

-                    else if(pProgress->uiProgress >= 1000 - (M4OSA_UInt32)(alphaBlendingStruct->m_fadeOutTime*10))

-                    {

-                        alphaBlending = ((M4OSA_Float)(alphaBlendingStruct->m_middle - alphaBlendingStruct->m_end))*(1000 - pProgress->uiProgress)/(alphaBlendingStruct->m_fadeOutTime*10);

-                        alphaBlending += alphaBlendingStruct->m_end;

-                        alphaBlending /= 100;

-                    }

-                }

-

-                /**/

-

-                if((*(FramingRGB)==transparent1) && (*(FramingRGB+1)==transparent2))

-                {

-                    *( p_out0+y+x*PlaneOut[0].u_stride)=(*(p_in_Y+y+x*PlaneIn[0].u_stride));

-                    *( p_out1+(y>>1)+(x>>1)*PlaneOut[1].u_stride)=(*(p_in_U+(y>>1)+(x>>1)*PlaneIn[1].u_stride));

-                    *( p_out2+(y>>1)+(x>>1)*PlaneOut[2].u_stride)=(*(p_in_V+(y>>1)+(x>>1)*PlaneIn[2].u_stride));

-                }

-                else

-                {

-                    *( p_out0+y+x*PlaneOut[0].u_stride)=(*(currentFraming->FramingYuv[0].pac_data+(y-topleft[0])+(x-topleft[1])*currentFraming->FramingYuv[0].u_stride))*alphaBlending;

-                    *( p_out0+y+x*PlaneOut[0].u_stride)+=(*(p_in_Y+y+x*PlaneIn[0].u_stride))*(1-alphaBlending);

-                    *( p_out1+(y>>1)+(x>>1)*PlaneOut[1].u_stride)=(*(currentFraming->FramingYuv[1].pac_data+((y-topleft[0])>>1)+((x-topleft[1])>>1)*currentFraming->FramingYuv[1].u_stride))*alphaBlending;

-                    *( p_out1+(y>>1)+(x>>1)*PlaneOut[1].u_stride)+=(*(p_in_U+(y>>1)+(x>>1)*PlaneIn[1].u_stride))*(1-alphaBlending);

-                    *( p_out2+(y>>1)+(x>>1)*PlaneOut[2].u_stride)=(*(currentFraming->FramingYuv[2].pac_data+((y-topleft[0])>>1)+((x-topleft[1])>>1)*currentFraming->FramingYuv[2].u_stride))*alphaBlending;

-                    *( p_out2+(y>>1)+(x>>1)*PlaneOut[2].u_stride)+=(*(p_in_V+(y>>1)+(x>>1)*PlaneIn[2].u_stride))*(1-alphaBlending);

-                }

-                if( PlaneIn[0].u_width < (topleft[0] + currentFraming->FramingYuv[0].u_width) &&

-                    y == PlaneIn[0].u_width-1)

-                {

-                    FramingRGB = FramingRGB + 2 * (topleft[0] + currentFraming->FramingYuv[0].u_width - PlaneIn[0].u_width + 1);

-                }

-                else

-                {

-                    FramingRGB = FramingRGB + 2;

-                }

-            }

-            /**

-             * Just copy input plane to output plane */

-            else

-            {

-                *( p_out0+y+x*PlaneOut[0].u_stride)=*(p_in_Y+y+x*PlaneIn[0].u_stride);

-                *( p_out1+(y>>1)+(x>>1)*PlaneOut[1].u_stride)=*(p_in_U+(y>>1)+(x>>1)*PlaneIn[1].u_stride);

-                *( p_out2+(y>>1)+(x>>1)*PlaneOut[2].u_stride)=*(p_in_V+(y>>1)+(x>>1)*PlaneIn[2].u_stride);

-            }

-        }

-    }

-

-#ifdef DECODE_GIF_ON_SAVING

-    if(pProgress->bIsLast == M4OSA_TRUE

-        && (M4OSA_Bool)((M4xVSS_FramingContext*)userData)->b_IsFileGif == M4OSA_TRUE)

-    {

-        M4xVSS_internalDecodeGIF_Cleaning((M4xVSS_FramingContext*)userData);

-    }

-#endif /*DECODE_GIF_ON_SAVING*/

-    return M4VIFI_OK;

-}

-

-

-/**

- ******************************************************************************

- * prototype    M4VSS3GPP_externalVideoEffectFifties(M4OSA_Void *pFunctionContext,

- *                                                  M4VIFI_ImagePlane *PlaneIn,

- *                                                  M4VIFI_ImagePlane *PlaneOut,

- *                                                  M4VSS3GPP_ExternalProgress *pProgress,

- *                                                  M4OSA_UInt32 uiEffectKind)

- *

- * @brief   This function make a video look as if it was taken in the fifties

- * @note

- * @param   pUserData       (IN) Context

- * @param   pPlaneIn        (IN) Input YUV420 planar

- * @param   pPlaneOut       (IN/OUT) Output YUV420 planar

- * @param   pProgress       (IN/OUT) Progress indication (0-100)

- * @param   uiEffectKind    (IN) Unused

- *

- * @return  M4VIFI_OK:          No error

- * @return  M4ERR_PARAMETER:    pFiftiesData, pPlaneOut or pProgress are NULL (DEBUG only)

- ******************************************************************************

-*/

-M4OSA_ERR M4VSS3GPP_externalVideoEffectFifties(

-    M4OSA_Void *pUserData, M4VIFI_ImagePlane *pPlaneIn,

-    M4VIFI_ImagePlane *pPlaneOut, M4VSS3GPP_ExternalProgress *pProgress,

-    M4OSA_UInt32 uiEffectKind )

-{

-    M4VIFI_UInt32 x, y, xShift;

-    M4VIFI_UInt8 *pInY = pPlaneIn[0].pac_data;

-    M4VIFI_UInt8 *pOutY, *pInYbegin;

-    M4VIFI_UInt8 *pInCr,* pOutCr;

-    M4VIFI_Int32 plane_number;

-

-    /* Internal context*/

-    M4xVSS_FiftiesStruct* p_FiftiesData = (M4xVSS_FiftiesStruct *)pUserData;

-

-    /* Initialize input / output plane pointers */

-    pInY += pPlaneIn[0].u_topleft;

-    pOutY = pPlaneOut[0].pac_data;

-    pInYbegin  = pInY;

-

-    /* Initialize the random */

-    if(p_FiftiesData->previousClipTime < 0)

-    {

-        M4OSA_randInit();

-        M4OSA_rand((M4OSA_Int32*)&(p_FiftiesData->shiftRandomValue), (pPlaneIn[0].u_height) >> 4);

-        M4OSA_rand((M4OSA_Int32*)&(p_FiftiesData->stripeRandomValue), (pPlaneIn[0].u_width)<< 2);

-        p_FiftiesData->previousClipTime = pProgress->uiOutputTime;

-    }

-

-    /* Choose random values if we have reached the duration of a partial effect */

-    else if( (pProgress->uiOutputTime - p_FiftiesData->previousClipTime) > p_FiftiesData->fiftiesEffectDuration)

-    {

-        M4OSA_rand((M4OSA_Int32*)&(p_FiftiesData->shiftRandomValue), (pPlaneIn[0].u_height) >> 4);

-        M4OSA_rand((M4OSA_Int32*)&(p_FiftiesData->stripeRandomValue), (pPlaneIn[0].u_width)<< 2);

-        p_FiftiesData->previousClipTime = pProgress->uiOutputTime;

-    }

-

-    /* Put in Sepia the chrominance */

-    for (plane_number = 1; plane_number < 3; plane_number++)

-    {

-        pInCr  = pPlaneIn[plane_number].pac_data  + pPlaneIn[plane_number].u_topleft;

-        pOutCr = pPlaneOut[plane_number].pac_data + pPlaneOut[plane_number].u_topleft;

-

-        for (x = 0; x < pPlaneOut[plane_number].u_height; x++)

-        {

-            if (1 == plane_number)

-                memset((void *)pOutCr, 117,pPlaneIn[plane_number].u_width); /* U value */

-            else

-                memset((void *)pOutCr, 139,pPlaneIn[plane_number].u_width); /* V value */

-

-            pInCr  += pPlaneIn[plane_number].u_stride;

-            pOutCr += pPlaneOut[plane_number].u_stride;

-        }

-    }

-

-    /* Compute the new pixels values */

-    for( x = 0 ; x < pPlaneIn[0].u_height ; x++)

-    {

-        M4VIFI_UInt8 *p_outYtmp, *p_inYtmp;

-

-        /* Compute the xShift (random value) */

-        if (0 == (p_FiftiesData->shiftRandomValue % 5 ))

-            xShift = (x + p_FiftiesData->shiftRandomValue ) % (pPlaneIn[0].u_height - 1);

-        else

-            xShift = (x + (pPlaneIn[0].u_height - p_FiftiesData->shiftRandomValue) ) % (pPlaneIn[0].u_height - 1);

-

-        /* Initialize the pointers */

-        p_outYtmp = pOutY + 1;                                    /* yShift of 1 pixel */

-        p_inYtmp  = pInYbegin + (xShift * pPlaneIn[0].u_stride);  /* Apply the xShift */

-

-        for( y = 0 ; y < pPlaneIn[0].u_width ; y++)

-        {

-            /* Set Y value */

-            if (xShift > (pPlaneIn[0].u_height - 4))

-                *p_outYtmp = 40;        /* Add some horizontal black lines between the two parts of the image */

-            else if ( y == p_FiftiesData->stripeRandomValue)

-                *p_outYtmp = 90;        /* Add a random vertical line for the bulk */

-            else

-                *p_outYtmp = *p_inYtmp;

-

-

-            /* Go to the next pixel */

-            p_outYtmp++;

-            p_inYtmp++;

-

-            /* Restart at the beginning of the line for the last pixel*/

-            if (y == (pPlaneIn[0].u_width - 2))

-                p_outYtmp = pOutY;

-        }

-

-        /* Go to the next line */

-        pOutY += pPlaneOut[0].u_stride;

-    }

-

-    return M4VIFI_OK;

-}

-

-unsigned char M4VFL_modifyLumaWithScale(M4ViComImagePlane *plane_in,

-                                        M4ViComImagePlane *plane_out,

-                                        unsigned long lum_factor,

-                                        void *user_data)

-{

-    unsigned short *p_src, *p_dest, *p_src_line, *p_dest_line;

-    unsigned char *p_csrc, *p_cdest, *p_csrc_line, *p_cdest_line;

-    unsigned long pix_src;

-    unsigned long u_outpx, u_outpx2;

-    unsigned long u_width, u_stride, u_stride_out,u_height, pix;

-    long i, j;

-

-    /* copy or filter chroma */

-    u_width = plane_in[1].u_width;

-    u_height = plane_in[1].u_height;

-    u_stride = plane_in[1].u_stride;

-    u_stride_out = plane_out[1].u_stride;

-    p_cdest_line = (unsigned char *) &plane_out[1].pac_data[plane_out[1].u_topleft];

-    p_csrc_line = (unsigned char *) &plane_in[1].pac_data[plane_in[1].u_topleft];

-

-    if (lum_factor > 256)

-    {

-        p_cdest = (unsigned char *) &plane_out[2].pac_data[plane_out[2].u_topleft];

-        p_csrc = (unsigned char *) &plane_in[2].pac_data[plane_in[2].u_topleft];

-        /* copy chroma */

-        for (j = u_height; j != 0; j--)

-        {

-            for (i = u_width; i != 0; i--)

-            {

-                memcpy((void *)p_cdest_line, (void *)p_csrc_line, u_width);

-                memcpy((void *)p_cdest, (void *)p_csrc, u_width);

-            }

-            p_cdest_line += u_stride_out;

-            p_cdest += u_stride_out;

-            p_csrc_line += u_stride;

-            p_csrc += u_stride;

-        }

-    }

-    else

-    {

-        /* filter chroma */

-        pix = (1024 - lum_factor) << 7;

-        for (j = u_height; j != 0; j--)

-        {

-            p_cdest = p_cdest_line;

-            p_csrc = p_csrc_line;

-            for (i = u_width; i != 0; i--)

-            {

-                *p_cdest++ = ((pix + (*p_csrc++ & 0xFF) * lum_factor) >> LUM_FACTOR_MAX);

-            }

-            p_cdest_line += u_stride_out;

-            p_csrc_line += u_stride;

-        }

-        p_cdest_line = (unsigned char *) &plane_out[2].pac_data[plane_out[2].u_topleft];

-        p_csrc_line = (unsigned char *) &plane_in[2].pac_data[plane_in[2].u_topleft];

-        for (j = u_height; j != 0; j--)

-        {

-            p_cdest = p_cdest_line;

-            p_csrc = p_csrc_line;

-            for (i = u_width; i != 0; i--)

-            {

-                *p_cdest++ = ((pix + (*p_csrc & 0xFF) * lum_factor) >> LUM_FACTOR_MAX);

-            }

-            p_cdest_line += u_stride_out;

-            p_csrc_line += u_stride;

-        }

-    }

-    /* apply luma factor */

-    u_width = plane_in[0].u_width;

-    u_height = plane_in[0].u_height;

-    u_stride = (plane_in[0].u_stride >> 1);

-    u_stride_out = (plane_out[0].u_stride >> 1);

-    p_dest = (unsigned short *) &plane_out[0].pac_data[plane_out[0].u_topleft];

-    p_src = (unsigned short *) &plane_in[0].pac_data[plane_in[0].u_topleft];

-    p_dest_line = p_dest;

-    p_src_line = p_src;

-

-    for (j = u_height; j != 0; j--)

-    {

-        p_dest = p_dest_line;

-        p_src = p_src_line;

-        for (i = (u_width >> 1); i != 0; i--)

-        {

-            pix_src = (unsigned long) *p_src++;

-            pix = pix_src & 0xFF;

-            u_outpx = ((pix * lum_factor) >> LUM_FACTOR_MAX);

-            pix = ((pix_src & 0xFF00) >> 8);

-            u_outpx2 = (((pix * lum_factor) >> LUM_FACTOR_MAX)<< 8) ;

-            *p_dest++ = (unsigned short) (u_outpx2 | u_outpx);

-        }

-        p_dest_line += u_stride_out;

-        p_src_line += u_stride;

-    }

-

-    return 0;

-}

-

-/**

- ******************************************************************************

- * unsigned char M4VFL_applyCurtain(M4ViComImagePlane *plane_in, M4ViComImagePlane *plane_out, M4VFL_CurtainParam *curtain_factor, void *user_data)

- * @author  Beatrice Nezot (PHILIPS Software Vision)

- * @brief   This function applies a black curtain onto a YUV420 image.

- * @note    THis function writes black lines either at the top of the image or at

- *          the bottom of the image. The other lines are copied from the source image.

- *          First the number of black lines is compted and is rounded to an even integer.

- * @param   plane_in: (IN) pointer to the 3 image planes of the source image

- * @param   plane_out: (OUT) pointer to the 3 image planes of the destination image

- * @param   user_data: (IN) pointer to some user_data

- * @param   curtain_factor: (IN) structure with the parameters of the curtain (nb of black lines and if at the top/bottom of the image)

- * @return  0: there is no error

- ******************************************************************************

-*/

-unsigned char M4VFL_applyCurtain(M4ViComImagePlane *plane_in, M4ViComImagePlane *plane_out, M4VFL_CurtainParam *curtain_factor, void *user_data)

-{

-    unsigned char *p_src, *p_srcu, *p_srcv,*p_dest, *p_destu, *p_destv;

-    unsigned long u_width, u_widthuv, u_stride_out, u_stride_out_uv,u_stride, u_stride_uv,u_height;

-    long j;

-    unsigned long nb_black_lines;

-

-    u_width = plane_in[0].u_width;

-    u_height = plane_in[0].u_height;

-    u_stride_out = plane_out[0].u_stride ;

-    u_stride_out_uv = plane_out[1].u_stride;

-    p_dest = (unsigned char *) &plane_out[0].pac_data[plane_out[0].u_topleft];

-    p_destu = (unsigned char *) &plane_out[1].pac_data[plane_out[1].u_topleft];

-    p_destv = (unsigned char *) &plane_out[2].pac_data[plane_out[2].u_topleft];

-    u_widthuv = u_width >> 1;

-    u_stride = plane_in[0].u_stride ;

-    u_stride_uv = plane_in[1].u_stride;

-

-    /* nb_black_lines is even */

-    nb_black_lines = (unsigned long) ((curtain_factor->nb_black_lines >> 1) << 1);

-

-    if (curtain_factor->top_is_black)

-    {

-        /* black lines first */

-        /* compute index of of first source pixels (Y, U and V) to copy after the black lines */

-        p_src = (unsigned char *) &plane_in[0].pac_data[plane_in[0].u_topleft + ((nb_black_lines) * plane_in[0].u_stride)];

-        p_srcu = (unsigned char *) &plane_in[1].pac_data[plane_in[1].u_topleft + (((nb_black_lines) * plane_in[1].u_stride) >> 1)];

-        p_srcv = (unsigned char *) &plane_in[2].pac_data[plane_in[2].u_topleft+ (((nb_black_lines) * plane_in[2].u_stride) >> 1)];

-

-        /* write black lines */

-        for (j = (nb_black_lines >> 1); j != 0; j--)

-        {

-            memset((void *)p_dest, 0,u_width);

-            p_dest += u_stride_out;

-            memset((void *)p_dest, 0,u_width);

-            p_dest += u_stride_out;

-            memset((void *)p_destu, 128,u_widthuv);

-            memset((void *)p_destv, 128,u_widthuv);

-            p_destu += u_stride_out_uv;

-            p_destv += u_stride_out_uv;

-        }

-

-        /* copy from source image */

-        for (j = (u_height - nb_black_lines) >> 1; j != 0; j--)

-        {

-            memcpy((void *)p_dest, (void *)p_src, u_width);

-            p_dest += u_stride_out;

-            p_src += u_stride;

-            memcpy((void *)p_dest, (void *)p_src, u_width);

-            p_dest += u_stride_out;

-            p_src += u_stride;

-            memcpy((void *)p_destu, (void *)p_srcu, u_widthuv);

-            memcpy((void *)p_destv, (void *)p_srcv, u_widthuv);

-            p_destu += u_stride_out_uv;

-            p_destv += u_stride_out_uv;

-            p_srcu += u_stride_uv;

-            p_srcv += u_stride_uv;

-        }

-    }

-    else

-    {

-        /* black lines at the bottom of the image */

-        p_src = (unsigned char *) &plane_in[0].pac_data[plane_in[0].u_topleft];

-        p_srcu = (unsigned char *) &plane_in[1].pac_data[plane_in[1].u_topleft];

-        p_srcv = (unsigned char *) &plane_in[2].pac_data[plane_in[2].u_topleft];

-

-        /* copy from source image image */

-        for (j = (nb_black_lines >> 1); j != 0; j--)

-        {

-            memcpy((void *)p_dest, (void *)p_src, u_width);

-            p_dest += u_stride_out;

-            p_src += u_stride;

-            memcpy((void *)p_dest, (void *)p_src, u_width);

-            p_dest += u_stride_out;

-            p_src += u_stride;

-            memcpy((void *)p_destu, (void *)p_srcu, u_widthuv);

-            memcpy((void *)p_destv, (void *)p_srcv, u_widthuv);

-            p_destu += u_stride_out_uv;

-            p_destv += u_stride_out_uv;

-            p_srcu += u_stride_uv;

-            p_srcv += u_stride_uv;

-        }

-

-        /* write black lines*/

-        /* the pointers to p_dest, p_destu and p_destv are used through the two loops "for" */

-        for (j = (u_height - nb_black_lines) >> 1; j != 0; j--)

-        {

-            memset((void *)p_dest, 0,u_width);

-            p_dest += u_stride_out;

-            memset((void *)p_dest, 0,u_width);

-            p_dest += u_stride_out;

-            memset((void *)p_destu, 128,u_widthuv);

-            memset((void *)p_destv, 128,u_widthuv);

-            p_destu += u_stride_out_uv;

-            p_destv += u_stride_out_uv;

-        }

-    }

-

-    return 0;

-}

-

-

-/******************************************************************************

- * prototype    M4OSA_ERR M4xVSS_internalConvertRGBtoYUV(M4xVSS_FramingStruct* framingCtx)

- * @brief   This function converts an RGB565 plane to YUV420 planar

- * @note    It is used only for framing effect

- *          It allocates output YUV planes

- * @param   framingCtx  (IN) The framing struct containing input RGB565 plane

- *

- * @return  M4NO_ERROR: No error

- * @return  M4ERR_PARAMETER: At least one of the function parameters is null

- * @return  M4ERR_ALLOC: Allocation error (no more memory)

- ******************************************************************************

-*/

-M4OSA_ERR M4xVSS_internalConvertRGBtoYUV(M4xVSS_FramingStruct* framingCtx)

-{

-    M4OSA_ERR err;

-

-    /**

-     * Allocate output YUV planes */

-    framingCtx->FramingYuv = (M4VIFI_ImagePlane*)M4OSA_32bitAlignedMalloc(3*sizeof(M4VIFI_ImagePlane), M4VS, (M4OSA_Char*)"M4xVSS_internalConvertRGBtoYUV: Output plane YUV");

-    if(framingCtx->FramingYuv == M4OSA_NULL)

-    {

-        M4OSA_TRACE1_0("Allocation error in M4xVSS_internalConvertRGBtoYUV");

-        return M4ERR_ALLOC;

-    }

-    framingCtx->FramingYuv[0].u_width = framingCtx->FramingRgb->u_width;

-    framingCtx->FramingYuv[0].u_height = framingCtx->FramingRgb->u_height;

-    framingCtx->FramingYuv[0].u_topleft = 0;

-    framingCtx->FramingYuv[0].u_stride = framingCtx->FramingRgb->u_width;

-    framingCtx->FramingYuv[0].pac_data = (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc((framingCtx->FramingYuv[0].u_width*framingCtx->FramingYuv[0].u_height*3)>>1, M4VS, (M4OSA_Char*)"Alloc for the Convertion output YUV");;

-    if(framingCtx->FramingYuv[0].pac_data == M4OSA_NULL)

-    {

-        M4OSA_TRACE1_0("Allocation error in M4xVSS_internalConvertRGBtoYUV");

-        return M4ERR_ALLOC;

-    }

-    framingCtx->FramingYuv[1].u_width = (framingCtx->FramingRgb->u_width)>>1;

-    framingCtx->FramingYuv[1].u_height = (framingCtx->FramingRgb->u_height)>>1;

-    framingCtx->FramingYuv[1].u_topleft = 0;

-    framingCtx->FramingYuv[1].u_stride = (framingCtx->FramingRgb->u_width)>>1;

-    framingCtx->FramingYuv[1].pac_data = framingCtx->FramingYuv[0].pac_data + framingCtx->FramingYuv[0].u_width * framingCtx->FramingYuv[0].u_height;

-    framingCtx->FramingYuv[2].u_width = (framingCtx->FramingRgb->u_width)>>1;

-    framingCtx->FramingYuv[2].u_height = (framingCtx->FramingRgb->u_height)>>1;

-    framingCtx->FramingYuv[2].u_topleft = 0;

-    framingCtx->FramingYuv[2].u_stride = (framingCtx->FramingRgb->u_width)>>1;

-    framingCtx->FramingYuv[2].pac_data = framingCtx->FramingYuv[1].pac_data + framingCtx->FramingYuv[1].u_width * framingCtx->FramingYuv[1].u_height;

-

-    /**

-     * Convert input RGB 565 to YUV 420 to be able to merge it with output video in framing effect */

-    err = M4VIFI_xVSS_RGB565toYUV420(M4OSA_NULL, framingCtx->FramingRgb, framingCtx->FramingYuv);

-    if(err != M4NO_ERROR)

-    {

-        M4OSA_TRACE1_1("M4xVSS_internalConvertRGBtoYUV: error when converting from RGB to YUV: 0x%x\n", err);

-    }

-

-    framingCtx->duration = 0;

-    framingCtx->previousClipTime = -1;

-    framingCtx->previewOffsetClipTime = -1;

-

-    /**

-     * Only one element in the chained list (no animated image with RGB buffer...) */

-    framingCtx->pCurrent = framingCtx;

-    framingCtx->pNext = framingCtx;

-

-    return M4NO_ERROR;

-}

-

-/******************************************************************************

- * prototype    M4OSA_ERR M4xVSS_internalConvertRGB888toYUV(M4xVSS_FramingStruct* framingCtx)

- * @brief   This function converts an RGB888 plane to YUV420 planar

- * @note    It is used only for framing effect

- *          It allocates output YUV planes

- * @param   framingCtx  (IN) The framing struct containing input RGB888 plane

- *

- * @return  M4NO_ERROR: No error

- * @return  M4ERR_PARAMETER: At least one of the function parameters is null

- * @return  M4ERR_ALLOC: Allocation error (no more memory)

- ******************************************************************************

-*/

-M4OSA_ERR M4xVSS_internalConvertRGB888toYUV(M4xVSS_FramingStruct* framingCtx)

-{

-    M4OSA_ERR err;

-

-    /**

-     * Allocate output YUV planes */

-    framingCtx->FramingYuv = (M4VIFI_ImagePlane*)M4OSA_32bitAlignedMalloc(3*sizeof(M4VIFI_ImagePlane), M4VS, (M4OSA_Char*)"M4xVSS_internalConvertRGBtoYUV: Output plane YUV");

-    if(framingCtx->FramingYuv == M4OSA_NULL)

-    {

-        M4OSA_TRACE1_0("Allocation error in M4xVSS_internalConvertRGBtoYUV");

-        return M4ERR_ALLOC;

-    }

-    framingCtx->FramingYuv[0].u_width = framingCtx->FramingRgb->u_width;

-    framingCtx->FramingYuv[0].u_height = framingCtx->FramingRgb->u_height;

-    framingCtx->FramingYuv[0].u_topleft = 0;

-    framingCtx->FramingYuv[0].u_stride = framingCtx->FramingRgb->u_width;

-    framingCtx->FramingYuv[0].pac_data = (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc((framingCtx->FramingYuv[0].u_width*framingCtx->FramingYuv[0].u_height*3)>>1, M4VS, (M4OSA_Char*)"Alloc for the Convertion output YUV");;

-    if(framingCtx->FramingYuv[0].pac_data == M4OSA_NULL)

-    {

-        M4OSA_TRACE1_0("Allocation error in M4xVSS_internalConvertRGBtoYUV");

-        return M4ERR_ALLOC;

-    }

-    framingCtx->FramingYuv[1].u_width = (framingCtx->FramingRgb->u_width)>>1;

-    framingCtx->FramingYuv[1].u_height = (framingCtx->FramingRgb->u_height)>>1;

-    framingCtx->FramingYuv[1].u_topleft = 0;

-    framingCtx->FramingYuv[1].u_stride = (framingCtx->FramingRgb->u_width)>>1;

-    framingCtx->FramingYuv[1].pac_data = framingCtx->FramingYuv[0].pac_data + framingCtx->FramingYuv[0].u_width * framingCtx->FramingYuv[0].u_height;

-    framingCtx->FramingYuv[2].u_width = (framingCtx->FramingRgb->u_width)>>1;

-    framingCtx->FramingYuv[2].u_height = (framingCtx->FramingRgb->u_height)>>1;

-    framingCtx->FramingYuv[2].u_topleft = 0;

-    framingCtx->FramingYuv[2].u_stride = (framingCtx->FramingRgb->u_width)>>1;

-    framingCtx->FramingYuv[2].pac_data = framingCtx->FramingYuv[1].pac_data + framingCtx->FramingYuv[1].u_width * framingCtx->FramingYuv[1].u_height;

-

-    /**

-     * Convert input RGB888 to YUV 420 to be able to merge it with output video in framing effect */

-    err = M4VIFI_RGB888toYUV420(M4OSA_NULL, framingCtx->FramingRgb, framingCtx->FramingYuv);

-    if(err != M4NO_ERROR)

-    {

-        M4OSA_TRACE1_1("M4xVSS_internalConvertRGBtoYUV: error when converting from RGB to YUV: 0x%x\n", err);

-    }

-

-    framingCtx->duration = 0;

-    framingCtx->previousClipTime = -1;

-    framingCtx->previewOffsetClipTime = -1;

-

-    /**

-     * Only one element in the chained list (no animated image with RGB buffer...) */

-    framingCtx->pCurrent = framingCtx;

-    framingCtx->pNext = framingCtx;

-

-    return M4NO_ERROR;

-}

-

-/**

- ******************************************************************************

- * M4VIFI_UInt8 M4VIFI_RGB565toYUV420 (void *pUserData,

- *                                   M4VIFI_ImagePlane *pPlaneIn,

- *                                   M4VIFI_ImagePlane *pPlaneOut)

- * @author  Patrice Martinez / Philips Digital Networks - MP4Net

- * @brief   transform RGB565 image to a YUV420 image.

- * @note    Convert RGB565 to YUV420,

- *          Loop on each row ( 2 rows by 2 rows )

- *              Loop on each column ( 2 col by 2 col )

- *                  Get 4 RGB samples from input data and build 4 output Y samples

- *                  and each single U & V data

- *              end loop on col

- *          end loop on row

- * @param   pUserData: (IN) User Specific Data

- * @param   pPlaneIn: (IN) Pointer to RGB565 Plane

- * @param   pPlaneOut: (OUT) Pointer to  YUV420 buffer Plane

- * @return  M4VIFI_OK: there is no error

- * @return  M4VIFI_ILLEGAL_FRAME_HEIGHT: YUV Plane height is ODD

- * @return  M4VIFI_ILLEGAL_FRAME_WIDTH:  YUV Plane width is ODD

- ******************************************************************************

-*/

-M4VIFI_UInt8    M4VIFI_xVSS_RGB565toYUV420(void *pUserData, M4VIFI_ImagePlane *pPlaneIn,

-                                                      M4VIFI_ImagePlane *pPlaneOut)

-{

-    M4VIFI_UInt32   u32_width, u32_height;

-    M4VIFI_UInt32   u32_stride_Y, u32_stride2_Y, u32_stride_U, u32_stride_V;

-    M4VIFI_UInt32   u32_stride_rgb, u32_stride_2rgb;

-    M4VIFI_UInt32   u32_col, u32_row;

-

-    M4VIFI_Int32    i32_r00, i32_r01, i32_r10, i32_r11;

-    M4VIFI_Int32    i32_g00, i32_g01, i32_g10, i32_g11;

-    M4VIFI_Int32    i32_b00, i32_b01, i32_b10, i32_b11;

-    M4VIFI_Int32    i32_y00, i32_y01, i32_y10, i32_y11;

-    M4VIFI_Int32    i32_u00, i32_u01, i32_u10, i32_u11;

-    M4VIFI_Int32    i32_v00, i32_v01, i32_v10, i32_v11;

-    M4VIFI_UInt8    *pu8_yn, *pu8_ys, *pu8_u, *pu8_v;

-    M4VIFI_UInt8    *pu8_y_data, *pu8_u_data, *pu8_v_data;

-    M4VIFI_UInt8    *pu8_rgbn_data, *pu8_rgbn;

-    M4VIFI_UInt16   u16_pix1, u16_pix2, u16_pix3, u16_pix4;

-    M4VIFI_UInt8 count_null=0;

-

-    /* Check planes height are appropriate */

-    if( (pPlaneIn->u_height != pPlaneOut[0].u_height)           ||

-        (pPlaneOut[0].u_height != (pPlaneOut[1].u_height<<1))   ||

-        (pPlaneOut[0].u_height != (pPlaneOut[2].u_height<<1)))

-    {

-        return M4VIFI_ILLEGAL_FRAME_HEIGHT;

-    }

-

-    /* Check planes width are appropriate */

-    if( (pPlaneIn->u_width != pPlaneOut[0].u_width)         ||

-        (pPlaneOut[0].u_width != (pPlaneOut[1].u_width<<1)) ||

-        (pPlaneOut[0].u_width != (pPlaneOut[2].u_width<<1)))

-    {

-        return M4VIFI_ILLEGAL_FRAME_WIDTH;

-    }

-

-    /* Set the pointer to the beginning of the output data buffers */

-    pu8_y_data = pPlaneOut[0].pac_data + pPlaneOut[0].u_topleft;

-    pu8_u_data = pPlaneOut[1].pac_data + pPlaneOut[1].u_topleft;

-    pu8_v_data = pPlaneOut[2].pac_data + pPlaneOut[2].u_topleft;

-

-    /* Set the pointer to the beginning of the input data buffers */

-    pu8_rgbn_data   = pPlaneIn->pac_data + pPlaneIn->u_topleft;

-

-    /* Get the size of the output image */

-    u32_width = pPlaneOut[0].u_width;

-    u32_height = pPlaneOut[0].u_height;

-

-    /* Set the size of the memory jumps corresponding to row jump in each output plane */

-    u32_stride_Y = pPlaneOut[0].u_stride;

-    u32_stride2_Y = u32_stride_Y << 1;

-    u32_stride_U = pPlaneOut[1].u_stride;

-    u32_stride_V = pPlaneOut[2].u_stride;

-

-    /* Set the size of the memory jumps corresponding to row jump in input plane */

-    u32_stride_rgb = pPlaneIn->u_stride;

-    u32_stride_2rgb = u32_stride_rgb << 1;

-

-

-    /* Loop on each row of the output image, input coordinates are estimated from output ones */

-    /* Two YUV rows are computed at each pass */

-    for (u32_row = u32_height ;u32_row != 0; u32_row -=2)

-    {

-        /* Current Y plane row pointers */

-        pu8_yn = pu8_y_data;

-        /* Next Y plane row pointers */

-        pu8_ys = pu8_yn + u32_stride_Y;

-        /* Current U plane row pointer */

-        pu8_u = pu8_u_data;

-        /* Current V plane row pointer */

-        pu8_v = pu8_v_data;

-

-        pu8_rgbn = pu8_rgbn_data;

-

-        /* Loop on each column of the output image */

-        for (u32_col = u32_width; u32_col != 0 ; u32_col -=2)

-        {

-            /* Get four RGB 565 samples from input data */

-            u16_pix1 = *( (M4VIFI_UInt16 *) pu8_rgbn);

-            u16_pix2 = *( (M4VIFI_UInt16 *) (pu8_rgbn + CST_RGB_16_SIZE));

-            u16_pix3 = *( (M4VIFI_UInt16 *) (pu8_rgbn + u32_stride_rgb));

-            u16_pix4 = *( (M4VIFI_UInt16 *) (pu8_rgbn + u32_stride_rgb + CST_RGB_16_SIZE));

-

-            /* Unpack RGB565 to 8bit R, G, B */

-            /* (x,y) */

-            GET_RGB565(i32_b00,i32_g00,i32_r00,u16_pix1);

-            /* (x+1,y) */

-            GET_RGB565(i32_b10,i32_g10,i32_r10,u16_pix2);

-            /* (x,y+1) */

-            GET_RGB565(i32_b01,i32_g01,i32_r01,u16_pix3);

-            /* (x+1,y+1) */

-            GET_RGB565(i32_b11,i32_g11,i32_r11,u16_pix4);

-            /* If RGB is transparent color (0, 63, 0), we transform it to white (31,63,31) */

-            if(i32_b00 == 0 && i32_g00 == 63 && i32_r00 == 0)

-            {

-                i32_b00 = 31;

-                i32_r00 = 31;

-            }

-            if(i32_b10 == 0 && i32_g10 == 63 && i32_r10 == 0)

-            {

-                i32_b10 = 31;

-                i32_r10 = 31;

-            }

-            if(i32_b01 == 0 && i32_g01 == 63 && i32_r01 == 0)

-            {

-                i32_b01 = 31;

-                i32_r01 = 31;

-            }

-            if(i32_b11 == 0 && i32_g11 == 63 && i32_r11 == 0)

-            {

-                i32_b11 = 31;

-                i32_r11 = 31;

-            }

-            /* Convert RGB value to YUV */

-            i32_u00 = U16(i32_r00, i32_g00, i32_b00);

-            i32_v00 = V16(i32_r00, i32_g00, i32_b00);

-            /* luminance value */

-            i32_y00 = Y16(i32_r00, i32_g00, i32_b00);

-

-            i32_u10 = U16(i32_r10, i32_g10, i32_b10);

-            i32_v10 = V16(i32_r10, i32_g10, i32_b10);

-            /* luminance value */

-            i32_y10 = Y16(i32_r10, i32_g10, i32_b10);

-

-            i32_u01 = U16(i32_r01, i32_g01, i32_b01);

-            i32_v01 = V16(i32_r01, i32_g01, i32_b01);

-            /* luminance value */

-            i32_y01 = Y16(i32_r01, i32_g01, i32_b01);

-

-            i32_u11 = U16(i32_r11, i32_g11, i32_b11);

-            i32_v11 = V16(i32_r11, i32_g11, i32_b11);

-            /* luminance value */

-            i32_y11 = Y16(i32_r11, i32_g11, i32_b11);

-

-            /* Store luminance data */

-            pu8_yn[0] = (M4VIFI_UInt8)i32_y00;

-            pu8_yn[1] = (M4VIFI_UInt8)i32_y10;

-            pu8_ys[0] = (M4VIFI_UInt8)i32_y01;

-            pu8_ys[1] = (M4VIFI_UInt8)i32_y11;

-            *pu8_u = (M4VIFI_UInt8)((i32_u00 + i32_u01 + i32_u10 + i32_u11 + 2) >> 2);

-            *pu8_v = (M4VIFI_UInt8)((i32_v00 + i32_v01 + i32_v10 + i32_v11 + 2) >> 2);

-            /* Prepare for next column */

-            pu8_rgbn += (CST_RGB_16_SIZE<<1);

-            /* Update current Y plane line pointer*/

-            pu8_yn += 2;

-            /* Update next Y plane line pointer*/

-            pu8_ys += 2;

-            /* Update U plane line pointer*/

-            pu8_u ++;

-            /* Update V plane line pointer*/

-            pu8_v ++;

-        } /* End of horizontal scanning */

-

-        /* Prepare pointers for the next row */

-        pu8_y_data += u32_stride2_Y;

-        pu8_u_data += u32_stride_U;

-        pu8_v_data += u32_stride_V;

-        pu8_rgbn_data += u32_stride_2rgb;

-

-

-    } /* End of vertical scanning */

-

-    return M4VIFI_OK;

-}

-

-/***************************************************************************

-Proto:

-M4VIFI_UInt8    M4VIFI_RGB888toYUV420(void *pUserData, M4VIFI_ImagePlane *PlaneIn, M4VIFI_ImagePlane PlaneOut[3]);

-Author:     Patrice Martinez / Philips Digital Networks - MP4Net

-Purpose:    filling of the YUV420 plane from a BGR24 plane

-Abstract:   Loop on each row ( 2 rows by 2 rows )

-                Loop on each column ( 2 col by 2 col )

-                    Get 4 BGR samples from input data and build 4 output Y samples and each single U & V data

-                end loop on col

-            end loop on row

-

-In:         RGB24 plane

-InOut:      none

-Out:        array of 3 M4VIFI_ImagePlane structures

-Modified:   ML: RGB function modified to BGR.

-***************************************************************************/

-M4VIFI_UInt8 M4VIFI_RGB888toYUV420(void *pUserData, M4VIFI_ImagePlane *PlaneIn, M4VIFI_ImagePlane PlaneOut[3])

-{

-

-    M4VIFI_UInt32   u32_width, u32_height;

-    M4VIFI_UInt32   u32_stride_Y, u32_stride2_Y, u32_stride_U, u32_stride_V, u32_stride_rgb, u32_stride_2rgb;

-    M4VIFI_UInt32   u32_col, u32_row;

-

-    M4VIFI_Int32    i32_r00, i32_r01, i32_r10, i32_r11;

-    M4VIFI_Int32    i32_g00, i32_g01, i32_g10, i32_g11;

-    M4VIFI_Int32    i32_b00, i32_b01, i32_b10, i32_b11;

-    M4VIFI_Int32    i32_y00, i32_y01, i32_y10, i32_y11;

-    M4VIFI_Int32    i32_u00, i32_u01, i32_u10, i32_u11;

-    M4VIFI_Int32    i32_v00, i32_v01, i32_v10, i32_v11;

-    M4VIFI_UInt8    *pu8_yn, *pu8_ys, *pu8_u, *pu8_v;

-    M4VIFI_UInt8    *pu8_y_data, *pu8_u_data, *pu8_v_data;

-    M4VIFI_UInt8    *pu8_rgbn_data, *pu8_rgbn;

-

-    /* check sizes */

-    if( (PlaneIn->u_height != PlaneOut[0].u_height)         ||

-        (PlaneOut[0].u_height != (PlaneOut[1].u_height<<1)) ||

-        (PlaneOut[0].u_height != (PlaneOut[2].u_height<<1)))

-        return M4VIFI_ILLEGAL_FRAME_HEIGHT;

-

-    if( (PlaneIn->u_width != PlaneOut[0].u_width)       ||

-        (PlaneOut[0].u_width != (PlaneOut[1].u_width<<1))   ||

-        (PlaneOut[0].u_width != (PlaneOut[2].u_width<<1)))

-        return M4VIFI_ILLEGAL_FRAME_WIDTH;

-

-

-    /* set the pointer to the beginning of the output data buffers */

-    pu8_y_data  = PlaneOut[0].pac_data + PlaneOut[0].u_topleft;

-    pu8_u_data  = PlaneOut[1].pac_data + PlaneOut[1].u_topleft;

-    pu8_v_data  = PlaneOut[2].pac_data + PlaneOut[2].u_topleft;

-

-    /* idem for input buffer */

-    pu8_rgbn_data   = PlaneIn->pac_data + PlaneIn->u_topleft;

-

-    /* get the size of the output image */

-    u32_width   = PlaneOut[0].u_width;

-    u32_height  = PlaneOut[0].u_height;

-

-    /* set the size of the memory jumps corresponding to row jump in each output plane */

-    u32_stride_Y = PlaneOut[0].u_stride;

-    u32_stride2_Y= u32_stride_Y << 1;

-    u32_stride_U = PlaneOut[1].u_stride;

-    u32_stride_V = PlaneOut[2].u_stride;

-

-    /* idem for input plane */

-    u32_stride_rgb = PlaneIn->u_stride;

-    u32_stride_2rgb = u32_stride_rgb << 1;

-

-    /* loop on each row of the output image, input coordinates are estimated from output ones */

-    /* two YUV rows are computed at each pass */

-    for (u32_row = u32_height ;u32_row != 0; u32_row -=2)

-    {

-        /* update working pointers */

-        pu8_yn  = pu8_y_data;

-        pu8_ys  = pu8_yn + u32_stride_Y;

-

-        pu8_u   = pu8_u_data;

-        pu8_v   = pu8_v_data;

-

-        pu8_rgbn= pu8_rgbn_data;

-

-        /* loop on each column of the output image*/

-        for (u32_col = u32_width; u32_col != 0 ; u32_col -=2)

-        {

-            /* get RGB samples of 4 pixels */

-            GET_RGB24(i32_r00, i32_g00, i32_b00, pu8_rgbn, 0);

-            GET_RGB24(i32_r10, i32_g10, i32_b10, pu8_rgbn, CST_RGB_24_SIZE);

-            GET_RGB24(i32_r01, i32_g01, i32_b01, pu8_rgbn, u32_stride_rgb);

-            GET_RGB24(i32_r11, i32_g11, i32_b11, pu8_rgbn, u32_stride_rgb + CST_RGB_24_SIZE);

-

-            i32_u00 = U24(i32_r00, i32_g00, i32_b00);

-            i32_v00 = V24(i32_r00, i32_g00, i32_b00);

-            i32_y00 = Y24(i32_r00, i32_g00, i32_b00);       /* matrix luminance */

-            pu8_yn[0]= (M4VIFI_UInt8)i32_y00;

-

-            i32_u10 = U24(i32_r10, i32_g10, i32_b10);

-            i32_v10 = V24(i32_r10, i32_g10, i32_b10);

-            i32_y10 = Y24(i32_r10, i32_g10, i32_b10);

-            pu8_yn[1]= (M4VIFI_UInt8)i32_y10;

-

-            i32_u01 = U24(i32_r01, i32_g01, i32_b01);

-            i32_v01 = V24(i32_r01, i32_g01, i32_b01);

-            i32_y01 = Y24(i32_r01, i32_g01, i32_b01);

-            pu8_ys[0]= (M4VIFI_UInt8)i32_y01;

-

-            i32_u11 = U24(i32_r11, i32_g11, i32_b11);

-            i32_v11 = V24(i32_r11, i32_g11, i32_b11);

-            i32_y11 = Y24(i32_r11, i32_g11, i32_b11);

-            pu8_ys[1] = (M4VIFI_UInt8)i32_y11;

-

-            *pu8_u  = (M4VIFI_UInt8)((i32_u00 + i32_u01 + i32_u10 + i32_u11 + 2) >> 2);

-            *pu8_v  = (M4VIFI_UInt8)((i32_v00 + i32_v01 + i32_v10 + i32_v11 + 2) >> 2);

-

-            pu8_rgbn    +=  (CST_RGB_24_SIZE<<1);

-            pu8_yn      += 2;

-            pu8_ys      += 2;

-

-            pu8_u ++;

-            pu8_v ++;

-        } /* end of horizontal scanning */

-

-        pu8_y_data      += u32_stride2_Y;

-        pu8_u_data      += u32_stride_U;

-        pu8_v_data      += u32_stride_V;

-        pu8_rgbn_data   += u32_stride_2rgb;

-

-

-    } /* End of vertical scanning */

-

-    return M4VIFI_OK;

-}

-

-/** YUV420 to YUV420 */

-/**

- *******************************************************************************************

- * M4VIFI_UInt8 M4VIFI_YUV420toYUV420 (void *pUserData,

- *                                     M4VIFI_ImagePlane *pPlaneIn,

- *                                     M4VIFI_ImagePlane *pPlaneOut)

- * @brief   Transform YUV420 image to a YUV420 image.

- * @param   pUserData: (IN) User Specific Data (Unused - could be NULL)

- * @param   pPlaneIn: (IN) Pointer to YUV plane buffer

- * @param   pPlaneOut: (OUT) Pointer to YUV Plane

- * @return  M4VIFI_OK: there is no error

- * @return  M4VIFI_ILLEGAL_FRAME_HEIGHT: Error in plane height

- * @return  M4VIFI_ILLEGAL_FRAME_WIDTH:  Error in plane width

- *******************************************************************************************

- */

-

-M4VIFI_UInt8 M4VIFI_YUV420toYUV420(void *user_data, M4VIFI_ImagePlane PlaneIn[3], M4VIFI_ImagePlane *PlaneOut )

-{

-    M4VIFI_Int32 plane_number;

-    M4VIFI_UInt32 i;

-    M4VIFI_UInt8 *p_buf_src, *p_buf_dest;

-

-    for (plane_number = 0; plane_number < 3; plane_number++)

-    {

-        p_buf_src = &(PlaneIn[plane_number].pac_data[PlaneIn[plane_number].u_topleft]);

-        p_buf_dest = &(PlaneOut[plane_number].pac_data[PlaneOut[plane_number].u_topleft]);

-        for (i = 0; i < PlaneOut[plane_number].u_height; i++)

-        {

-            memcpy((void *)p_buf_dest, (void *)p_buf_src ,PlaneOut[plane_number].u_width);

-            p_buf_src += PlaneIn[plane_number].u_stride;

-            p_buf_dest += PlaneOut[plane_number].u_stride;

-        }

-    }

-    return M4VIFI_OK;

-}

-

-/**

- ***********************************************************************************************

- * M4VIFI_UInt8 M4VIFI_ResizeBilinearYUV420toYUV420(void *pUserData, M4VIFI_ImagePlane *pPlaneIn,

- *                                                                  M4VIFI_ImagePlane *pPlaneOut)

- * @author  David Dana (PHILIPS Software)

- * @brief   Resizes YUV420 Planar plane.

- * @note    Basic structure of the function

- *          Loop on each row (step 2)

- *              Loop on each column (step 2)

- *                  Get four Y samples and 1 U & V sample

- *                  Resize the Y with corresponing U and V samples

- *                  Place the YUV in the ouput plane

- *              end loop column

- *          end loop row

- *          For resizing bilinear interpolation linearly interpolates along

- *          each row, and then uses that result in a linear interpolation down each column.

- *          Each estimated pixel in the output image is a weighted

- *          combination of its four neighbours. The ratio of compression

- *          or dilatation is estimated using input and output sizes.

- * @param   pUserData: (IN) User Data

- * @param   pPlaneIn: (IN) Pointer to YUV420 (Planar) plane buffer

- * @param   pPlaneOut: (OUT) Pointer to YUV420 (Planar) plane

- * @return  M4VIFI_OK: there is no error

- * @return  M4VIFI_ILLEGAL_FRAME_HEIGHT: Error in height

- * @return  M4VIFI_ILLEGAL_FRAME_WIDTH:  Error in width

- ***********************************************************************************************

-*/

-M4VIFI_UInt8    M4VIFI_ResizeBilinearYUV420toYUV420(void *pUserData,

-                                                                M4VIFI_ImagePlane *pPlaneIn,

-                                                                M4VIFI_ImagePlane *pPlaneOut)

-{

-    M4VIFI_UInt8    *pu8_data_in, *pu8_data_out, *pu8dum;

-    M4VIFI_UInt32   u32_plane;

-    M4VIFI_UInt32   u32_width_in, u32_width_out, u32_height_in, u32_height_out;

-    M4VIFI_UInt32   u32_stride_in, u32_stride_out;

-    M4VIFI_UInt32   u32_x_inc, u32_y_inc;

-    M4VIFI_UInt32   u32_x_accum, u32_y_accum, u32_x_accum_start;

-    M4VIFI_UInt32   u32_width, u32_height;

-    M4VIFI_UInt32   u32_y_frac;

-    M4VIFI_UInt32   u32_x_frac;

-    M4VIFI_UInt32   u32_temp_value;

-    M4VIFI_UInt8    *pu8_src_top;

-    M4VIFI_UInt8    *pu8_src_bottom;

-

-    M4VIFI_UInt8    u8Wflag = 0;

-    M4VIFI_UInt8    u8Hflag = 0;

-    M4VIFI_UInt32   loop = 0;

-

-

-    /*

-     If input width is equal to output width and input height equal to

-     output height then M4VIFI_YUV420toYUV420 is called.

-    */

-    if ((pPlaneIn[0].u_height == pPlaneOut[0].u_height) &&

-              (pPlaneIn[0].u_width == pPlaneOut[0].u_width))

-    {

-        return M4VIFI_YUV420toYUV420(pUserData, pPlaneIn, pPlaneOut);

-    }

-

-    /* Check for the YUV width and height are even */

-    if ((IS_EVEN(pPlaneIn[0].u_height) == FALSE)    ||

-        (IS_EVEN(pPlaneOut[0].u_height) == FALSE))

-    {

-        return M4VIFI_ILLEGAL_FRAME_HEIGHT;

-    }

-

-    if ((IS_EVEN(pPlaneIn[0].u_width) == FALSE) ||

-        (IS_EVEN(pPlaneOut[0].u_width) == FALSE))

-    {

-        return M4VIFI_ILLEGAL_FRAME_WIDTH;

-    }

-

-    /* Loop on planes */

-    for(u32_plane = 0;u32_plane < PLANES;u32_plane++)

-    {

-        /* Set the working pointers at the beginning of the input/output data field */

-        pu8_data_in     = pPlaneIn[u32_plane].pac_data + pPlaneIn[u32_plane].u_topleft;

-        pu8_data_out    = pPlaneOut[u32_plane].pac_data + pPlaneOut[u32_plane].u_topleft;

-

-        /* Get the memory jump corresponding to a row jump */

-        u32_stride_in   = pPlaneIn[u32_plane].u_stride;

-        u32_stride_out  = pPlaneOut[u32_plane].u_stride;

-

-        /* Set the bounds of the active image */

-        u32_width_in    = pPlaneIn[u32_plane].u_width;

-        u32_height_in   = pPlaneIn[u32_plane].u_height;

-

-        u32_width_out   = pPlaneOut[u32_plane].u_width;

-        u32_height_out  = pPlaneOut[u32_plane].u_height;

-

-        /*

-        For the case , width_out = width_in , set the flag to avoid

-        accessing one column beyond the input width.In this case the last

-        column is replicated for processing

-        */

-        if (u32_width_out == u32_width_in) {

-            u32_width_out = u32_width_out-1;

-            u8Wflag = 1;

-        }

-

-        /* Compute horizontal ratio between src and destination width.*/

-        if (u32_width_out >= u32_width_in)

-        {

-            u32_x_inc   = ((u32_width_in-1) * MAX_SHORT) / (u32_width_out-1);

-        }

-        else

-        {

-            u32_x_inc   = (u32_width_in * MAX_SHORT) / (u32_width_out);

-        }

-

-        /*

-        For the case , height_out = height_in , set the flag to avoid

-        accessing one row beyond the input height.In this case the last

-        row is replicated for processing

-        */

-        if (u32_height_out == u32_height_in) {

-            u32_height_out = u32_height_out-1;

-            u8Hflag = 1;

-        }

-

-        /* Compute vertical ratio between src and destination height.*/

-        if (u32_height_out >= u32_height_in)

-        {

-            u32_y_inc   = ((u32_height_in - 1) * MAX_SHORT) / (u32_height_out-1);

-        }

-        else

-        {

-            u32_y_inc = (u32_height_in * MAX_SHORT) / (u32_height_out);

-        }

-

-        /*

-        Calculate initial accumulator value : u32_y_accum_start.

-        u32_y_accum_start is coded on 15 bits, and represents a value

-        between 0 and 0.5

-        */

-        if (u32_y_inc >= MAX_SHORT)

-        {

-        /*

-        Keep the fractionnal part, assimung that integer  part is coded

-        on the 16 high bits and the fractional on the 15 low bits

-        */

-            u32_y_accum = u32_y_inc & 0xffff;

-

-            if (!u32_y_accum)

-            {

-                u32_y_accum = MAX_SHORT;

-            }

-

-            u32_y_accum >>= 1;

-        }

-        else

-        {

-            u32_y_accum = 0;

-        }

-

-

-        /*

-        Calculate initial accumulator value : u32_x_accum_start.

-        u32_x_accum_start is coded on 15 bits, and represents a value

-        between 0 and 0.5

-        */

-        if (u32_x_inc >= MAX_SHORT)

-        {

-            u32_x_accum_start = u32_x_inc & 0xffff;

-

-            if (!u32_x_accum_start)

-            {

-                u32_x_accum_start = MAX_SHORT;

-            }

-

-            u32_x_accum_start >>= 1;

-        }

-        else

-        {

-            u32_x_accum_start = 0;

-        }

-

-        u32_height = u32_height_out;

-

-        /*

-        Bilinear interpolation linearly interpolates along each row, and

-        then uses that result in a linear interpolation donw each column.

-        Each estimated pixel in the output image is a weighted combination

-        of its four neighbours according to the formula:

-        F(p',q')=f(p,q)R(-a)R(b)+f(p,q-1)R(-a)R(b-1)+f(p+1,q)R(1-a)R(b)+

-        f(p+&,q+1)R(1-a)R(b-1) with  R(x) = / x+1  -1 =< x =< 0 \ 1-x

-        0 =< x =< 1 and a (resp. b)weighting coefficient is the distance

-        from the nearest neighbor in the p (resp. q) direction

-        */

-

-        do { /* Scan all the row */

-

-            /* Vertical weight factor */

-            u32_y_frac = (u32_y_accum>>12)&15;

-

-            /* Reinit accumulator */

-            u32_x_accum = u32_x_accum_start;

-

-            u32_width = u32_width_out;

-

-            do { /* Scan along each row */

-                pu8_src_top = pu8_data_in + (u32_x_accum >> 16);

-                pu8_src_bottom = pu8_src_top + u32_stride_in;

-                u32_x_frac = (u32_x_accum >> 12)&15; /* Horizontal weight factor */

-

-                /* Weighted combination */

-                u32_temp_value = (M4VIFI_UInt8)(((pu8_src_top[0]*(16-u32_x_frac) +

-                                                 pu8_src_top[1]*u32_x_frac)*(16-u32_y_frac) +

-                                                (pu8_src_bottom[0]*(16-u32_x_frac) +

-                                                 pu8_src_bottom[1]*u32_x_frac)*u32_y_frac )>>8);

-

-                *pu8_data_out++ = (M4VIFI_UInt8)u32_temp_value;

-

-                /* Update horizontal accumulator */

-                u32_x_accum += u32_x_inc;

-            } while(--u32_width);

-

-            /*

-               This u8Wflag flag gets in to effect if input and output

-               width is same, and height may be different. So previous

-               pixel is replicated here

-            */

-            if (u8Wflag) {

-                *pu8_data_out = (M4VIFI_UInt8)u32_temp_value;

-            }

-

-            pu8dum = (pu8_data_out-u32_width_out);

-            pu8_data_out = pu8_data_out + u32_stride_out - u32_width_out;

-

-            /* Update vertical accumulator */

-            u32_y_accum += u32_y_inc;

-            if (u32_y_accum>>16) {

-                pu8_data_in = pu8_data_in + (u32_y_accum >> 16) * u32_stride_in;

-                u32_y_accum &= 0xffff;

-            }

-        } while(--u32_height);

-

-        /*

-        This u8Hflag flag gets in to effect if input and output height

-        is same, and width may be different. So previous pixel row is

-        replicated here

-        */

-        if (u8Hflag) {

-            for(loop =0; loop < (u32_width_out+u8Wflag); loop++) {

-                *pu8_data_out++ = (M4VIFI_UInt8)*pu8dum++;

-            }

-        }

-    }

-

-    return M4VIFI_OK;

-}

-

-M4OSA_ERR applyRenderingMode(M4VIFI_ImagePlane* pPlaneIn, M4VIFI_ImagePlane* pPlaneOut, M4xVSS_MediaRendering mediaRendering)

-{

-    M4OSA_ERR err = M4NO_ERROR;

-

-    if(mediaRendering == M4xVSS_kResizing)

-    {

-        /**

-         * Call the resize filter. From the intermediate frame to the encoder image plane */

-        err = M4VIFI_ResizeBilinearYUV420toYUV420(M4OSA_NULL, pPlaneIn, pPlaneOut);

-        if (M4NO_ERROR != err)

-        {

-            M4OSA_TRACE1_1("applyRenderingMode: M4ViFilResizeBilinearYUV420toYUV420 returns 0x%x!", err);

-            return err;

-        }

-    }

-    else

-    {

-        M4AIR_Params Params;

-        M4OSA_Context m_air_context;

-        M4VIFI_ImagePlane pImagePlanesTemp[3];

-        M4VIFI_ImagePlane* pPlaneTemp;

-        M4OSA_UInt8* pOutPlaneY = pPlaneOut[0].pac_data + pPlaneOut[0].u_topleft;

-        M4OSA_UInt8* pOutPlaneU = pPlaneOut[1].pac_data + pPlaneOut[1].u_topleft;

-        M4OSA_UInt8* pOutPlaneV = pPlaneOut[2].pac_data + pPlaneOut[2].u_topleft;

-        M4OSA_UInt8* pInPlaneY = NULL;

-        M4OSA_UInt8* pInPlaneU = NULL;

-        M4OSA_UInt8* pInPlaneV = NULL;

-        M4OSA_UInt32 i;

-

-        /*to keep media aspect ratio*/

-        /*Initialize AIR Params*/

-        Params.m_inputCoord.m_x = 0;

-        Params.m_inputCoord.m_y = 0;

-        Params.m_inputSize.m_height = pPlaneIn->u_height;

-        Params.m_inputSize.m_width = pPlaneIn->u_width;

-        Params.m_outputSize.m_width = pPlaneOut->u_width;

-        Params.m_outputSize.m_height = pPlaneOut->u_height;

-        Params.m_bOutputStripe = M4OSA_FALSE;

-        Params.m_outputOrientation = M4COMMON_kOrientationTopLeft;

-

-        /**

-        Media rendering: Black borders*/

-        if(mediaRendering == M4xVSS_kBlackBorders)

-        {

-            memset((void *)pPlaneOut[0].pac_data,Y_PLANE_BORDER_VALUE,(pPlaneOut[0].u_height*pPlaneOut[0].u_stride));

-            memset((void *)pPlaneOut[1].pac_data,U_PLANE_BORDER_VALUE,(pPlaneOut[1].u_height*pPlaneOut[1].u_stride));

-            memset((void *)pPlaneOut[2].pac_data,V_PLANE_BORDER_VALUE,(pPlaneOut[2].u_height*pPlaneOut[2].u_stride));

-

-            pImagePlanesTemp[0].u_width = pPlaneOut[0].u_width;

-            pImagePlanesTemp[0].u_height = pPlaneOut[0].u_height;

-            pImagePlanesTemp[0].u_stride = pPlaneOut[0].u_width;

-            pImagePlanesTemp[0].u_topleft = 0;

-            pImagePlanesTemp[0].pac_data = M4OSA_NULL;

-

-            pImagePlanesTemp[1].u_width = pPlaneOut[1].u_width;

-            pImagePlanesTemp[1].u_height = pPlaneOut[1].u_height;

-            pImagePlanesTemp[1].u_stride = pPlaneOut[1].u_width;

-            pImagePlanesTemp[1].u_topleft = 0;

-            pImagePlanesTemp[1].pac_data = M4OSA_NULL;

-

-            pImagePlanesTemp[2].u_width = pPlaneOut[2].u_width;

-            pImagePlanesTemp[2].u_height = pPlaneOut[2].u_height;

-            pImagePlanesTemp[2].u_stride = pPlaneOut[2].u_width;

-            pImagePlanesTemp[2].u_topleft = 0;

-            pImagePlanesTemp[2].pac_data = M4OSA_NULL;

-

-            /* Allocates plan in local image plane structure */

-            pImagePlanesTemp[0].pac_data = (M4OSA_UInt8*)M4OSA_32bitAlignedMalloc(pImagePlanesTemp[0].u_width * pImagePlanesTemp[0].u_height, M4VS, (M4OSA_Char*)"applyRenderingMode: temporary plane bufferY") ;

-            if(pImagePlanesTemp[0].pac_data == M4OSA_NULL)

-            {

-                M4OSA_TRACE1_0("Error alloc in applyRenderingMode");

-                return M4ERR_ALLOC;

-            }

-            pImagePlanesTemp[1].pac_data = (M4OSA_UInt8*)M4OSA_32bitAlignedMalloc(pImagePlanesTemp[1].u_width * pImagePlanesTemp[1].u_height, M4VS, (M4OSA_Char*)"applyRenderingMode: temporary plane bufferU") ;

-            if(pImagePlanesTemp[1].pac_data == M4OSA_NULL)

-            {

-

-                M4OSA_TRACE1_0("Error alloc in applyRenderingMode");

-                return M4ERR_ALLOC;

-            }

-            pImagePlanesTemp[2].pac_data = (M4OSA_UInt8*)M4OSA_32bitAlignedMalloc(pImagePlanesTemp[2].u_width * pImagePlanesTemp[2].u_height, M4VS, (M4OSA_Char*)"applyRenderingMode: temporary plane bufferV") ;

-            if(pImagePlanesTemp[2].pac_data == M4OSA_NULL)

-            {

-

-                M4OSA_TRACE1_0("Error alloc in applyRenderingMode");

-                return M4ERR_ALLOC;

-            }

-

-            pInPlaneY = pImagePlanesTemp[0].pac_data ;

-            pInPlaneU = pImagePlanesTemp[1].pac_data ;

-            pInPlaneV = pImagePlanesTemp[2].pac_data ;

-

-            memset((void *)pImagePlanesTemp[0].pac_data,Y_PLANE_BORDER_VALUE,(pImagePlanesTemp[0].u_height*pImagePlanesTemp[0].u_stride));

-            memset((void *)pImagePlanesTemp[1].pac_data,U_PLANE_BORDER_VALUE,(pImagePlanesTemp[1].u_height*pImagePlanesTemp[1].u_stride));

-            memset((void *)pImagePlanesTemp[2].pac_data,V_PLANE_BORDER_VALUE,(pImagePlanesTemp[2].u_height*pImagePlanesTemp[2].u_stride));

-

-            if((M4OSA_UInt32)((pPlaneIn->u_height * pPlaneOut->u_width) /pPlaneIn->u_width) <= pPlaneOut->u_height)//Params.m_inputSize.m_height < Params.m_inputSize.m_width)

-            {

-                /*it is height so black borders will be on the top and on the bottom side*/

-                Params.m_outputSize.m_width = pPlaneOut->u_width;

-                Params.m_outputSize.m_height = (M4OSA_UInt32)((pPlaneIn->u_height * pPlaneOut->u_width) /pPlaneIn->u_width);

-                /*number of lines at the top*/

-                pImagePlanesTemp[0].u_topleft = (M4xVSS_ABS((M4OSA_Int32)(pImagePlanesTemp[0].u_height-Params.m_outputSize.m_height)>>1))*pImagePlanesTemp[0].u_stride;

-                pImagePlanesTemp[0].u_height = Params.m_outputSize.m_height;

-                pImagePlanesTemp[1].u_topleft = (M4xVSS_ABS((M4OSA_Int32)(pImagePlanesTemp[1].u_height-(Params.m_outputSize.m_height>>1)))>>1)*pImagePlanesTemp[1].u_stride;

-                pImagePlanesTemp[1].u_height = Params.m_outputSize.m_height>>1;

-                pImagePlanesTemp[2].u_topleft = (M4xVSS_ABS((M4OSA_Int32)(pImagePlanesTemp[2].u_height-(Params.m_outputSize.m_height>>1)))>>1)*pImagePlanesTemp[2].u_stride;

-                pImagePlanesTemp[2].u_height = Params.m_outputSize.m_height>>1;

-            }

-            else

-            {

-                /*it is width so black borders will be on the left and right side*/

-                Params.m_outputSize.m_height = pPlaneOut->u_height;

-                Params.m_outputSize.m_width = (M4OSA_UInt32)((pPlaneIn->u_width * pPlaneOut->u_height) /pPlaneIn->u_height);

-

-                pImagePlanesTemp[0].u_topleft = (M4xVSS_ABS((M4OSA_Int32)(pImagePlanesTemp[0].u_width-Params.m_outputSize.m_width)>>1));

-                pImagePlanesTemp[0].u_width = Params.m_outputSize.m_width;

-                pImagePlanesTemp[1].u_topleft = (M4xVSS_ABS((M4OSA_Int32)(pImagePlanesTemp[1].u_width-(Params.m_outputSize.m_width>>1)))>>1);

-                pImagePlanesTemp[1].u_width = Params.m_outputSize.m_width>>1;

-                pImagePlanesTemp[2].u_topleft = (M4xVSS_ABS((M4OSA_Int32)(pImagePlanesTemp[2].u_width-(Params.m_outputSize.m_width>>1)))>>1);

-                pImagePlanesTemp[2].u_width = Params.m_outputSize.m_width>>1;

-            }

-

-            /*Width and height have to be even*/

-            Params.m_outputSize.m_width = (Params.m_outputSize.m_width>>1)<<1;

-            Params.m_outputSize.m_height = (Params.m_outputSize.m_height>>1)<<1;

-            Params.m_inputSize.m_width = (Params.m_inputSize.m_width>>1)<<1;

-            Params.m_inputSize.m_height = (Params.m_inputSize.m_height>>1)<<1;

-            pImagePlanesTemp[0].u_width = (pImagePlanesTemp[0].u_width>>1)<<1;

-            pImagePlanesTemp[1].u_width = (pImagePlanesTemp[1].u_width>>1)<<1;

-            pImagePlanesTemp[2].u_width = (pImagePlanesTemp[2].u_width>>1)<<1;

-            pImagePlanesTemp[0].u_height = (pImagePlanesTemp[0].u_height>>1)<<1;

-            pImagePlanesTemp[1].u_height = (pImagePlanesTemp[1].u_height>>1)<<1;

-            pImagePlanesTemp[2].u_height = (pImagePlanesTemp[2].u_height>>1)<<1;

-

-            /*Check that values are coherent*/

-            if(Params.m_inputSize.m_height == Params.m_outputSize.m_height)

-            {

-                Params.m_inputSize.m_width = Params.m_outputSize.m_width;

-            }

-            else if(Params.m_inputSize.m_width == Params.m_outputSize.m_width)

-            {

-                Params.m_inputSize.m_height = Params.m_outputSize.m_height;

-            }

-            pPlaneTemp = pImagePlanesTemp;

-

-

-        }

-

-        /**

-        Media rendering: Cropping*/

-        if(mediaRendering == M4xVSS_kCropping)

-        {

-            Params.m_outputSize.m_height = pPlaneOut->u_height;

-            Params.m_outputSize.m_width = pPlaneOut->u_width;

-            if((Params.m_outputSize.m_height * Params.m_inputSize.m_width) /Params.m_outputSize.m_width<Params.m_inputSize.m_height)

-            {

-                /*height will be cropped*/

-                Params.m_inputSize.m_height = (M4OSA_UInt32)((Params.m_outputSize.m_height * Params.m_inputSize.m_width) /Params.m_outputSize.m_width);

-                Params.m_inputSize.m_height = (Params.m_inputSize.m_height>>1)<<1;

-                Params.m_inputCoord.m_y = (M4OSA_Int32)((M4OSA_Int32)((pPlaneIn->u_height - Params.m_inputSize.m_height))>>1);

-            }

-            else

-            {

-                /*width will be cropped*/

-                Params.m_inputSize.m_width = (M4OSA_UInt32)((Params.m_outputSize.m_width * Params.m_inputSize.m_height) /Params.m_outputSize.m_height);

-                Params.m_inputSize.m_width = (Params.m_inputSize.m_width>>1)<<1;

-                Params.m_inputCoord.m_x = (M4OSA_Int32)((M4OSA_Int32)((pPlaneIn->u_width - Params.m_inputSize.m_width))>>1);

-            }

-            pPlaneTemp = pPlaneOut;

-        }

-

-        /**

-         * Call AIR functions */

-        err = M4AIR_create(&m_air_context, M4AIR_kYUV420P);

-        if(err != M4NO_ERROR)

-        {

-

-            M4OSA_TRACE1_1("applyRenderingMode: Error when initializing AIR: 0x%x", err);

-            for(i=0; i<3; i++)

-            {

-                if(pImagePlanesTemp[i].pac_data != M4OSA_NULL)

-                {

-                    free(pImagePlanesTemp[i].pac_data);

-                    pImagePlanesTemp[i].pac_data = M4OSA_NULL;

-                }

-            }

-            return err;

-        }

-

-

-        err = M4AIR_configure(m_air_context, &Params);

-        if(err != M4NO_ERROR)

-        {

-

-            M4OSA_TRACE1_1("applyRenderingMode: Error when configuring AIR: 0x%x", err);

-            M4AIR_cleanUp(m_air_context);

-            for(i=0; i<3; i++)

-            {

-                if(pImagePlanesTemp[i].pac_data != M4OSA_NULL)

-                {

-                    free(pImagePlanesTemp[i].pac_data);

-                    pImagePlanesTemp[i].pac_data = M4OSA_NULL;

-                }

-            }

-            return err;

-        }

-

-        err = M4AIR_get(m_air_context, pPlaneIn, pPlaneTemp);

-        if(err != M4NO_ERROR)

-        {

-            M4OSA_TRACE1_1("applyRenderingMode: Error when getting AIR plane: 0x%x", err);

-            M4AIR_cleanUp(m_air_context);

-            for(i=0; i<3; i++)

-            {

-                if(pImagePlanesTemp[i].pac_data != M4OSA_NULL)

-                {

-                    free(pImagePlanesTemp[i].pac_data);

-                    pImagePlanesTemp[i].pac_data = M4OSA_NULL;

-                }

-            }

-            return err;

-        }

-

-        if(mediaRendering == M4xVSS_kBlackBorders)

-        {

-            for(i=0; i<pPlaneOut[0].u_height; i++)

-            {

-                memcpy((void *)pOutPlaneY, (void *)pInPlaneY, pPlaneOut[0].u_width);

-                pInPlaneY += pPlaneOut[0].u_width;

-                pOutPlaneY += pPlaneOut[0].u_stride;

-            }

-            for(i=0; i<pPlaneOut[1].u_height; i++)

-            {

-                memcpy((void *)pOutPlaneU, (void *)pInPlaneU, pPlaneOut[1].u_width);

-                pInPlaneU += pPlaneOut[1].u_width;

-                pOutPlaneU += pPlaneOut[1].u_stride;

-            }

-            for(i=0; i<pPlaneOut[2].u_height; i++)

-            {

-                memcpy((void *)pOutPlaneV, (void *)pInPlaneV, pPlaneOut[2].u_width);

-                pInPlaneV += pPlaneOut[2].u_width;

-                pOutPlaneV += pPlaneOut[2].u_stride;

-            }

-

-            for(i=0; i<3; i++)

-            {

-                if(pImagePlanesTemp[i].pac_data != M4OSA_NULL)

-                {

-                    free(pImagePlanesTemp[i].pac_data);

-                    pImagePlanesTemp[i].pac_data = M4OSA_NULL;

-                }

-            }

-        }

-

-        if (m_air_context != M4OSA_NULL) {

-            M4AIR_cleanUp(m_air_context);

-            m_air_context = M4OSA_NULL;

-        }

-    }

-

-    return err;

-}

-

-//TODO: remove this code after link with videoartist lib

-/* M4AIR code*/

-#define M4AIR_YUV420_FORMAT_SUPPORTED

-#define M4AIR_YUV420A_FORMAT_SUPPORTED

-

-/************************* COMPILATION CHECKS ***************************/

-#ifndef M4AIR_YUV420_FORMAT_SUPPORTED

-#ifndef M4AIR_BGR565_FORMAT_SUPPORTED

-#ifndef M4AIR_RGB565_FORMAT_SUPPORTED

-#ifndef M4AIR_BGR888_FORMAT_SUPPORTED

-#ifndef M4AIR_RGB888_FORMAT_SUPPORTED

-#ifndef M4AIR_JPG_FORMAT_SUPPORTED

-

-#error "Please define at least one input format for the AIR component"

-

-#endif

-#endif

-#endif

-#endif

-#endif

-#endif

-

-/************************ M4AIR INTERNAL TYPES DEFINITIONS ***********************/

-

-/**

- ******************************************************************************

- * enum         M4AIR_States

- * @brief       The following enumeration defines the internal states of the AIR.

- ******************************************************************************

-*/

-typedef enum

-{

-    M4AIR_kCreated,         /**< State after M4AIR_create has been called */

-    M4AIR_kConfigured           /**< State after M4AIR_configure has been called */

-}M4AIR_States;

-

-

-/**

- ******************************************************************************

- * struct       M4AIR_InternalContext

- * @brief       The following structure is the internal context of the AIR.

- ******************************************************************************

-*/

-typedef struct

-{

-    M4AIR_States                m_state;            /**< Internal state */

-    M4AIR_InputFormatType   m_inputFormat;      /**< Input format like YUV420Planar, RGB565, JPG, etc ... */

-    M4AIR_Params            m_params;           /**< Current input Parameter of  the processing */

-    M4OSA_UInt32            u32_x_inc[4];       /**< ratio between input and ouput width for YUV */

-    M4OSA_UInt32            u32_y_inc[4];       /**< ratio between input and ouput height for YUV */

-    M4OSA_UInt32            u32_x_accum_start[4];   /**< horizontal initial accumulator value */

-    M4OSA_UInt32            u32_y_accum_start[4];   /**< Vertical initial accumulator value */

-    M4OSA_UInt32            u32_x_accum[4];     /**< save of horizontal accumulator value */

-    M4OSA_UInt32            u32_y_accum[4];     /**< save of vertical accumulator value */

-    M4OSA_UInt8*            pu8_data_in[4];         /**< Save of input plane pointers in case of stripe mode */

-    M4OSA_UInt32            m_procRows;         /**< Number of processed rows, used in stripe mode only */

-    M4OSA_Bool              m_bOnlyCopy;            /**< Flag to know if we just perform a copy or a bilinear interpolation */

-    M4OSA_Bool              m_bFlipX;               /**< Depend on output orientation, used during processing to revert processing order in X coordinates */

-    M4OSA_Bool              m_bFlipY;               /**< Depend on output orientation, used during processing to revert processing order in Y coordinates */

-    M4OSA_Bool              m_bRevertXY;            /**< Depend on output orientation, used during processing to revert X and Y processing order (+-90° rotation) */

-}M4AIR_InternalContext;

-

-/********************************* MACROS *******************************/

-#define M4ERR_CHECK_NULL_RETURN_VALUE(retval, pointer) if ((pointer) == M4OSA_NULL) return ((M4OSA_ERR)(retval));

-

-

-/********************** M4AIR PUBLIC API IMPLEMENTATION ********************/

-/**

- ******************************************************************************

- * M4OSA_ERR M4AIR_create(M4OSA_Context* pContext,M4AIR_InputFormatType inputFormat)

- * @author  Arnaud Collard

- * @brief       This function initialize an instance of the AIR.

- * @param   pContext:   (IN/OUT) Address of the context to create

- * @param   inputFormat:    (IN) input format type.

- * @return  M4NO_ERROR: there is no error

- * @return  M4ERR_PARAMETER: pContext is M4OSA_NULL (debug only). Invalid formatType

- * @return  M4ERR_ALLOC: No more memory is available

- ******************************************************************************

-*/

-M4OSA_ERR M4AIR_create(M4OSA_Context* pContext,M4AIR_InputFormatType inputFormat)

-{

-    M4OSA_ERR err = M4NO_ERROR ;

-    M4AIR_InternalContext* pC = M4OSA_NULL ;

-    /* Check that the address on the context is not NULL */

-    M4ERR_CHECK_NULL_RETURN_VALUE(M4ERR_PARAMETER, pContext) ;

-

-    *pContext = M4OSA_NULL ;

-

-    /* Internal Context creation */

-    pC = (M4AIR_InternalContext*)M4OSA_32bitAlignedMalloc(sizeof(M4AIR_InternalContext), M4AIR, (M4OSA_Char*)"AIR internal context") ;

-    M4ERR_CHECK_NULL_RETURN_VALUE(M4ERR_ALLOC, pC) ;

-

-

-    /* Check if the input format is supported */

-    switch(inputFormat)

-    {

-#ifdef M4AIR_YUV420_FORMAT_SUPPORTED

-        case M4AIR_kYUV420P:

-        break ;

-#endif

-#ifdef M4AIR_YUV420A_FORMAT_SUPPORTED

-        case M4AIR_kYUV420AP:

-        break ;

-#endif

-        default:

-            err = M4ERR_AIR_FORMAT_NOT_SUPPORTED;

-            goto M4AIR_create_cleanup ;

-    }

-

-    /**< Save input format and update state */

-    pC->m_inputFormat = inputFormat;

-    pC->m_state = M4AIR_kCreated;

-

-    /* Return the context to the caller */

-    *pContext = pC ;

-

-    return M4NO_ERROR ;

-

-M4AIR_create_cleanup:

-    /* Error management : we destroy the context if needed */

-    if(M4OSA_NULL != pC)

-    {

-        free(pC) ;

-    }

-

-    *pContext = M4OSA_NULL ;

-

-    return err ;

-}

-

-

-

-/**

- ******************************************************************************

- * M4OSA_ERR M4AIR_cleanUp(M4OSA_Context pContext)

- * @author  Arnaud Collard

- * @brief       This function destroys an instance of the AIR component

- * @param   pContext:   (IN) Context identifying the instance to destroy

- * @return  M4NO_ERROR: there is no error

- * @return  M4ERR_PARAMETER: pContext is M4OSA_NULL (debug only).

- * @return  M4ERR_STATE: Internal state is incompatible with this function call.

-******************************************************************************

-*/

-M4OSA_ERR M4AIR_cleanUp(M4OSA_Context pContext)

-{

-    M4AIR_InternalContext* pC = (M4AIR_InternalContext*)pContext ;

-

-    M4ERR_CHECK_NULL_RETURN_VALUE(M4ERR_PARAMETER, pContext) ;

-

-    /**< Check state */

-    if((M4AIR_kCreated != pC->m_state)&&(M4AIR_kConfigured != pC->m_state))

-    {

-        return M4ERR_STATE;

-    }

-    free(pC) ;

-

-    return M4NO_ERROR ;

-

-}

-

-

-/**

- ******************************************************************************

- * M4OSA_ERR M4AIR_configure(M4OSA_Context pContext, M4AIR_Params* pParams)

- * @brief       This function will configure the AIR.

- * @note    It will set the input and output coordinates and sizes,

- *          and indicates if we will proceed in stripe or not.

- *          In case a M4AIR_get in stripe mode was on going, it will cancel this previous processing

- *          and reset the get process.

- * @param   pContext:               (IN) Context identifying the instance

- * @param   pParams->m_bOutputStripe:(IN) Stripe mode.

- * @param   pParams->m_inputCoord:  (IN) X,Y coordinates of the first valid pixel in input.

- * @param   pParams->m_inputSize:   (IN) input ROI size.

- * @param   pParams->m_outputSize:  (IN) output size.

- * @return  M4NO_ERROR: there is no error

- * @return  M4ERR_ALLOC: No more memory space to add a new effect.

- * @return  M4ERR_PARAMETER: pContext is M4OSA_NULL (debug only).

- * @return  M4ERR_AIR_FORMAT_NOT_SUPPORTED: the requested input format is not supported.

- ******************************************************************************

-*/

-M4OSA_ERR M4AIR_configure(M4OSA_Context pContext, M4AIR_Params* pParams)

-{

-    M4AIR_InternalContext* pC = (M4AIR_InternalContext*)pContext ;

-    M4OSA_UInt32    i,u32_width_in, u32_width_out, u32_height_in, u32_height_out;

-    M4OSA_UInt32    nb_planes;

-

-    M4ERR_CHECK_NULL_RETURN_VALUE(M4ERR_PARAMETER, pContext) ;

-

-    if(M4AIR_kYUV420AP == pC->m_inputFormat)

-    {

-        nb_planes = 4;

-    }

-    else

-    {

-        nb_planes = 3;

-    }

-

-    /**< Check state */

-    if((M4AIR_kCreated != pC->m_state)&&(M4AIR_kConfigured != pC->m_state))

-    {

-        return M4ERR_STATE;

-    }

-

-    /** Save parameters */

-    pC->m_params = *pParams;

-

-    /* Check for the input&output width and height are even */

-        if( ((pC->m_params.m_inputSize.m_height)&0x1)    ||

-         ((pC->m_params.m_inputSize.m_height)&0x1))

-        {

-         return M4ERR_AIR_ILLEGAL_FRAME_SIZE;

-        }

-

-    if( ((pC->m_params.m_inputSize.m_width)&0x1)    ||

-         ((pC->m_params.m_inputSize.m_width)&0x1))

-        {

-            return M4ERR_AIR_ILLEGAL_FRAME_SIZE;

-        }

-    if(((pC->m_params.m_inputSize.m_width) == (pC->m_params.m_outputSize.m_width))

-        &&((pC->m_params.m_inputSize.m_height) == (pC->m_params.m_outputSize.m_height)))

-    {

-        /**< No resize in this case, we will just copy input in output */

-        pC->m_bOnlyCopy = M4OSA_TRUE;

-    }

-    else

-    {

-        pC->m_bOnlyCopy = M4OSA_FALSE;

-

-        /**< Initialize internal variables used for resize filter */

-        for(i=0;i<nb_planes;i++)

-        {

-

-            u32_width_in = ((i==0)||(i==3))?pC->m_params.m_inputSize.m_width:(pC->m_params.m_inputSize.m_width+1)>>1;

-            u32_height_in = ((i==0)||(i==3))?pC->m_params.m_inputSize.m_height:(pC->m_params.m_inputSize.m_height+1)>>1;

-            u32_width_out = ((i==0)||(i==3))?pC->m_params.m_outputSize.m_width:(pC->m_params.m_outputSize.m_width+1)>>1;

-            u32_height_out = ((i==0)||(i==3))?pC->m_params.m_outputSize.m_height:(pC->m_params.m_outputSize.m_height+1)>>1;

-

-                /* Compute horizontal ratio between src and destination width.*/

-                if (u32_width_out >= u32_width_in)

-                {

-                    pC->u32_x_inc[i]   = ((u32_width_in-1) * 0x10000) / (u32_width_out-1);

-                }

-                else

-                {

-                    pC->u32_x_inc[i]   = (u32_width_in * 0x10000) / (u32_width_out);

-                }

-

-                /* Compute vertical ratio between src and destination height.*/

-                if (u32_height_out >= u32_height_in)

-                {

-                    pC->u32_y_inc[i]   = ((u32_height_in - 1) * 0x10000) / (u32_height_out-1);

-                }

-                else

-                {

-                    pC->u32_y_inc[i] = (u32_height_in * 0x10000) / (u32_height_out);

-                }

-

-                /*

-                Calculate initial accumulator value : u32_y_accum_start.

-                u32_y_accum_start is coded on 15 bits, and represents a value between 0 and 0.5

-                */

-                if (pC->u32_y_inc[i] >= 0x10000)

-                {

-                    /*

-                        Keep the fractionnal part, assimung that integer  part is coded

-                        on the 16 high bits and the fractionnal on the 15 low bits

-                    */

-                    pC->u32_y_accum_start[i] = pC->u32_y_inc[i] & 0xffff;

-

-                    if (!pC->u32_y_accum_start[i])

-                    {

-                        pC->u32_y_accum_start[i] = 0x10000;

-                    }

-

-                    pC->u32_y_accum_start[i] >>= 1;

-                }

-                else

-                {

-                    pC->u32_y_accum_start[i] = 0;

-                }

-                /**< Take into account that Y coordinate can be odd

-                    in this case we have to put a 0.5 offset

-                    for U and V plane as there a 2 times sub-sampled vs Y*/

-                if((pC->m_params.m_inputCoord.m_y&0x1)&&((i==1)||(i==2)))

-                {

-                    pC->u32_y_accum_start[i] += 0x8000;

-                }

-

-                /*

-                    Calculate initial accumulator value : u32_x_accum_start.

-                    u32_x_accum_start is coded on 15 bits, and represents a value between 0 and 0.5

-                */

-

-                if (pC->u32_x_inc[i] >= 0x10000)

-                {

-                    pC->u32_x_accum_start[i] = pC->u32_x_inc[i] & 0xffff;

-

-                    if (!pC->u32_x_accum_start[i])

-                    {

-                        pC->u32_x_accum_start[i] = 0x10000;

-                    }

-

-                    pC->u32_x_accum_start[i] >>= 1;

-                }

-                else

-                {

-                    pC->u32_x_accum_start[i] = 0;

-                }

-                /**< Take into account that X coordinate can be odd

-                    in this case we have to put a 0.5 offset

-                    for U and V plane as there a 2 times sub-sampled vs Y*/

-                if((pC->m_params.m_inputCoord.m_x&0x1)&&((i==1)||(i==2)))

-                {

-                    pC->u32_x_accum_start[i] += 0x8000;

-                }

-        }

-    }

-

-    /**< Reset variable used for stripe mode */

-    pC->m_procRows = 0;

-

-    /**< Initialize var for X/Y processing order according to orientation */

-    pC->m_bFlipX = M4OSA_FALSE;

-    pC->m_bFlipY = M4OSA_FALSE;

-    pC->m_bRevertXY = M4OSA_FALSE;

-    switch(pParams->m_outputOrientation)

-    {

-        case M4COMMON_kOrientationTopLeft:

-            break;

-        case M4COMMON_kOrientationTopRight:

-            pC->m_bFlipX = M4OSA_TRUE;

-            break;

-        case M4COMMON_kOrientationBottomRight:

-            pC->m_bFlipX = M4OSA_TRUE;

-            pC->m_bFlipY = M4OSA_TRUE;

-            break;

-        case M4COMMON_kOrientationBottomLeft:

-            pC->m_bFlipY = M4OSA_TRUE;

-            break;

-        case M4COMMON_kOrientationLeftTop:

-            pC->m_bRevertXY = M4OSA_TRUE;

-            break;

-        case M4COMMON_kOrientationRightTop:

-            pC->m_bRevertXY = M4OSA_TRUE;

-            pC->m_bFlipY = M4OSA_TRUE;

-        break;

-        case M4COMMON_kOrientationRightBottom:

-            pC->m_bRevertXY = M4OSA_TRUE;

-            pC->m_bFlipX = M4OSA_TRUE;

-            pC->m_bFlipY = M4OSA_TRUE;

-            break;

-        case M4COMMON_kOrientationLeftBottom:

-            pC->m_bRevertXY = M4OSA_TRUE;

-            pC->m_bFlipX = M4OSA_TRUE;

-            break;

-        default:

-        return M4ERR_PARAMETER;

-    }

-    /**< Update state */

-    pC->m_state = M4AIR_kConfigured;

-

-    return M4NO_ERROR ;

-}

-

-

-/**

- ******************************************************************************

- * M4OSA_ERR M4AIR_get(M4OSA_Context pContext, M4VIFI_ImagePlane* pIn, M4VIFI_ImagePlane* pOut)

- * @brief   This function will provide the requested resized area of interest according to settings

- *          provided in M4AIR_configure.

- * @note    In case the input format type is JPEG, input plane(s)

- *          in pIn is not used. In normal mode, dimension specified in output plane(s) structure must be the

- *          same than the one specified in M4AIR_configure. In stripe mode, only the width will be the same,

- *          height will be taken as the stripe height (typically 16).

- *          In normal mode, this function is call once to get the full output picture. In stripe mode, it is called

- *          for each stripe till the whole picture has been retrieved,and  the position of the output stripe in the output picture

- *          is internally incremented at each step.

- *          Any call to M4AIR_configure during stripe process will reset this one to the beginning of the output picture.

- * @param   pContext:   (IN) Context identifying the instance

- * @param   pIn:            (IN) Plane structure containing input Plane(s).

- * @param   pOut:       (IN/OUT)  Plane structure containing output Plane(s).

- * @return  M4NO_ERROR: there is no error

- * @return  M4ERR_ALLOC: No more memory space to add a new effect.

- * @return  M4ERR_PARAMETER: pContext is M4OSA_NULL (debug only).

- ******************************************************************************

-*/

-M4OSA_ERR M4AIR_get(M4OSA_Context pContext, M4VIFI_ImagePlane* pIn, M4VIFI_ImagePlane* pOut)

-{

-    M4AIR_InternalContext* pC = (M4AIR_InternalContext*)pContext ;

-    M4OSA_UInt32 i,j,k,u32_x_frac,u32_y_frac,u32_x_accum,u32_y_accum,u32_shift;

-        M4OSA_UInt8    *pu8_data_in, *pu8_data_in_org, *pu8_data_in_tmp, *pu8_data_out;

-        M4OSA_UInt8    *pu8_src_top;

-        M4OSA_UInt8    *pu8_src_bottom;

-    M4OSA_UInt32    u32_temp_value;

-    M4OSA_Int32 i32_tmp_offset;

-    M4OSA_UInt32    nb_planes;

-

-

-

-    M4ERR_CHECK_NULL_RETURN_VALUE(M4ERR_PARAMETER, pContext) ;

-

-    /**< Check state */

-    if(M4AIR_kConfigured != pC->m_state)

-    {

-        return M4ERR_STATE;

-    }

-

-    if(M4AIR_kYUV420AP == pC->m_inputFormat)

-    {

-        nb_planes = 4;

-    }

-    else

-    {

-        nb_planes = 3;

-    }

-

-    /**< Loop on each Plane */

-    for(i=0;i<nb_planes;i++)

-    {

-

-         /* Set the working pointers at the beginning of the input/output data field */

-

-        u32_shift = ((i==0)||(i==3))?0:1; /**< Depend on Luma or Chroma */

-

-        if((M4OSA_FALSE == pC->m_params.m_bOutputStripe)||((M4OSA_TRUE == pC->m_params.m_bOutputStripe)&&(0 == pC->m_procRows)))

-        {

-            /**< For input, take care about ROI */

-            pu8_data_in     = pIn[i].pac_data + pIn[i].u_topleft + (pC->m_params.m_inputCoord.m_x>>u32_shift)

-                        + (pC->m_params.m_inputCoord.m_y >> u32_shift) * pIn[i].u_stride;

-

-            /** Go at end of line/column in case X/Y scanning is flipped */

-            if(M4OSA_TRUE == pC->m_bFlipX)

-            {

-                pu8_data_in += ((pC->m_params.m_inputSize.m_width)>>u32_shift) -1 ;

-            }

-            if(M4OSA_TRUE == pC->m_bFlipY)

-            {

-                pu8_data_in += ((pC->m_params.m_inputSize.m_height>>u32_shift) -1) * pIn[i].u_stride;

-            }

-

-            /**< Initialize accumulators in case we are using it (bilinear interpolation) */

-            if( M4OSA_FALSE == pC->m_bOnlyCopy)

-            {

-                pC->u32_x_accum[i] = pC->u32_x_accum_start[i];

-                pC->u32_y_accum[i] = pC->u32_y_accum_start[i];

-            }

-

-        }

-        else

-        {

-            /**< In case of stripe mode for other than first stripe, we need to recover input pointer from internal context */

-            pu8_data_in = pC->pu8_data_in[i];

-        }

-

-        /**< In every mode, output data are at the beginning of the output plane */

-        pu8_data_out    = pOut[i].pac_data + pOut[i].u_topleft;

-

-        /**< Initialize input offset applied after each pixel */

-        if(M4OSA_FALSE == pC->m_bFlipY)

-        {

-            i32_tmp_offset = pIn[i].u_stride;

-        }

-        else

-        {

-            i32_tmp_offset = -pIn[i].u_stride;

-        }

-

-        /**< In this case, no bilinear interpolation is needed as input and output dimensions are the same */

-        if( M4OSA_TRUE == pC->m_bOnlyCopy)

-        {

-            /**< No +-90° rotation */

-            if(M4OSA_FALSE == pC->m_bRevertXY)

-            {

-                /**< No flip on X abscissa */

-                if(M4OSA_FALSE == pC->m_bFlipX)

-                {

-                     M4OSA_UInt32 loc_height = pOut[i].u_height;

-                     M4OSA_UInt32 loc_width = pOut[i].u_width;

-                     M4OSA_UInt32 loc_stride = pIn[i].u_stride;

-                    /**< Loop on each row */

-                    for (j=0; j<loc_height; j++)

-                    {

-                        /**< Copy one whole line */

-                        memcpy((void *)pu8_data_out, (void *)pu8_data_in, loc_width);

-

-                        /**< Update pointers */

-                        pu8_data_out += pOut[i].u_stride;

-                        if(M4OSA_FALSE == pC->m_bFlipY)

-                        {

-                            pu8_data_in += loc_stride;

-                        }

-                        else

-                        {

-                            pu8_data_in -= loc_stride;

-                        }

-                    }

-                }

-                else

-                {

-                    /**< Loop on each row */

-                    for(j=0;j<pOut[i].u_height;j++)

-                    {

-                        /**< Loop on each pixel of 1 row */

-                        for(k=0;k<pOut[i].u_width;k++)

-                        {

-                            *pu8_data_out++ = *pu8_data_in--;

-                        }

-

-                        /**< Update pointers */

-                        pu8_data_out += (pOut[i].u_stride - pOut[i].u_width);

-

-                        pu8_data_in += pOut[i].u_width + i32_tmp_offset;

-

-                    }

-                }

-            }

-            /**< Here we have a +-90° rotation */

-            else

-            {

-

-                /**< Loop on each row */

-                for(j=0;j<pOut[i].u_height;j++)

-                {

-                    pu8_data_in_tmp = pu8_data_in;

-

-                    /**< Loop on each pixel of 1 row */

-                    for(k=0;k<pOut[i].u_width;k++)

-                    {

-                        *pu8_data_out++ = *pu8_data_in_tmp;

-

-                        /**< Update input pointer in order to go to next/past line */

-                        pu8_data_in_tmp += i32_tmp_offset;

-                    }

-

-                    /**< Update pointers */

-                    pu8_data_out += (pOut[i].u_stride - pOut[i].u_width);

-                    if(M4OSA_FALSE == pC->m_bFlipX)

-                    {

-                        pu8_data_in ++;

-                    }

-                    else

-                    {

-                        pu8_data_in --;

-                    }

-                }

-            }

-        }

-        /**< Bilinear interpolation */

-        else

-        {

-

-        if(3 != i)  /**< other than alpha plane */

-        {

-            /**No +-90° rotation */

-            if(M4OSA_FALSE == pC->m_bRevertXY)

-            {

-

-                /**< Loop on each row */

-                for(j=0;j<pOut[i].u_height;j++)

-                {

-                    /* Vertical weight factor */

-                    u32_y_frac = (pC->u32_y_accum[i]>>12)&15;

-

-                    /* Reinit horizontal weight factor */

-                    u32_x_accum = pC->u32_x_accum_start[i];

-

-

-

-                        if(M4OSA_TRUE ==  pC->m_bFlipX)

-                        {

-

-                            /**< Loop on each output pixel in a row */

-                            for(k=0;k<pOut[i].u_width;k++)

-                            {

-

-                                u32_x_frac = (u32_x_accum >> 12)&15; /* Fraction of Horizontal weight factor */

-

-                                pu8_src_top = (pu8_data_in - (u32_x_accum >> 16)) -1 ;

-

-                                pu8_src_bottom = pu8_src_top + i32_tmp_offset;

-

-                                /* Weighted combination */

-                                u32_temp_value = (M4VIFI_UInt8)(((pu8_src_top[1]*(16-u32_x_frac) +

-                                                                 pu8_src_top[0]*u32_x_frac)*(16-u32_y_frac) +

-                                                                (pu8_src_bottom[1]*(16-u32_x_frac) +

-                                                                 pu8_src_bottom[0]*u32_x_frac)*u32_y_frac )>>8);

-

-                                *pu8_data_out++ = (M4VIFI_UInt8)u32_temp_value;

-

-                                /* Update horizontal accumulator */

-                                u32_x_accum += pC->u32_x_inc[i];

-                            }

-                        }

-

-                        else

-                        {

-                            /**< Loop on each output pixel in a row */

-                            for(k=0;k<pOut[i].u_width;k++)

-                            {

-                                u32_x_frac = (u32_x_accum >> 12)&15; /* Fraction of Horizontal weight factor */

-

-                                pu8_src_top = pu8_data_in + (u32_x_accum >> 16);

-

-                                pu8_src_bottom = pu8_src_top + i32_tmp_offset;

-

-                                /* Weighted combination */

-                                u32_temp_value = (M4VIFI_UInt8)(((pu8_src_top[0]*(16-u32_x_frac) +

-                                                                 pu8_src_top[1]*u32_x_frac)*(16-u32_y_frac) +

-                                                                (pu8_src_bottom[0]*(16-u32_x_frac) +

-                                                                 pu8_src_bottom[1]*u32_x_frac)*u32_y_frac )>>8);

-

-                                    *pu8_data_out++ = (M4VIFI_UInt8)u32_temp_value;

-

-                                /* Update horizontal accumulator */

-                                u32_x_accum += pC->u32_x_inc[i];

-                            }

-

-                        }

-

-                    pu8_data_out += pOut[i].u_stride - pOut[i].u_width;

-

-                    /* Update vertical accumulator */

-                    pC->u32_y_accum[i] += pC->u32_y_inc[i];

-                    if (pC->u32_y_accum[i]>>16)

-                    {

-                        pu8_data_in = pu8_data_in + (pC->u32_y_accum[i] >> 16) * i32_tmp_offset;

-                        pC->u32_y_accum[i] &= 0xffff;

-                    }

-                }

-        }

-            /** +-90° rotation */

-            else

-            {

-                pu8_data_in_org = pu8_data_in;

-

-                /**< Loop on each output row */

-                for(j=0;j<pOut[i].u_height;j++)

-                {

-                    /* horizontal weight factor */

-                    u32_x_frac = (pC->u32_x_accum[i]>>12)&15;

-

-                    /* Reinit accumulator */

-                    u32_y_accum = pC->u32_y_accum_start[i];

-

-                    if(M4OSA_TRUE ==  pC->m_bFlipX)

-                    {

-

-                        /**< Loop on each output pixel in a row */

-                        for(k=0;k<pOut[i].u_width;k++)

-                        {

-

-                            u32_y_frac = (u32_y_accum >> 12)&15; /* Vertical weight factor */

-

-

-                            pu8_src_top = (pu8_data_in - (pC->u32_x_accum[i] >> 16)) - 1;

-

-                            pu8_src_bottom = pu8_src_top + i32_tmp_offset;

-

-                            /* Weighted combination */

-                            u32_temp_value = (M4VIFI_UInt8)(((pu8_src_top[1]*(16-u32_x_frac) +

-                                                                 pu8_src_top[0]*u32_x_frac)*(16-u32_y_frac) +

-                                                                (pu8_src_bottom[1]*(16-u32_x_frac) +

-                                                                 pu8_src_bottom[0]*u32_x_frac)*u32_y_frac )>>8);

-

-                            *pu8_data_out++ = (M4VIFI_UInt8)u32_temp_value;

-

-                            /* Update vertical accumulator */

-                            u32_y_accum += pC->u32_y_inc[i];

-                            if (u32_y_accum>>16)

-                            {

-                                pu8_data_in = pu8_data_in + (u32_y_accum >> 16) * i32_tmp_offset;

-                                u32_y_accum &= 0xffff;

-                            }

-

-                        }

-                    }

-                    else

-                    {

-                        /**< Loop on each output pixel in a row */

-                        for(k=0;k<pOut[i].u_width;k++)

-                        {

-

-                            u32_y_frac = (u32_y_accum >> 12)&15; /* Vertical weight factor */

-

-                            pu8_src_top = pu8_data_in + (pC->u32_x_accum[i] >> 16);

-

-                            pu8_src_bottom = pu8_src_top + i32_tmp_offset;

-

-                            /* Weighted combination */

-                            u32_temp_value = (M4VIFI_UInt8)(((pu8_src_top[0]*(16-u32_x_frac) +

-                                                                 pu8_src_top[1]*u32_x_frac)*(16-u32_y_frac) +

-                                                                (pu8_src_bottom[0]*(16-u32_x_frac) +

-                                                                 pu8_src_bottom[1]*u32_x_frac)*u32_y_frac )>>8);

-

-                            *pu8_data_out++ = (M4VIFI_UInt8)u32_temp_value;

-

-                            /* Update vertical accumulator */

-                            u32_y_accum += pC->u32_y_inc[i];

-                            if (u32_y_accum>>16)

-                            {

-                                pu8_data_in = pu8_data_in + (u32_y_accum >> 16) * i32_tmp_offset;

-                                u32_y_accum &= 0xffff;

-                            }

-                        }

-                    }

-                    pu8_data_out += pOut[i].u_stride - pOut[i].u_width;

-

-                    /* Update horizontal accumulator */

-                    pC->u32_x_accum[i] += pC->u32_x_inc[i];

-

-                    pu8_data_in = pu8_data_in_org;

-                }

-

-            }

-            }/** 3 != i */

-            else

-            {

-            /**No +-90° rotation */

-            if(M4OSA_FALSE == pC->m_bRevertXY)

-            {

-

-                /**< Loop on each row */

-                for(j=0;j<pOut[i].u_height;j++)

-                {

-                    /* Vertical weight factor */

-                    u32_y_frac = (pC->u32_y_accum[i]>>12)&15;

-

-                    /* Reinit horizontal weight factor */

-                    u32_x_accum = pC->u32_x_accum_start[i];

-

-

-

-                        if(M4OSA_TRUE ==  pC->m_bFlipX)

-                        {

-

-                            /**< Loop on each output pixel in a row */

-                            for(k=0;k<pOut[i].u_width;k++)

-                            {

-

-                                u32_x_frac = (u32_x_accum >> 12)&15; /* Fraction of Horizontal weight factor */

-

-                                pu8_src_top = (pu8_data_in - (u32_x_accum >> 16)) -1 ;

-

-                                pu8_src_bottom = pu8_src_top + i32_tmp_offset;

-

-                                /* Weighted combination */

-                                u32_temp_value = (M4VIFI_UInt8)(((pu8_src_top[1]*(16-u32_x_frac) +

-                                                                 pu8_src_top[0]*u32_x_frac)*(16-u32_y_frac) +

-                                                                (pu8_src_bottom[1]*(16-u32_x_frac) +

-                                                                 pu8_src_bottom[0]*u32_x_frac)*u32_y_frac )>>8);

-

-                                u32_temp_value= (u32_temp_value >> 7)*0xff;

-

-                                *pu8_data_out++ = (M4VIFI_UInt8)u32_temp_value;

-

-                                /* Update horizontal accumulator */

-                                u32_x_accum += pC->u32_x_inc[i];

-                            }

-                        }

-

-                        else

-                        {

-                            /**< Loop on each output pixel in a row */

-                            for(k=0;k<pOut[i].u_width;k++)

-                            {

-                                u32_x_frac = (u32_x_accum >> 12)&15; /* Fraction of Horizontal weight factor */

-

-                                pu8_src_top = pu8_data_in + (u32_x_accum >> 16);

-

-                                pu8_src_bottom = pu8_src_top + i32_tmp_offset;

-

-                                /* Weighted combination */

-                                u32_temp_value = (M4VIFI_UInt8)(((pu8_src_top[0]*(16-u32_x_frac) +

-                                                                 pu8_src_top[1]*u32_x_frac)*(16-u32_y_frac) +

-                                                                (pu8_src_bottom[0]*(16-u32_x_frac) +

-                                                                 pu8_src_bottom[1]*u32_x_frac)*u32_y_frac )>>8);

-

-                                u32_temp_value= (u32_temp_value >> 7)*0xff;

-

-                                *pu8_data_out++ = (M4VIFI_UInt8)u32_temp_value;

-

-                                /* Update horizontal accumulator */

-                                u32_x_accum += pC->u32_x_inc[i];

-                            }

-

-                        }

-

-                    pu8_data_out += pOut[i].u_stride - pOut[i].u_width;

-

-                    /* Update vertical accumulator */

-                    pC->u32_y_accum[i] += pC->u32_y_inc[i];

-                    if (pC->u32_y_accum[i]>>16)

-                    {

-                        pu8_data_in = pu8_data_in + (pC->u32_y_accum[i] >> 16) * i32_tmp_offset;

-                        pC->u32_y_accum[i] &= 0xffff;

-                    }

-                }

-

-            } /**< M4OSA_FALSE == pC->m_bRevertXY */

-            /** +-90° rotation */

-            else

-            {

-                pu8_data_in_org = pu8_data_in;

-

-                /**< Loop on each output row */

-                for(j=0;j<pOut[i].u_height;j++)

-                {

-                    /* horizontal weight factor */

-                    u32_x_frac = (pC->u32_x_accum[i]>>12)&15;

-

-                    /* Reinit accumulator */

-                    u32_y_accum = pC->u32_y_accum_start[i];

-

-                    if(M4OSA_TRUE ==  pC->m_bFlipX)

-                    {

-

-                        /**< Loop on each output pixel in a row */

-                        for(k=0;k<pOut[i].u_width;k++)

-                        {

-

-                            u32_y_frac = (u32_y_accum >> 12)&15; /* Vertical weight factor */

-

-

-                            pu8_src_top = (pu8_data_in - (pC->u32_x_accum[i] >> 16)) - 1;

-

-                            pu8_src_bottom = pu8_src_top + i32_tmp_offset;

-

-                            /* Weighted combination */

-                            u32_temp_value = (M4VIFI_UInt8)(((pu8_src_top[1]*(16-u32_x_frac) +

-                                                                 pu8_src_top[0]*u32_x_frac)*(16-u32_y_frac) +

-                                                                (pu8_src_bottom[1]*(16-u32_x_frac) +

-                                                                 pu8_src_bottom[0]*u32_x_frac)*u32_y_frac )>>8);

-

-                            u32_temp_value= (u32_temp_value >> 7)*0xff;

-

-                            *pu8_data_out++ = (M4VIFI_UInt8)u32_temp_value;

-

-                            /* Update vertical accumulator */

-                            u32_y_accum += pC->u32_y_inc[i];

-                            if (u32_y_accum>>16)

-                            {

-                                pu8_data_in = pu8_data_in + (u32_y_accum >> 16) * i32_tmp_offset;

-                                u32_y_accum &= 0xffff;

-                            }

-

-                        }

-                    }

-                    else

-                    {

-                        /**< Loop on each output pixel in a row */

-                        for(k=0;k<pOut[i].u_width;k++)

-                        {

-

-                            u32_y_frac = (u32_y_accum >> 12)&15; /* Vertical weight factor */

-

-                            pu8_src_top = pu8_data_in + (pC->u32_x_accum[i] >> 16);

-

-                            pu8_src_bottom = pu8_src_top + i32_tmp_offset;

-

-                            /* Weighted combination */

-                            u32_temp_value = (M4VIFI_UInt8)(((pu8_src_top[0]*(16-u32_x_frac) +

-                                                                 pu8_src_top[1]*u32_x_frac)*(16-u32_y_frac) +

-                                                                (pu8_src_bottom[0]*(16-u32_x_frac) +

-                                                                 pu8_src_bottom[1]*u32_x_frac)*u32_y_frac )>>8);

-

-                            u32_temp_value= (u32_temp_value >> 7)*0xff;

-

-                            *pu8_data_out++ = (M4VIFI_UInt8)u32_temp_value;

-

-                            /* Update vertical accumulator */

-                            u32_y_accum += pC->u32_y_inc[i];

-                            if (u32_y_accum>>16)

-                            {

-                                pu8_data_in = pu8_data_in + (u32_y_accum >> 16) * i32_tmp_offset;

-                                u32_y_accum &= 0xffff;

-                            }

-                        }

-                    }

-                    pu8_data_out += pOut[i].u_stride - pOut[i].u_width;

-

-                    /* Update horizontal accumulator */

-                    pC->u32_x_accum[i] += pC->u32_x_inc[i];

-

-                    pu8_data_in = pu8_data_in_org;

-

-                }

-                } /**< M4OSA_TRUE == pC->m_bRevertXY */

-        }/** 3 == i */

-            }

-        /**< In case of stripe mode, save current input pointer */

-        if(M4OSA_TRUE == pC->m_params.m_bOutputStripe)

-        {

-            pC->pu8_data_in[i] = pu8_data_in;

-        }

-    }

-

-    /**< Update number of processed rows, reset it if we have finished with the whole processing */

-    pC->m_procRows += pOut[0].u_height;

-    if(M4OSA_FALSE == pC->m_bRevertXY)

-    {

-        if(pC->m_params.m_outputSize.m_height <= pC->m_procRows)    pC->m_procRows = 0;

-    }

-    else

-    {

-        if(pC->m_params.m_outputSize.m_width <= pC->m_procRows) pC->m_procRows = 0;

-    }

-

-    return M4NO_ERROR ;

-

-}

-/*+ Handle the image files here */

-

-/**

- ******************************************************************************

- * M4OSA_ERR LvGetImageThumbNail(M4OSA_UChar *fileName, M4OSA_Void **pBuffer)

- * @brief   This function gives YUV420 buffer of a given image file (in argb888 format)

- * @Note: The caller of the function is responsible to free the yuv buffer allocated

- * @param   fileName:       (IN) Path to the filename of the image argb data

- * @param   height:     (IN) Height of the image

- * @param     width:             (OUT) pBuffer pointer to the address where the yuv data address needs to be returned.

- * @return  M4NO_ERROR: there is no error

- * @return  M4ERR_ALLOC: No more memory space to add a new effect.

- * @return  M4ERR_FILE_NOT_FOUND: if the file passed does not exists.

- ******************************************************************************

-*/

-M4OSA_ERR LvGetImageThumbNail(const char *fileName, M4OSA_UInt32 height, M4OSA_UInt32 width, M4OSA_Void **pBuffer) {

-

-    M4VIFI_ImagePlane rgbPlane, *yuvPlane;

-    M4OSA_UInt32 frameSize_argb = (width * height * 4); // argb data

-    M4OSA_Context lImageFileFp  = M4OSA_NULL;

-    M4OSA_ERR err = M4NO_ERROR;

-

-    M4OSA_UInt8 *pTmpData = (M4OSA_UInt8*) M4OSA_32bitAlignedMalloc(frameSize_argb, M4VS, (M4OSA_Char*)"Image argb data");

-    if(pTmpData == M4OSA_NULL) {

-        LOGE("Failed to allocate memory for Image clip");

-        return M4ERR_ALLOC;

-    }

-

-       /** Read the argb data from the passed file. */

-    M4OSA_ERR lerr = M4OSA_fileReadOpen(&lImageFileFp, (M4OSA_Void *) fileName, M4OSA_kFileRead);

-

-    if((lerr != M4NO_ERROR) || (lImageFileFp == M4OSA_NULL))

-    {

-        LOGE("LVPreviewController: Can not open the file ");

-        free(pTmpData);

-        return M4ERR_FILE_NOT_FOUND;

-    }

-    lerr = M4OSA_fileReadData(lImageFileFp, (M4OSA_MemAddr8)pTmpData, &frameSize_argb);

-    if(lerr != M4NO_ERROR)

-    {

-        LOGE("LVPreviewController: can not read the data ");

-        M4OSA_fileReadClose(lImageFileFp);

-        free(pTmpData);

-        return lerr;

-    }

-    M4OSA_fileReadClose(lImageFileFp);

-

-    M4OSA_UInt32 frameSize = (width * height * 3); //Size of YUV420 data.

-    rgbPlane.pac_data = (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc(frameSize, M4VS, (M4OSA_Char*)"Image clip RGB888 data");

-    if(rgbPlane.pac_data == M4OSA_NULL)

-    {

-        LOGE("Failed to allocate memory for Image clip");

-        free(pTmpData);

-        return M4ERR_ALLOC;

-    }

-

-    /** Remove the alpha channel */

-    for (M4OSA_UInt32 i=0, j = 0; i < frameSize_argb; i++) {

-        if ((i % 4) == 0) continue;

-        rgbPlane.pac_data[j] = pTmpData[i];

-        j++;

-    }

-    free(pTmpData);

-

-#ifdef FILE_DUMP

-    FILE *fp = fopen("/sdcard/Input/test_rgb.raw", "wb");

-    if(fp == NULL)

-        LOGE("Errors file can not be created");

-    else {

-        fwrite(rgbPlane.pac_data, frameSize, 1, fp);

-        fclose(fp);

-    }

-#endif

-        rgbPlane.u_height = height;

-        rgbPlane.u_width = width;

-        rgbPlane.u_stride = width*3;

-        rgbPlane.u_topleft = 0;

-

-        yuvPlane = (M4VIFI_ImagePlane*)M4OSA_32bitAlignedMalloc(3*sizeof(M4VIFI_ImagePlane),

-                M4VS, (M4OSA_Char*)"M4xVSS_internalConvertRGBtoYUV: Output plane YUV");

-        yuvPlane[0].u_height = height;

-        yuvPlane[0].u_width = width;

-        yuvPlane[0].u_stride = width;

-        yuvPlane[0].u_topleft = 0;

-        yuvPlane[0].pac_data = (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc(yuvPlane[0].u_height * yuvPlane[0].u_width * 1.5, M4VS, (M4OSA_Char*)"imageClip YUV data");

-

-        yuvPlane[1].u_height = yuvPlane[0].u_height >>1;

-        yuvPlane[1].u_width = yuvPlane[0].u_width >> 1;

-        yuvPlane[1].u_stride = yuvPlane[1].u_width;

-        yuvPlane[1].u_topleft = 0;

-        yuvPlane[1].pac_data = (M4VIFI_UInt8*)(yuvPlane[0].pac_data + yuvPlane[0].u_height * yuvPlane[0].u_width);

-

-        yuvPlane[2].u_height = yuvPlane[0].u_height >>1;

-        yuvPlane[2].u_width = yuvPlane[0].u_width >> 1;

-        yuvPlane[2].u_stride = yuvPlane[2].u_width;

-        yuvPlane[2].u_topleft = 0;

-        yuvPlane[2].pac_data = (M4VIFI_UInt8*)(yuvPlane[1].pac_data + yuvPlane[1].u_height * yuvPlane[1].u_width);

-

-

-        err = M4VIFI_RGB888toYUV420(M4OSA_NULL, &rgbPlane, yuvPlane);

-        //err = M4VIFI_BGR888toYUV420(M4OSA_NULL, &rgbPlane, yuvPlane);

-        if(err != M4NO_ERROR)

-        {

-            LOGE("error when converting from RGB to YUV: 0x%x\n", (unsigned int)err);

-        }

-        free(rgbPlane.pac_data);

-

-        //LOGE("RGB to YUV done");

-#ifdef FILE_DUMP

-        FILE *fp1 = fopen("/sdcard/Input/test_yuv.raw", "wb");

-        if(fp1 == NULL)

-            LOGE("Errors file can not be created");

-        else {

-            fwrite(yuvPlane[0].pac_data, yuvPlane[0].u_height * yuvPlane[0].u_width * 1.5, 1, fp1);

-            fclose(fp1);

-        }

-#endif

-        *pBuffer = yuvPlane[0].pac_data;

-        free(yuvPlane);

-        return M4NO_ERROR;

-

-}

-M4OSA_Void prepareYUV420ImagePlane(M4VIFI_ImagePlane *plane,

-    M4OSA_UInt32 width, M4OSA_UInt32 height, M4VIFI_UInt8 *buffer,

-    M4OSA_UInt32 reportedWidth, M4OSA_UInt32 reportedHeight) {

-

-    //Y plane

-    plane[0].u_width = width;

-    plane[0].u_height = height;

-    plane[0].u_stride = reportedWidth;

-    plane[0].u_topleft = 0;

-    plane[0].pac_data = buffer;

-

-    // U plane

-    plane[1].u_width = width/2;

-    plane[1].u_height = height/2;

-    plane[1].u_stride = reportedWidth >> 1;

-    plane[1].u_topleft = 0;

-    plane[1].pac_data = buffer+(reportedWidth*reportedHeight);

-

-    // V Plane

-    plane[2].u_width = width/2;

-    plane[2].u_height = height/2;

-    plane[2].u_stride = reportedWidth >> 1;

-    plane[2].u_topleft = 0;

-    plane[2].pac_data = plane[1].pac_data + ((reportedWidth/2)*(reportedHeight/2));

-}

-

-M4OSA_Void prepareYV12ImagePlane(M4VIFI_ImagePlane *plane,

-    M4OSA_UInt32 width, M4OSA_UInt32 height, M4OSA_UInt32 stride,

-    M4VIFI_UInt8 *buffer) {

-

-    //Y plane

-    plane[0].u_width = width;

-    plane[0].u_height = height;

-    plane[0].u_stride = stride;

-    plane[0].u_topleft = 0;

-    plane[0].pac_data = buffer;

-

-    // U plane

-    plane[1].u_width = width/2;

-    plane[1].u_height = height/2;

-    plane[1].u_stride = android::PreviewRenderer::ALIGN(plane[0].u_stride/2, 16);

-    plane[1].u_topleft = 0;

-    plane[1].pac_data = (buffer

-                + plane[0].u_height * plane[0].u_stride

-                + (plane[0].u_height/2) * android::PreviewRenderer::ALIGN((

-                 plane[0].u_stride / 2), 16));

-

-    // V Plane

-    plane[2].u_width = width/2;

-    plane[2].u_height = height/2;

-    plane[2].u_stride = android::PreviewRenderer::ALIGN(plane[0].u_stride/2, 16);

-    plane[2].u_topleft = 0;

-    plane[2].pac_data = (buffer +

-     plane[0].u_height * android::PreviewRenderer::ALIGN(plane[0].u_stride, 16));

-

-

-}

-

-M4OSA_Void swapImagePlanes(

-    M4VIFI_ImagePlane *planeIn, M4VIFI_ImagePlane *planeOut,

-    M4VIFI_UInt8 *buffer1, M4VIFI_UInt8 *buffer2) {

-

-    planeIn[0].u_height = planeOut[0].u_height;

-    planeIn[0].u_width = planeOut[0].u_width;

-    planeIn[0].u_stride = planeOut[0].u_stride;

-    planeIn[0].u_topleft = planeOut[0].u_topleft;

-    planeIn[0].pac_data = planeOut[0].pac_data;

-

-    /**

-     * U plane */

-    planeIn[1].u_width = planeOut[1].u_width;

-    planeIn[1].u_height = planeOut[1].u_height;

-    planeIn[1].u_stride = planeOut[1].u_stride;

-    planeIn[1].u_topleft = planeOut[1].u_topleft;

-    planeIn[1].pac_data = planeOut[1].pac_data;

-    /**

-     * V Plane */

-    planeIn[2].u_width = planeOut[2].u_width;

-    planeIn[2].u_height = planeOut[2].u_height;

-    planeIn[2].u_stride = planeOut[2].u_stride;

-    planeIn[2].u_topleft = planeOut[2].u_topleft;

-    planeIn[2].pac_data = planeOut[2].pac_data;

-

-    if(planeOut[0].pac_data == (M4VIFI_UInt8*)buffer1)

-    {

-        planeOut[0].pac_data = (M4VIFI_UInt8*)buffer2;

-        planeOut[1].pac_data = (M4VIFI_UInt8*)(buffer2 +

-         planeOut[0].u_width*planeOut[0].u_height);

-

-        planeOut[2].pac_data = (M4VIFI_UInt8*)(buffer2 +

-         planeOut[0].u_width*planeOut[0].u_height +

-         planeOut[1].u_width*planeOut[1].u_height);

-    }

-    else

-    {

-        planeOut[0].pac_data = (M4VIFI_UInt8*)buffer1;

-        planeOut[1].pac_data = (M4VIFI_UInt8*)(buffer1 +

-         planeOut[0].u_width*planeOut[0].u_height);

-

-        planeOut[2].pac_data = (M4VIFI_UInt8*)(buffer1 +

-         planeOut[0].u_width*planeOut[0].u_height +

-         planeOut[1].u_width*planeOut[1].u_height);

-    }

-

-}

-

-M4OSA_Void computePercentageDone(

-    M4OSA_UInt32 ctsMs, M4OSA_UInt32 effectStartTimeMs,

-    M4OSA_UInt32 effectDuration, M4OSA_Double *percentageDone) {

-

-    M4OSA_Double videoEffectTime =0;

-

-    // Compute how far from the beginning of the effect we are, in clip-base time.

-    videoEffectTime =

-     (M4OSA_Int32)(ctsMs+ 0.5) - effectStartTimeMs;

-

-    // To calculate %, substract timeIncrement

-    // because effect should finish on the last frame

-    // which is from CTS = (eof-timeIncrement) till CTS = eof

-    *percentageDone =

-     videoEffectTime / ((M4OSA_Float)effectDuration);

-

-    if(*percentageDone < 0.0) *percentageDone = 0.0;

-    if(*percentageDone > 1.0) *percentageDone = 1.0;

-

-}

-

-

-M4OSA_Void computeProgressForVideoEffect(

-    M4OSA_UInt32 ctsMs, M4OSA_UInt32 effectStartTimeMs,

-    M4OSA_UInt32 effectDuration, M4VSS3GPP_ExternalProgress* extProgress) {

-

-    M4OSA_Double percentageDone =0;

-

-    computePercentageDone(ctsMs, effectStartTimeMs, effectDuration, &percentageDone);

-

-    extProgress->uiProgress = (M4OSA_UInt32)( percentageDone * 1000 );

-    extProgress->uiOutputTime = (M4OSA_UInt32)(ctsMs + 0.5);

-    extProgress->uiClipTime = extProgress->uiOutputTime;

-    extProgress->bIsLast = M4OSA_FALSE;

-}

-

-M4OSA_ERR prepareFramingStructure(

-    M4xVSS_FramingStruct* framingCtx,

-    M4VSS3GPP_EffectSettings* effectsSettings, M4OSA_UInt32 index,

-    M4VIFI_UInt8* overlayRGB, M4VIFI_UInt8* overlayYUV) {

-

-    M4OSA_ERR err = M4NO_ERROR;

-

-    // Force input RGB buffer to even size to avoid errors in YUV conversion

-    framingCtx->FramingRgb = effectsSettings[index].xVSS.pFramingBuffer;

-    framingCtx->FramingRgb->u_width = framingCtx->FramingRgb->u_width & ~1;

-    framingCtx->FramingRgb->u_height = framingCtx->FramingRgb->u_height & ~1;

-    framingCtx->FramingYuv = NULL;

-

-    framingCtx->duration = effectsSettings[index].uiDuration;

-    framingCtx->topleft_x = effectsSettings[index].xVSS.topleft_x;

-    framingCtx->topleft_y = effectsSettings[index].xVSS.topleft_y;

-    framingCtx->pCurrent = framingCtx;

-    framingCtx->pNext = framingCtx;

-    framingCtx->previousClipTime = -1;

-

-    framingCtx->alphaBlendingStruct =

-     (M4xVSS_internalEffectsAlphaBlending*)M4OSA_32bitAlignedMalloc(

-      sizeof(M4xVSS_internalEffectsAlphaBlending), M4VS,

-      (M4OSA_Char*)"alpha blending struct");

-

-    framingCtx->alphaBlendingStruct->m_fadeInTime =

-     effectsSettings[index].xVSS.uialphaBlendingFadeInTime;

-

-    framingCtx->alphaBlendingStruct->m_fadeOutTime =

-     effectsSettings[index].xVSS.uialphaBlendingFadeOutTime;

-

-    framingCtx->alphaBlendingStruct->m_end =

-     effectsSettings[index].xVSS.uialphaBlendingEnd;

-

-    framingCtx->alphaBlendingStruct->m_middle =

-     effectsSettings[index].xVSS.uialphaBlendingMiddle;

-

-    framingCtx->alphaBlendingStruct->m_start =

-     effectsSettings[index].xVSS.uialphaBlendingStart;

-

-    // If new Overlay buffer, convert from RGB to YUV

-    if((overlayRGB != framingCtx->FramingRgb->pac_data) || (overlayYUV == NULL) ) {

-

-        // If YUV buffer exists, delete it

-        if(overlayYUV != NULL) {

-           free(overlayYUV);

-           overlayYUV = NULL;

-        }

-    if(effectsSettings[index].xVSS.rgbType == M4VSS3GPP_kRGB565) {

-        // Input RGB565 plane is provided,

-        // let's convert it to YUV420, and update framing structure

-        err = M4xVSS_internalConvertRGBtoYUV(framingCtx);

-    }

-    else if(effectsSettings[index].xVSS.rgbType == M4VSS3GPP_kRGB888) {

-        // Input RGB888 plane is provided,

-        // let's convert it to YUV420, and update framing structure

-        err = M4xVSS_internalConvertRGB888toYUV(framingCtx);

-    }

-    else {

-        err = M4ERR_PARAMETER;

-    }

-        overlayYUV = framingCtx->FramingYuv[0].pac_data;

-        overlayRGB = framingCtx->FramingRgb->pac_data;

-

-    }

-    else {

-        LOGV(" YUV buffer reuse");

-        framingCtx->FramingYuv = (M4VIFI_ImagePlane*)M4OSA_32bitAlignedMalloc(

-            3*sizeof(M4VIFI_ImagePlane), M4VS, (M4OSA_Char*)"YUV");

-

-        if(framingCtx->FramingYuv == M4OSA_NULL) {

-            return M4ERR_ALLOC;

-        }

-

-        framingCtx->FramingYuv[0].u_width = framingCtx->FramingRgb->u_width;

-        framingCtx->FramingYuv[0].u_height = framingCtx->FramingRgb->u_height;

-        framingCtx->FramingYuv[0].u_topleft = 0;

-        framingCtx->FramingYuv[0].u_stride = framingCtx->FramingRgb->u_width;

-        framingCtx->FramingYuv[0].pac_data = (M4VIFI_UInt8*)overlayYUV;

-

-        framingCtx->FramingYuv[1].u_width = (framingCtx->FramingRgb->u_width)>>1;

-        framingCtx->FramingYuv[1].u_height = (framingCtx->FramingRgb->u_height)>>1;

-        framingCtx->FramingYuv[1].u_topleft = 0;

-        framingCtx->FramingYuv[1].u_stride = (framingCtx->FramingRgb->u_width)>>1;

-        framingCtx->FramingYuv[1].pac_data = framingCtx->FramingYuv[0].pac_data +

-            framingCtx->FramingYuv[0].u_width * framingCtx->FramingYuv[0].u_height;

-

-        framingCtx->FramingYuv[2].u_width = (framingCtx->FramingRgb->u_width)>>1;

-        framingCtx->FramingYuv[2].u_height = (framingCtx->FramingRgb->u_height)>>1;

-        framingCtx->FramingYuv[2].u_topleft = 0;

-        framingCtx->FramingYuv[2].u_stride = (framingCtx->FramingRgb->u_width)>>1;

-        framingCtx->FramingYuv[2].pac_data = framingCtx->FramingYuv[1].pac_data +

-            framingCtx->FramingYuv[1].u_width * framingCtx->FramingYuv[1].u_height;

-

-        framingCtx->duration = 0;

-        framingCtx->previousClipTime = -1;

-        framingCtx->previewOffsetClipTime = -1;

-

-    }

-    return err;

-}

-

-M4OSA_ERR applyColorEffect(M4xVSS_VideoEffectType colorEffect,

-    M4VIFI_ImagePlane *planeIn, M4VIFI_ImagePlane *planeOut,

-    M4VIFI_UInt8 *buffer1, M4VIFI_UInt8 *buffer2, M4OSA_UInt16 rgbColorData) {

-

-    M4xVSS_ColorStruct colorContext;

-    M4OSA_ERR err = M4NO_ERROR;

-

-    colorContext.colorEffectType = colorEffect;

-    colorContext.rgb16ColorData = rgbColorData;

-

-    err = M4VSS3GPP_externalVideoEffectColor(

-     (M4OSA_Void *)&colorContext, planeIn, planeOut, NULL,

-     colorEffect);

-

-    if(err != M4NO_ERROR) {

-        LOGV("M4VSS3GPP_externalVideoEffectColor(%d) error %d",

-            colorEffect, err);

-

-        if(NULL != buffer1) {

-            free(buffer1);

-            buffer1 = NULL;

-        }

-        if(NULL != buffer2) {

-            free(buffer2);

-            buffer2 = NULL;

-        }

-        return err;

-    }

-

-    // The out plane now becomes the in plane for adding other effects

-    swapImagePlanes(planeIn, planeOut, buffer1, buffer2);

-

-    return err;

-}

-

-M4OSA_ERR applyLumaEffect(M4VSS3GPP_VideoEffectType videoEffect,

-    M4VIFI_ImagePlane *planeIn, M4VIFI_ImagePlane *planeOut,

-    M4VIFI_UInt8 *buffer1, M4VIFI_UInt8 *buffer2, M4OSA_Int32 lum_factor) {

-

-    M4OSA_ERR err = M4NO_ERROR;

-

-    err = M4VFL_modifyLumaWithScale(

-         (M4ViComImagePlane*)planeIn,(M4ViComImagePlane*)planeOut,

-         lum_factor, NULL);

-

-    if(err != M4NO_ERROR) {

-        LOGE("M4VFL_modifyLumaWithScale(%d) error %d", videoEffect, (int)err);

-

-        if(NULL != buffer1) {

-            free(buffer1);

-            buffer1= NULL;

-        }

-        if(NULL != buffer2) {

-            free(buffer2);

-            buffer2= NULL;

-        }

-        return err;

-    }

-

-    // The out plane now becomes the in plane for adding other effects

-    swapImagePlanes(planeIn, planeOut,(M4VIFI_UInt8 *)buffer1,

-     (M4VIFI_UInt8 *)buffer2);

-

-    return err;

-}

-

-M4OSA_ERR applyCurtainEffect(M4VSS3GPP_VideoEffectType videoEffect,

-    M4VIFI_ImagePlane *planeIn, M4VIFI_ImagePlane *planeOut,

-    M4VIFI_UInt8 *buffer1, M4VIFI_UInt8 *buffer2,

-    M4VFL_CurtainParam* curtainParams) {

-

-    M4OSA_ERR err = M4NO_ERROR;

-

-    // Apply the curtain effect

-    err = M4VFL_applyCurtain( (M4ViComImagePlane*)planeIn,

-          (M4ViComImagePlane*)planeOut, curtainParams, NULL);

-    if(err != M4NO_ERROR) {

-        LOGE("M4VFL_applyCurtain(%d) error %d", videoEffect, (int)err);

-

-        if(NULL != buffer1) {

-            free(buffer1);

-            buffer1= NULL;

-        }

-        if(NULL != buffer2) {

-            free(buffer2);

-            buffer2 = NULL;

-        }

-        return err;

-    }

-

-    // The out plane now becomes the in plane for adding other effects

-    swapImagePlanes(planeIn, planeOut,(M4VIFI_UInt8 *)buffer1,

-    (M4VIFI_UInt8 *)buffer2);

-

-    return err;

-}

-

-M4OSA_ERR applyEffectsAndRenderingMode(vePostProcessParams *params,

-    M4OSA_UInt32 reportedWidth, M4OSA_UInt32 reportedHeight) {

-

-    M4OSA_ERR err = M4NO_ERROR;

-    M4VIFI_ImagePlane planeIn[3], planeOut[3];

-    M4VIFI_UInt8 *finalOutputBuffer = NULL, *tempOutputBuffer= NULL;

-    M4OSA_Double percentageDone =0;

-    M4OSA_Int32 lum_factor;

-    M4VFL_CurtainParam curtainParams;

-    M4VSS3GPP_ExternalProgress extProgress;

-    M4xVSS_FiftiesStruct fiftiesCtx;

-    M4OSA_UInt32 frameSize = 0, i=0;

-

-    frameSize = (params->videoWidth*params->videoHeight*3) >> 1;

-

-    finalOutputBuffer = (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc(frameSize, M4VS,

-     (M4OSA_Char*)("lvpp finalOutputBuffer"));

-

-    if(finalOutputBuffer == NULL) {

-        LOGE("applyEffectsAndRenderingMode: malloc error");

-        return M4ERR_ALLOC;

-    }

-

-    // allocate the tempOutputBuffer

-    tempOutputBuffer = (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc(

-     ((params->videoHeight*params->videoWidth*3)>>1), M4VS, (M4OSA_Char*)("lvpp colorBuffer"));

-

-    if(tempOutputBuffer == NULL) {

-        LOGE("applyEffectsAndRenderingMode: malloc error tempOutputBuffer");

-        if(NULL != finalOutputBuffer) {

-            free(finalOutputBuffer);

-            finalOutputBuffer = NULL;

-        }

-        return M4ERR_ALLOC;

-    }

-

-    // Initialize the In plane

-    prepareYUV420ImagePlane(planeIn, params->videoWidth, params->videoHeight,

-       params->vidBuffer, reportedWidth, reportedHeight);

-

-    // Initialize the Out plane

-    prepareYUV420ImagePlane(planeOut, params->videoWidth, params->videoHeight,

-       (M4VIFI_UInt8 *)tempOutputBuffer, params->videoWidth, params->videoHeight);

-

-    // The planeIn contains the YUV420 input data to postprocessing node

-    // and planeOut will contain the YUV420 data with effect

-    // In each successive if condition, apply filter to successive

-    // output YUV frame so that concurrent effects are both applied

-

-    if(params->currentVideoEffect & VIDEO_EFFECT_BLACKANDWHITE) {

-        err = applyColorEffect(M4xVSS_kVideoEffectType_BlackAndWhite,

-              planeIn, planeOut, (M4VIFI_UInt8 *)finalOutputBuffer,

-              (M4VIFI_UInt8 *)tempOutputBuffer, 0);

-        if(err != M4NO_ERROR) {

-            return err;

-        }

-    }

-

-    if(params->currentVideoEffect & VIDEO_EFFECT_PINK) {

-        err = applyColorEffect(M4xVSS_kVideoEffectType_Pink,

-              planeIn, planeOut, (M4VIFI_UInt8 *)finalOutputBuffer,

-              (M4VIFI_UInt8 *)tempOutputBuffer, 0);

-        if(err != M4NO_ERROR) {

-            return err;

-        }

-    }

-

-    if(params->currentVideoEffect & VIDEO_EFFECT_GREEN) {

-        err = applyColorEffect(M4xVSS_kVideoEffectType_Green,

-              planeIn, planeOut, (M4VIFI_UInt8 *)finalOutputBuffer,

-              (M4VIFI_UInt8 *)tempOutputBuffer, 0);

-        if(err != M4NO_ERROR) {

-            return err;

-        }

-    }

-

-    if(params->currentVideoEffect & VIDEO_EFFECT_SEPIA) {

-        err = applyColorEffect(M4xVSS_kVideoEffectType_Sepia,

-              planeIn, planeOut, (M4VIFI_UInt8 *)finalOutputBuffer,

-              (M4VIFI_UInt8 *)tempOutputBuffer, 0);

-        if(err != M4NO_ERROR) {

-            return err;

-        }

-    }

-

-    if(params->currentVideoEffect & VIDEO_EFFECT_NEGATIVE) {

-        err = applyColorEffect(M4xVSS_kVideoEffectType_Negative,

-              planeIn, planeOut, (M4VIFI_UInt8 *)finalOutputBuffer,

-              (M4VIFI_UInt8 *)tempOutputBuffer, 0);

-        if(err != M4NO_ERROR) {

-            return err;

-        }

-    }

-

-    if(params->currentVideoEffect & VIDEO_EFFECT_GRADIENT) {

-        // find the effect in effectSettings array

-        for(i=0;i<params->numberEffects;i++) {

-            if(params->effectsSettings[i].VideoEffectType ==

-             (M4VSS3GPP_VideoEffectType)M4xVSS_kVideoEffectType_Gradient)

-                break;

-        }

-        err = applyColorEffect(M4xVSS_kVideoEffectType_Gradient,

-              planeIn, planeOut, (M4VIFI_UInt8 *)finalOutputBuffer,

-              (M4VIFI_UInt8 *)tempOutputBuffer,

-              params->effectsSettings[i].xVSS.uiRgb16InputColor);

-        if(err != M4NO_ERROR) {

-            return err;

-        }

-    }

-

-    if(params->currentVideoEffect & VIDEO_EFFECT_COLOR_RGB16) {

-        // Find the effect in effectSettings array

-        for(i=0;i<params->numberEffects;i++) {

-            if(params->effectsSettings[i].VideoEffectType ==

-             (M4VSS3GPP_VideoEffectType)M4xVSS_kVideoEffectType_ColorRGB16)

-                break;

-        }

-        err = applyColorEffect(M4xVSS_kVideoEffectType_ColorRGB16,

-              planeIn, planeOut, (M4VIFI_UInt8 *)finalOutputBuffer,

-              (M4VIFI_UInt8 *)tempOutputBuffer,

-              params->effectsSettings[i].xVSS.uiRgb16InputColor);

-        if(err != M4NO_ERROR) {

-            return err;

-        }

-    }

-

-    if(params->currentVideoEffect & VIDEO_EFFECT_FIFTIES) {

-        // Find the effect in effectSettings array

-        for(i=0;i<params->numberEffects;i++) {

-            if(params->effectsSettings[i].VideoEffectType ==

-             (M4VSS3GPP_VideoEffectType)M4xVSS_kVideoEffectType_Fifties)

-                break;

-        }

-        if(i < params->numberEffects) {

-            computeProgressForVideoEffect(params->timeMs,

-             params->effectsSettings[i].uiStartTime,

-             params->effectsSettings[i].uiDuration, &extProgress);

-

-            if(params->isFiftiesEffectStarted) {

-                fiftiesCtx.previousClipTime = -1;

-            }

-            fiftiesCtx.fiftiesEffectDuration =

-             1000/params->effectsSettings[i].xVSS.uiFiftiesOutFrameRate;

-

-            fiftiesCtx.shiftRandomValue = 0;

-            fiftiesCtx.stripeRandomValue = 0;

-

-            err = M4VSS3GPP_externalVideoEffectFifties(

-             (M4OSA_Void *)&fiftiesCtx, planeIn, planeOut, &extProgress,

-             M4xVSS_kVideoEffectType_Fifties);

-

-            if(err != M4NO_ERROR) {

-                LOGE("M4VSS3GPP_externalVideoEffectFifties error 0x%x", (unsigned int)err);

-

-                if(NULL != finalOutputBuffer) {

-                    free(finalOutputBuffer);

-                    finalOutputBuffer = NULL;

-                }

-                if(NULL != tempOutputBuffer) {

-                    free(tempOutputBuffer);

-                    tempOutputBuffer = NULL;

-                }

-                return err;

-            }

-

-            // The out plane now becomes the in plane for adding other effects

-            swapImagePlanes(planeIn, planeOut,(M4VIFI_UInt8 *)finalOutputBuffer,

-             (M4VIFI_UInt8 *)tempOutputBuffer);

-        }

-    }

-

-    if(params->currentVideoEffect & VIDEO_EFFECT_FRAMING) {

-

-        M4xVSS_FramingStruct framingCtx;

-        // Find the effect in effectSettings array

-        for(i=0;i<params->numberEffects;i++) {

-            if(params->effectsSettings[i].VideoEffectType ==

-             (M4VSS3GPP_VideoEffectType)M4xVSS_kVideoEffectType_Framing) {

-                if((params->effectsSettings[i].uiStartTime <= params->timeMs + params->timeOffset) &&

-                   ((params->effectsSettings[i].uiStartTime+

-                     params->effectsSettings[i].uiDuration) >= params->timeMs + params->timeOffset))

-                {

-                        break;

-                }

-            }

-        }

-        if(i < params->numberEffects) {

-            computeProgressForVideoEffect(params->timeMs,

-             params->effectsSettings[i].uiStartTime,

-             params->effectsSettings[i].uiDuration, &extProgress);

-

-            err = prepareFramingStructure(&framingCtx,

-                  params->effectsSettings, i, params->overlayFrameRGBBuffer,

-                  params->overlayFrameYUVBuffer);

-

-            if(err == M4NO_ERROR) {

-                err = M4VSS3GPP_externalVideoEffectFraming(

-                      (M4OSA_Void *)&framingCtx, planeIn, planeOut, &extProgress,

-                      M4xVSS_kVideoEffectType_Framing);

-            }

-

-            free(framingCtx.alphaBlendingStruct);

-

-            if(framingCtx.FramingYuv != NULL) {

-                free(framingCtx.FramingYuv);

-                framingCtx.FramingYuv = NULL;

-            }

-            //If prepareFramingStructure / M4VSS3GPP_externalVideoEffectFraming

-            // returned error, then return from function

-            if(err != M4NO_ERROR) {

-

-                if(NULL != finalOutputBuffer) {

-                    free(finalOutputBuffer);

-                    finalOutputBuffer = NULL;

-                }

-                if(NULL != tempOutputBuffer) {

-                    free(tempOutputBuffer);

-                    tempOutputBuffer = NULL;

-                }

-                return err;

-            }

-

-            // The out plane now becomes the in plane for adding other effects

-            swapImagePlanes(planeIn, planeOut,(M4VIFI_UInt8 *)finalOutputBuffer,

-             (M4VIFI_UInt8 *)tempOutputBuffer);

-        }

-    }

-

-    if(params->currentVideoEffect & VIDEO_EFFECT_FADEFROMBLACK) {

-        /* find the effect in effectSettings array*/

-        for(i=0;i<params->numberEffects;i++) {

-            if(params->effectsSettings[i].VideoEffectType ==

-             M4VSS3GPP_kVideoEffectType_FadeFromBlack)

-                break;

-        }

-

-        if(i < params->numberEffects) {

-            computePercentageDone(params->timeMs,

-             params->effectsSettings[i].uiStartTime,

-             params->effectsSettings[i].uiDuration, &percentageDone);

-

-            // Compute where we are in the effect (scale is 0->1024)

-            lum_factor = (M4OSA_Int32)( percentageDone * 1024 );

-            // Apply the darkening effect

-            err = applyLumaEffect(M4VSS3GPP_kVideoEffectType_FadeFromBlack,

-                  planeIn, planeOut, (M4VIFI_UInt8 *)finalOutputBuffer,

-                  (M4VIFI_UInt8 *)tempOutputBuffer, lum_factor);

-            if(err != M4NO_ERROR) {

-                return err;

-            }

-        }

-    }

-

-    if(params->currentVideoEffect & VIDEO_EFFECT_FADETOBLACK) {

-        // Find the effect in effectSettings array

-        for(i=0;i<params->numberEffects;i++) {

-            if(params->effectsSettings[i].VideoEffectType ==

-             M4VSS3GPP_kVideoEffectType_FadeToBlack)

-                break;

-        }

-        if(i < params->numberEffects) {

-            computePercentageDone(params->timeMs,

-             params->effectsSettings[i].uiStartTime,

-             params->effectsSettings[i].uiDuration, &percentageDone);

-

-            // Compute where we are in the effect (scale is 0->1024)

-            lum_factor = (M4OSA_Int32)( (1.0-percentageDone) * 1024 );

-            // Apply the darkening effect

-            err = applyLumaEffect(M4VSS3GPP_kVideoEffectType_FadeToBlack,

-                  planeIn, planeOut, (M4VIFI_UInt8 *)finalOutputBuffer,

-                  (M4VIFI_UInt8 *)tempOutputBuffer, lum_factor);

-            if(err != M4NO_ERROR) {

-                return err;

-            }

-        }

-    }

-

-    if(params->currentVideoEffect & VIDEO_EFFECT_CURTAINOPEN) {

-        // Find the effect in effectSettings array

-        for(i=0;i<params->numberEffects;i++) {

-            if(params->effectsSettings[i].VideoEffectType ==

-             M4VSS3GPP_kVideoEffectType_CurtainOpening)

-                break;

-        }

-        if(i < params->numberEffects) {

-            computePercentageDone(params->timeMs,

-             params->effectsSettings[i].uiStartTime,

-             params->effectsSettings[i].uiDuration, &percentageDone);

-

-            // Compute where we are in the effect (scale is 0->height).

-            // It is done with floats because tmp x height

-            // can be very large (with long clips).

-            curtainParams.nb_black_lines =

-             (M4OSA_UInt16)((1.0 - percentageDone) * planeIn[0].u_height );

-            // The curtain is hanged on the ceiling

-            curtainParams.top_is_black = 1;

-

-            // Apply the curtain effect

-            err = applyCurtainEffect(M4VSS3GPP_kVideoEffectType_CurtainOpening,

-                  planeIn, planeOut, (M4VIFI_UInt8 *)finalOutputBuffer,

-                  (M4VIFI_UInt8 *)tempOutputBuffer, &curtainParams);

-            if(err != M4NO_ERROR) {

-                return err;

-            }

-        }

-    }

-

-    if(params->currentVideoEffect & VIDEO_EFFECT_CURTAINCLOSE) {

-        // Find the effect in effectSettings array

-        for(i=0;i<params->numberEffects;i++) {

-            if(params->effectsSettings[i].VideoEffectType ==

-             M4VSS3GPP_kVideoEffectType_CurtainClosing)

-                break;

-        }

-        if(i < params->numberEffects) {

-            computePercentageDone(params->timeMs,

-             params->effectsSettings[i].uiStartTime,

-             params->effectsSettings[i].uiDuration, &percentageDone);

-

-            // Compute where we are in the effect (scale is 0->height).

-            // It is done with floats because

-            // tmp x height can be very large (with long clips).

-            curtainParams.nb_black_lines =

-             (M4OSA_UInt16)(percentageDone * planeIn[0].u_height );

-

-            // The curtain is hanged on the ceiling

-            curtainParams.top_is_black = 1;

-

-            // Apply the curtain effect

-            err = applyCurtainEffect(M4VSS3GPP_kVideoEffectType_CurtainClosing,

-                  planeIn, planeOut, (M4VIFI_UInt8 *)finalOutputBuffer,

-                  (M4VIFI_UInt8 *)tempOutputBuffer, &curtainParams);

-            if(err != M4NO_ERROR) {

-                return err;

-            }

-        }

-    }

-

-    LOGV("doMediaRendering CALL getBuffer()");

-    // Set the output YUV420 plane to be compatible with YV12 format

-    // W & H even

-    // YVU instead of YUV

-    // align buffers on 32 bits

-

-    // Y plane

-    //in YV12 format, sizes must be even

-    M4OSA_UInt32 yv12PlaneWidth = ((params->outVideoWidth +1)>>1)<<1;

-    M4OSA_UInt32 yv12PlaneHeight = ((params->outVideoHeight+1)>>1)<<1;

-

-    prepareYV12ImagePlane(planeOut, yv12PlaneWidth, yv12PlaneHeight,

-     (M4OSA_UInt32)params->outBufferStride, (M4VIFI_UInt8 *)params->pOutBuffer);

-

-    err = applyRenderingMode(planeIn, planeOut, params->renderingMode);

-

-    if(M4OSA_NULL != finalOutputBuffer) {

-        free(finalOutputBuffer);

-        finalOutputBuffer= M4OSA_NULL;

-    }

-    if(M4OSA_NULL != tempOutputBuffer) {

-        free(tempOutputBuffer);

-        tempOutputBuffer = M4OSA_NULL;

-    }

-    if(err != M4NO_ERROR) {

-        LOGV("doVideoPostProcessing: applyRenderingMode returned err=%d",err);

-        return err;

-    }

-    return M4NO_ERROR;

-}

+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "VideoEditorTools.h"
+#include "PreviewRenderer.h"
+/*+ Handle the image files here */
+#include <utils/Log.h>
+/*- Handle the image files here */
+
+const M4VIFI_UInt8   M4VIFI_ClipTable[1256]
+= {
+0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x02, 0x03,
+0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0a, 0x0b,
+0x0c, 0x0d, 0x0e, 0x0f, 0x10, 0x11, 0x12, 0x13,
+0x14, 0x15, 0x16, 0x17, 0x18, 0x19, 0x1a, 0x1b,
+0x1c, 0x1d, 0x1e, 0x1f, 0x20, 0x21, 0x22, 0x23,
+0x24, 0x25, 0x26, 0x27, 0x28, 0x29, 0x2a, 0x2b,
+0x2c, 0x2d, 0x2e, 0x2f, 0x30, 0x31, 0x32, 0x33,
+0x34, 0x35, 0x36, 0x37, 0x38, 0x39, 0x3a, 0x3b,
+0x3c, 0x3d, 0x3e, 0x3f, 0x40, 0x41, 0x42, 0x43,
+0x44, 0x45, 0x46, 0x47, 0x48, 0x49, 0x4a, 0x4b,
+0x4c, 0x4d, 0x4e, 0x4f, 0x50, 0x51, 0x52, 0x53,
+0x54, 0x55, 0x56, 0x57, 0x58, 0x59, 0x5a, 0x5b,
+0x5c, 0x5d, 0x5e, 0x5f, 0x60, 0x61, 0x62, 0x63,
+0x64, 0x65, 0x66, 0x67, 0x68, 0x69, 0x6a, 0x6b,
+0x6c, 0x6d, 0x6e, 0x6f, 0x70, 0x71, 0x72, 0x73,
+0x74, 0x75, 0x76, 0x77, 0x78, 0x79, 0x7a, 0x7b,
+0x7c, 0x7d, 0x7e, 0x7f, 0x80, 0x81, 0x82, 0x83,
+0x84, 0x85, 0x86, 0x87, 0x88, 0x89, 0x8a, 0x8b,
+0x8c, 0x8d, 0x8e, 0x8f, 0x90, 0x91, 0x92, 0x93,
+0x94, 0x95, 0x96, 0x97, 0x98, 0x99, 0x9a, 0x9b,
+0x9c, 0x9d, 0x9e, 0x9f, 0xa0, 0xa1, 0xa2, 0xa3,
+0xa4, 0xa5, 0xa6, 0xa7, 0xa8, 0xa9, 0xaa, 0xab,
+0xac, 0xad, 0xae, 0xaf, 0xb0, 0xb1, 0xb2, 0xb3,
+0xb4, 0xb5, 0xb6, 0xb7, 0xb8, 0xb9, 0xba, 0xbb,
+0xbc, 0xbd, 0xbe, 0xbf, 0xc0, 0xc1, 0xc2, 0xc3,
+0xc4, 0xc5, 0xc6, 0xc7, 0xc8, 0xc9, 0xca, 0xcb,
+0xcc, 0xcd, 0xce, 0xcf, 0xd0, 0xd1, 0xd2, 0xd3,
+0xd4, 0xd5, 0xd6, 0xd7, 0xd8, 0xd9, 0xda, 0xdb,
+0xdc, 0xdd, 0xde, 0xdf, 0xe0, 0xe1, 0xe2, 0xe3,
+0xe4, 0xe5, 0xe6, 0xe7, 0xe8, 0xe9, 0xea, 0xeb,
+0xec, 0xed, 0xee, 0xef, 0xf0, 0xf1, 0xf2, 0xf3,
+0xf4, 0xf5, 0xf6, 0xf7, 0xf8, 0xf9, 0xfa, 0xfb,
+0xfc, 0xfd, 0xfe, 0xff, 0xff, 0xff, 0xff, 0xff,
+0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff
+};
+
+/* Division table for ( 65535/x ); x = 0 to 512 */
+const M4VIFI_UInt16  M4VIFI_DivTable[512]
+= {
+0, 65535, 32768, 21845, 16384, 13107, 10922, 9362,
+8192, 7281, 6553, 5957, 5461, 5041, 4681, 4369,
+4096, 3855, 3640, 3449, 3276, 3120, 2978, 2849,
+2730, 2621, 2520, 2427, 2340, 2259, 2184, 2114,
+2048, 1985, 1927, 1872, 1820, 1771, 1724, 1680,
+1638, 1598, 1560, 1524, 1489, 1456, 1424, 1394,
+1365, 1337, 1310, 1285, 1260, 1236, 1213, 1191,
+1170, 1149, 1129, 1110, 1092, 1074, 1057, 1040,
+1024, 1008, 992, 978, 963, 949, 936, 923,
+910, 897, 885, 873, 862, 851, 840, 829,
+819, 809, 799, 789, 780, 771, 762, 753,
+744, 736, 728, 720, 712, 704, 697, 689,
+682, 675, 668, 661, 655, 648, 642, 636,
+630, 624, 618, 612, 606, 601, 595, 590,
+585, 579, 574, 569, 564, 560, 555, 550,
+546, 541, 537, 532, 528, 524, 520, 516,
+512, 508, 504, 500, 496, 492, 489, 485,
+481, 478, 474, 471, 468, 464, 461, 458,
+455, 451, 448, 445, 442, 439, 436, 434,
+431, 428, 425, 422, 420, 417, 414, 412,
+409, 407, 404, 402, 399, 397, 394, 392,
+390, 387, 385, 383, 381, 378, 376, 374,
+372, 370, 368, 366, 364, 362, 360, 358,
+356, 354, 352, 350, 348, 346, 344, 343,
+341, 339, 337, 336, 334, 332, 330, 329,
+327, 326, 324, 322, 321, 319, 318, 316,
+315, 313, 312, 310, 309, 307, 306, 304,
+303, 302, 300, 299, 297, 296, 295, 293,
+292, 291, 289, 288, 287, 286, 284, 283,
+282, 281, 280, 278, 277, 276, 275, 274,
+273, 271, 270, 269, 268, 267, 266, 265,
+264, 263, 262, 261, 260, 259, 258, 257,
+256, 255, 254, 253, 252, 251, 250, 249,
+248, 247, 246, 245, 244, 243, 242, 241,
+240, 240, 239, 238, 237, 236, 235, 234,
+234, 233, 232, 231, 230, 229, 229, 228,
+227, 226, 225, 225, 224, 223, 222, 222,
+221, 220, 219, 219, 218, 217, 217, 216,
+215, 214, 214, 213, 212, 212, 211, 210,
+210, 209, 208, 208, 207, 206, 206, 205,
+204, 204, 203, 202, 202, 201, 201, 200,
+199, 199, 198, 197, 197, 196, 196, 195,
+195, 194, 193, 193, 192, 192, 191, 191,
+190, 189, 189, 188, 188, 187, 187, 186,
+186, 185, 185, 184, 184, 183, 183, 182,
+182, 181, 181, 180, 180, 179, 179, 178,
+178, 177, 177, 176, 176, 175, 175, 174,
+174, 173, 173, 172, 172, 172, 171, 171,
+170, 170, 169, 169, 168, 168, 168, 167,
+167, 166, 166, 165, 165, 165, 164, 164,
+163, 163, 163, 162, 162, 161, 161, 161,
+160, 160, 159, 159, 159, 158, 158, 157,
+157, 157, 156, 156, 156, 155, 155, 154,
+154, 154, 153, 153, 153, 152, 152, 152,
+151, 151, 151, 150, 150, 149, 149, 149,
+148, 148, 148, 147, 147, 147, 146, 146,
+146, 145, 145, 145, 144, 144, 144, 144,
+143, 143, 143, 142, 142, 142, 141, 141,
+141, 140, 140, 140, 140, 139, 139, 139,
+138, 138, 138, 137, 137, 137, 137, 136,
+136, 136, 135, 135, 135, 135, 134, 134,
+134, 134, 133, 133, 133, 132, 132, 132,
+132, 131, 131, 131, 131, 130, 130, 130,
+130, 129, 129, 129, 129, 128, 128, 128
+};
+
+const M4VIFI_Int32  const_storage1[8]
+= {
+0x00002568, 0x00003343,0x00000649,0x00000d0f, 0x0000D86C, 0x0000D83B, 0x00010000, 0x00010000
+};
+
+const M4VIFI_Int32  const_storage[8]
+= {
+0x00002568, 0x00003343, 0x1BF800, 0x00000649, 0x00000d0f, 0x110180, 0x40cf, 0x22BE00
+};
+
+
+const M4VIFI_UInt16  *M4VIFI_DivTable_zero
+ = &M4VIFI_DivTable[0];
+
+const M4VIFI_UInt8   *M4VIFI_ClipTable_zero
+ = &M4VIFI_ClipTable[500];
+
+M4VIFI_UInt8 M4VIFI_YUV420PlanarToYUV420Semiplanar(void *user_data,
+    M4VIFI_ImagePlane *PlaneIn, M4VIFI_ImagePlane *PlaneOut ) {
+
+    M4VIFI_UInt32 i;
+    M4VIFI_UInt8 *p_buf_src, *p_buf_dest, *p_buf_src_u, *p_buf_src_v;
+    M4VIFI_UInt8    return_code = M4VIFI_OK;
+
+    /* the filter is implemented with the assumption that the width is equal to stride */
+    if(PlaneIn[0].u_width != PlaneIn[0].u_stride)
+        return M4VIFI_INVALID_PARAM;
+
+    /* The input Y Plane is the same as the output Y Plane */
+    p_buf_src = &(PlaneIn[0].pac_data[PlaneIn[0].u_topleft]);
+    p_buf_dest = &(PlaneOut[0].pac_data[PlaneOut[0].u_topleft]);
+    memcpy((void *)p_buf_dest,(void *)p_buf_src ,
+        PlaneOut[0].u_width * PlaneOut[0].u_height);
+
+    /* The U and V components are planar. The need to be made interleaved */
+    p_buf_src_u = &(PlaneIn[1].pac_data[PlaneIn[1].u_topleft]);
+    p_buf_src_v = &(PlaneIn[2].pac_data[PlaneIn[2].u_topleft]);
+    p_buf_dest  = &(PlaneOut[1].pac_data[PlaneOut[1].u_topleft]);
+
+    for(i = 0; i < PlaneOut[1].u_width*PlaneOut[1].u_height; i++)
+    {
+        *p_buf_dest++ = *p_buf_src_u++;
+        *p_buf_dest++ = *p_buf_src_v++;
+    }
+    return return_code;
+}
+
+M4VIFI_UInt8 M4VIFI_SemiplanarYUV420toYUV420(void *user_data,
+    M4VIFI_ImagePlane *PlaneIn, M4VIFI_ImagePlane *PlaneOut ) {
+
+     M4VIFI_UInt32 i;
+     M4VIFI_UInt8 *p_buf_src, *p_buf_dest, *p_buf_src_u, *p_buf_src_v;
+     M4VIFI_UInt8 *p_buf_dest_u,*p_buf_dest_v,*p_buf_src_uv;
+     M4VIFI_UInt8     return_code = M4VIFI_OK;
+
+     /* the filter is implemented with the assumption that the width is equal to stride */
+     if(PlaneIn[0].u_width != PlaneIn[0].u_stride)
+        return M4VIFI_INVALID_PARAM;
+
+     /* The input Y Plane is the same as the output Y Plane */
+     p_buf_src = &(PlaneIn[0].pac_data[PlaneIn[0].u_topleft]);
+     p_buf_dest = &(PlaneOut[0].pac_data[PlaneOut[0].u_topleft]);
+     memcpy((void *)p_buf_dest,(void *)p_buf_src ,
+         PlaneOut[0].u_width * PlaneOut[0].u_height);
+
+     /* The U and V components are planar. The need to be made interleaved */
+     p_buf_src_uv = &(PlaneIn[1].pac_data[PlaneIn[1].u_topleft]);
+     p_buf_dest_u  = &(PlaneOut[1].pac_data[PlaneOut[1].u_topleft]);
+     p_buf_dest_v  = &(PlaneOut[2].pac_data[PlaneOut[2].u_topleft]);
+
+     for(i = 0; i < PlaneOut[1].u_width*PlaneOut[1].u_height; i++)
+     {
+        *p_buf_dest_u++ = *p_buf_src_uv++;
+        *p_buf_dest_v++ = *p_buf_src_uv++;
+     }
+     return return_code;
+}
+
+
+/**
+ ******************************************************************************
+ * prototype    M4VSS3GPP_externalVideoEffectColor(M4OSA_Void *pFunctionContext,
+ *                                                  M4VIFI_ImagePlane *PlaneIn,
+ *                                                  M4VIFI_ImagePlane *PlaneOut,
+ *                                                  M4VSS3GPP_ExternalProgress *pProgress,
+ *                                                  M4OSA_UInt32 uiEffectKind)
+ *
+ * @brief   This function apply a color effect on an input YUV420 planar frame
+ * @note
+ * @param   pFunctionContext(IN) Contains which color to apply (not very clean ...)
+ * @param   PlaneIn         (IN) Input YUV420 planar
+ * @param   PlaneOut        (IN/OUT) Output YUV420 planar
+ * @param   pProgress       (IN/OUT) Progress indication (0-100)
+ * @param   uiEffectKind    (IN) Unused
+ *
+ * @return  M4VIFI_OK:  No error
+ ******************************************************************************
+*/
+M4OSA_ERR M4VSS3GPP_externalVideoEffectColor(M4OSA_Void *pFunctionContext,
+            M4VIFI_ImagePlane *PlaneIn, M4VIFI_ImagePlane *PlaneOut,
+            M4VSS3GPP_ExternalProgress *pProgress, M4OSA_UInt32 uiEffectKind) {
+
+    M4VIFI_Int32 plane_number;
+    M4VIFI_UInt32 i,j;
+    M4VIFI_UInt8 *p_buf_src, *p_buf_dest;
+    M4xVSS_ColorStruct* ColorContext = (M4xVSS_ColorStruct*)pFunctionContext;
+
+    for (plane_number = 0; plane_number < 3; plane_number++)
+    {
+        p_buf_src =
+         &(PlaneIn[plane_number].pac_data[PlaneIn[plane_number].u_topleft]);
+
+        p_buf_dest =
+         &(PlaneOut[plane_number].pac_data[PlaneOut[plane_number].u_topleft]);
+        for (i = 0; i < PlaneOut[plane_number].u_height; i++)
+        {
+            /**
+             * Chrominance */
+            if(plane_number==1 || plane_number==2)
+            {
+                //switch ((M4OSA_UInt32)pFunctionContext) // commented because a structure for the effects context exist
+                switch (ColorContext->colorEffectType)
+                {
+                case M4xVSS_kVideoEffectType_BlackAndWhite:
+                    memset((void *)p_buf_dest,128,
+                     PlaneIn[plane_number].u_width);
+                    break;
+                case M4xVSS_kVideoEffectType_Pink:
+                    memset((void *)p_buf_dest,255,
+                     PlaneIn[plane_number].u_width);
+                    break;
+                case M4xVSS_kVideoEffectType_Green:
+                    memset((void *)p_buf_dest,0,
+                     PlaneIn[plane_number].u_width);
+                    break;
+                case M4xVSS_kVideoEffectType_Sepia:
+                    if(plane_number==1)
+                    {
+                        memset((void *)p_buf_dest,117,
+                         PlaneIn[plane_number].u_width);
+                    }
+                    else
+                    {
+                        memset((void *)p_buf_dest,139,
+                         PlaneIn[plane_number].u_width);
+                    }
+                    break;
+                case M4xVSS_kVideoEffectType_Negative:
+                    memcpy((void *)p_buf_dest,
+                     (void *)p_buf_src ,PlaneOut[plane_number].u_width);
+                    break;
+
+                case M4xVSS_kVideoEffectType_ColorRGB16:
+                    {
+                        M4OSA_UInt16 r = 0,g = 0,b = 0,y = 0,u = 0,v = 0;
+
+                        /*first get the r, g, b*/
+                        b = (ColorContext->rgb16ColorData &  0x001f);
+                        g = (ColorContext->rgb16ColorData &  0x07e0)>>5;
+                        r = (ColorContext->rgb16ColorData &  0xf800)>>11;
+
+                        /*keep y, but replace u and v*/
+                        if(plane_number==1)
+                        {
+                            /*then convert to u*/
+                            u = U16(r, g, b);
+                            memset((void *)p_buf_dest,(M4OSA_UInt8)u,
+                             PlaneIn[plane_number].u_width);
+                        }
+                        if(plane_number==2)
+                        {
+                            /*then convert to v*/
+                            v = V16(r, g, b);
+                            memset((void *)p_buf_dest,(M4OSA_UInt8)v,
+                             PlaneIn[plane_number].u_width);
+                        }
+                    }
+                    break;
+                case M4xVSS_kVideoEffectType_Gradient:
+                    {
+                        M4OSA_UInt16 r = 0,g = 0,b = 0,y = 0,u = 0,v = 0;
+
+                        /*first get the r, g, b*/
+                        b = (ColorContext->rgb16ColorData &  0x001f);
+                        g = (ColorContext->rgb16ColorData &  0x07e0)>>5;
+                        r = (ColorContext->rgb16ColorData &  0xf800)>>11;
+
+                        /*for color gradation*/
+                        b = (M4OSA_UInt16)( b - ((b*i)/PlaneIn[plane_number].u_height));
+                        g = (M4OSA_UInt16)(g - ((g*i)/PlaneIn[plane_number].u_height));
+                        r = (M4OSA_UInt16)(r - ((r*i)/PlaneIn[plane_number].u_height));
+
+                        /*keep y, but replace u and v*/
+                        if(plane_number==1)
+                        {
+                            /*then convert to u*/
+                            u = U16(r, g, b);
+                            memset((void *)p_buf_dest,(M4OSA_UInt8)u,
+                             PlaneIn[plane_number].u_width);
+                        }
+                        if(plane_number==2)
+                        {
+                            /*then convert to v*/
+                            v = V16(r, g, b);
+                            memset((void *)p_buf_dest,(M4OSA_UInt8)v,
+                             PlaneIn[plane_number].u_width);
+                        }
+                    }
+                    break;
+                default:
+                    return M4VIFI_INVALID_PARAM;
+                }
+            }
+            /**
+             * Luminance */
+            else
+            {
+                //switch ((M4OSA_UInt32)pFunctionContext)// commented because a structure for the effects context exist
+                switch (ColorContext->colorEffectType)
+                {
+                case M4xVSS_kVideoEffectType_Negative:
+                    for(j=0;j<PlaneOut[plane_number].u_width;j++)
+                    {
+                            p_buf_dest[j] = 255 - p_buf_src[j];
+                    }
+                    break;
+                default:
+                    memcpy((void *)p_buf_dest,
+                     (void *)p_buf_src ,PlaneOut[plane_number].u_width);
+                    break;
+                }
+            }
+            p_buf_src += PlaneIn[plane_number].u_stride;
+            p_buf_dest += PlaneOut[plane_number].u_stride;
+        }
+    }
+
+    return M4VIFI_OK;
+}
+
+/**
+ ******************************************************************************
+ * prototype    M4VSS3GPP_externalVideoEffectFraming(M4OSA_Void *pFunctionContext,
+ *                                                  M4VIFI_ImagePlane *PlaneIn,
+ *                                                  M4VIFI_ImagePlane *PlaneOut,
+ *                                                  M4VSS3GPP_ExternalProgress *pProgress,
+ *                                                  M4OSA_UInt32 uiEffectKind)
+ *
+ * @brief   This function add a fixed or animated image on an input YUV420 planar frame
+ * @note
+ * @param   pFunctionContext(IN) Contains which color to apply (not very clean ...)
+ * @param   PlaneIn         (IN) Input YUV420 planar
+ * @param   PlaneOut        (IN/OUT) Output YUV420 planar
+ * @param   pProgress       (IN/OUT) Progress indication (0-100)
+ * @param   uiEffectKind    (IN) Unused
+ *
+ * @return  M4VIFI_OK:  No error
+ ******************************************************************************
+*/
+M4OSA_ERR M4VSS3GPP_externalVideoEffectFraming(
+            M4OSA_Void *userData, M4VIFI_ImagePlane PlaneIn[3],
+            M4VIFI_ImagePlane *PlaneOut, M4VSS3GPP_ExternalProgress *pProgress,
+            M4OSA_UInt32 uiEffectKind ) {
+
+    M4VIFI_UInt32 x,y;
+
+    M4VIFI_UInt8 *p_in_Y = PlaneIn[0].pac_data;
+    M4VIFI_UInt8 *p_in_U = PlaneIn[1].pac_data;
+    M4VIFI_UInt8 *p_in_V = PlaneIn[2].pac_data;
+
+    M4xVSS_FramingStruct* Framing = M4OSA_NULL;
+    M4xVSS_FramingStruct* currentFraming = M4OSA_NULL;
+    M4VIFI_UInt8 *FramingRGB = M4OSA_NULL;
+
+    M4VIFI_UInt8 *p_out0;
+    M4VIFI_UInt8 *p_out1;
+    M4VIFI_UInt8 *p_out2;
+
+    M4VIFI_UInt32 topleft[2];
+
+    M4OSA_UInt8 transparent1 =
+     (M4OSA_UInt8)((TRANSPARENT_COLOR & 0xFF00)>>8);
+    M4OSA_UInt8 transparent2 = (M4OSA_UInt8)TRANSPARENT_COLOR;
+
+#ifndef DECODE_GIF_ON_SAVING
+    Framing = (M4xVSS_FramingStruct *)userData;
+    currentFraming = (M4xVSS_FramingStruct *)Framing->pCurrent;
+    FramingRGB = Framing->FramingRgb->pac_data;
+#endif /*DECODE_GIF_ON_SAVING*/
+
+#ifdef DECODE_GIF_ON_SAVING
+    M4OSA_ERR err;
+    Framing =
+     (M4xVSS_FramingStruct *)((M4xVSS_FramingContext*)userData)->aFramingCtx;
+    if(Framing == M4OSA_NULL)
+    {
+        ((M4xVSS_FramingContext*)userData)->clipTime = pProgress->uiOutputTime;
+        err = M4xVSS_internalDecodeGIF(userData);
+        if(M4NO_ERROR != err)
+        {
+            M4OSA_TRACE1_1("M4VSS3GPP_externalVideoEffectFraming: \
+                Error in M4xVSS_internalDecodeGIF: 0x%x", err);
+            return err;
+        }
+        Framing =
+         (M4xVSS_FramingStruct *)((M4xVSS_FramingContext*)userData)->aFramingCtx;
+        /* Initializes first GIF time */
+        ((M4xVSS_FramingContext*)userData)->current_gif_time =
+          pProgress->uiOutputTime;
+    }
+    currentFraming = (M4xVSS_FramingStruct *)Framing;
+    FramingRGB = Framing->FramingRgb->pac_data;
+#endif /*DECODE_GIF_ON_SAVING*/
+
+    /**
+     * Initialize input / output plane pointers */
+    p_in_Y += PlaneIn[0].u_topleft;
+    p_in_U += PlaneIn[1].u_topleft;
+    p_in_V += PlaneIn[2].u_topleft;
+
+    p_out0 = PlaneOut[0].pac_data;
+    p_out1 = PlaneOut[1].pac_data;
+    p_out2 = PlaneOut[2].pac_data;
+
+    /**
+     * Depending on time, initialize Framing frame to use */
+    if(Framing->previousClipTime == -1)
+    {
+        Framing->previousClipTime = pProgress->uiOutputTime;
+    }
+
+    /**
+     * If the current clip time has reach the duration of one frame of the framing picture
+     * we need to step to next framing picture */
+#ifdef DECODE_GIF_ON_SAVING
+    if(((M4xVSS_FramingContext*)userData)->b_animated == M4OSA_TRUE)
+    {
+        while((((M4xVSS_FramingContext*)userData)->current_gif_time + currentFraming->duration) < pProgress->uiOutputTime)
+        {
+            ((M4xVSS_FramingContext*)userData)->clipTime =
+             pProgress->uiOutputTime;
+
+            err = M4xVSS_internalDecodeGIF(userData);
+            if(M4NO_ERROR != err)
+            {
+                M4OSA_TRACE1_1("M4VSS3GPP_externalVideoEffectFraming: Error in M4xVSS_internalDecodeGIF: 0x%x", err);
+                return err;
+            }
+            if(currentFraming->duration != 0)
+            {
+                ((M4xVSS_FramingContext*)userData)->current_gif_time += currentFraming->duration;
+            }
+            else
+            {
+                ((M4xVSS_FramingContext*)userData)->current_gif_time +=
+                 pProgress->uiOutputTime - Framing->previousClipTime;
+            }
+            Framing = (M4xVSS_FramingStruct *)((M4xVSS_FramingContext*)userData)->aFramingCtx;
+            currentFraming = (M4xVSS_FramingStruct *)Framing;
+            FramingRGB = Framing->FramingRgb->pac_data;
+        }
+    }
+#else
+            Framing->pCurrent = currentFraming->pNext;
+            currentFraming = (M4xVSS_FramingStruct*)Framing->pCurrent;
+#endif /*DECODE_GIF_ON_SAVING*/
+
+    Framing->previousClipTime = pProgress->uiOutputTime;
+    FramingRGB = currentFraming->FramingRgb->pac_data;
+    topleft[0] = currentFraming->topleft_x;
+    topleft[1] = currentFraming->topleft_y;
+
+    for( x=0 ;x < PlaneIn[0].u_height ; x++)
+    {
+        for( y=0 ;y < PlaneIn[0].u_width ; y++)
+        {
+            /**
+             * To handle framing with input size != output size
+             * Framing is applyed if coordinates matches between framing/topleft and input plane */
+            if( y < (topleft[0] + currentFraming->FramingYuv[0].u_width)  &&
+                y >= topleft[0] &&
+                x < (topleft[1] + currentFraming->FramingYuv[0].u_height) &&
+                x >= topleft[1])
+            {
+
+                /*Alpha blending support*/
+                M4OSA_Float alphaBlending = 1;
+#ifdef DECODE_GIF_ON_SAVING
+                M4xVSS_internalEffectsAlphaBlending* alphaBlendingStruct =
+                 (M4xVSS_internalEffectsAlphaBlending*)((M4xVSS_FramingContext*)userData)->alphaBlendingStruct;
+#else
+                M4xVSS_internalEffectsAlphaBlending* alphaBlendingStruct =
+                 (M4xVSS_internalEffectsAlphaBlending*)((M4xVSS_FramingStruct*)userData)->alphaBlendingStruct;
+#endif //#ifdef DECODE_GIF_ON_SAVING
+
+                if(alphaBlendingStruct != M4OSA_NULL)
+                {
+                    if(pProgress->uiProgress < (M4OSA_UInt32)(alphaBlendingStruct->m_fadeInTime*10))
+                    {
+                        alphaBlending = ((M4OSA_Float)(alphaBlendingStruct->m_middle - alphaBlendingStruct->m_start)*pProgress->uiProgress/(alphaBlendingStruct->m_fadeInTime*10));
+                        alphaBlending += alphaBlendingStruct->m_start;
+                        alphaBlending /= 100;
+                    }
+                    else if(pProgress->uiProgress >= (M4OSA_UInt32)(alphaBlendingStruct->m_fadeInTime*10) && pProgress->uiProgress < 1000 - (M4OSA_UInt32)(alphaBlendingStruct->m_fadeOutTime*10))
+                    {
+                        alphaBlending = (M4OSA_Float)((M4OSA_Float)alphaBlendingStruct->m_middle/100);
+                    }
+                    else if(pProgress->uiProgress >= 1000 - (M4OSA_UInt32)(alphaBlendingStruct->m_fadeOutTime*10))
+                    {
+                        alphaBlending = ((M4OSA_Float)(alphaBlendingStruct->m_middle - alphaBlendingStruct->m_end))*(1000 - pProgress->uiProgress)/(alphaBlendingStruct->m_fadeOutTime*10);
+                        alphaBlending += alphaBlendingStruct->m_end;
+                        alphaBlending /= 100;
+                    }
+                }
+
+                /**/
+
+                if((*(FramingRGB)==transparent1) && (*(FramingRGB+1)==transparent2))
+                {
+                    *( p_out0+y+x*PlaneOut[0].u_stride)=(*(p_in_Y+y+x*PlaneIn[0].u_stride));
+                    *( p_out1+(y>>1)+(x>>1)*PlaneOut[1].u_stride)=(*(p_in_U+(y>>1)+(x>>1)*PlaneIn[1].u_stride));
+                    *( p_out2+(y>>1)+(x>>1)*PlaneOut[2].u_stride)=(*(p_in_V+(y>>1)+(x>>1)*PlaneIn[2].u_stride));
+                }
+                else
+                {
+                    *( p_out0+y+x*PlaneOut[0].u_stride)=(*(currentFraming->FramingYuv[0].pac_data+(y-topleft[0])+(x-topleft[1])*currentFraming->FramingYuv[0].u_stride))*alphaBlending;
+                    *( p_out0+y+x*PlaneOut[0].u_stride)+=(*(p_in_Y+y+x*PlaneIn[0].u_stride))*(1-alphaBlending);
+                    *( p_out1+(y>>1)+(x>>1)*PlaneOut[1].u_stride)=(*(currentFraming->FramingYuv[1].pac_data+((y-topleft[0])>>1)+((x-topleft[1])>>1)*currentFraming->FramingYuv[1].u_stride))*alphaBlending;
+                    *( p_out1+(y>>1)+(x>>1)*PlaneOut[1].u_stride)+=(*(p_in_U+(y>>1)+(x>>1)*PlaneIn[1].u_stride))*(1-alphaBlending);
+                    *( p_out2+(y>>1)+(x>>1)*PlaneOut[2].u_stride)=(*(currentFraming->FramingYuv[2].pac_data+((y-topleft[0])>>1)+((x-topleft[1])>>1)*currentFraming->FramingYuv[2].u_stride))*alphaBlending;
+                    *( p_out2+(y>>1)+(x>>1)*PlaneOut[2].u_stride)+=(*(p_in_V+(y>>1)+(x>>1)*PlaneIn[2].u_stride))*(1-alphaBlending);
+                }
+                if( PlaneIn[0].u_width < (topleft[0] + currentFraming->FramingYuv[0].u_width) &&
+                    y == PlaneIn[0].u_width-1)
+                {
+                    FramingRGB = FramingRGB + 2 * (topleft[0] + currentFraming->FramingYuv[0].u_width - PlaneIn[0].u_width + 1);
+                }
+                else
+                {
+                    FramingRGB = FramingRGB + 2;
+                }
+            }
+            /**
+             * Just copy input plane to output plane */
+            else
+            {
+                *( p_out0+y+x*PlaneOut[0].u_stride)=*(p_in_Y+y+x*PlaneIn[0].u_stride);
+                *( p_out1+(y>>1)+(x>>1)*PlaneOut[1].u_stride)=*(p_in_U+(y>>1)+(x>>1)*PlaneIn[1].u_stride);
+                *( p_out2+(y>>1)+(x>>1)*PlaneOut[2].u_stride)=*(p_in_V+(y>>1)+(x>>1)*PlaneIn[2].u_stride);
+            }
+        }
+    }
+
+#ifdef DECODE_GIF_ON_SAVING
+    if(pProgress->bIsLast == M4OSA_TRUE
+        && (M4OSA_Bool)((M4xVSS_FramingContext*)userData)->b_IsFileGif == M4OSA_TRUE)
+    {
+        M4xVSS_internalDecodeGIF_Cleaning((M4xVSS_FramingContext*)userData);
+    }
+#endif /*DECODE_GIF_ON_SAVING*/
+    return M4VIFI_OK;
+}
+
+
/**
 ******************************************************************************
 * prototype    M4VSS3GPP_externalVideoEffectFifties(M4OSA_Void *pFunctionContext,
 *                                                  M4VIFI_ImagePlane *PlaneIn,
 *                                                  M4VIFI_ImagePlane *PlaneOut,
 *                                                  M4VSS3GPP_ExternalProgress *pProgress,
 *                                                  M4OSA_UInt32 uiEffectKind)
 *
 * @brief   This function makes a video look as if it was taken in the fifties:
 *          constant (sepia-like) chrominance, a random vertical displacement of
 *          the luma rows, a random vertical stripe and horizontal dark lines.
 * @note    The random state (shift/stripe values and timing) lives in the
 *          M4xVSS_FiftiesStruct context and is refreshed every
 *          fiftiesEffectDuration milliseconds.
 * @param   pUserData       (IN) Context (M4xVSS_FiftiesStruct*)
 * @param   pPlaneIn        (IN) Input YUV420 planar
 * @param   pPlaneOut       (IN/OUT) Output YUV420 planar
 * @param   pProgress       (IN/OUT) Progress indication (0-100)
 * @param   uiEffectKind    (IN) Unused
 *
 * @return  M4VIFI_OK:          No error
 * @return  M4ERR_PARAMETER:    pFiftiesData, pPlaneOut or pProgress are NULL (DEBUG only)
 ******************************************************************************
*/
M4OSA_ERR M4VSS3GPP_externalVideoEffectFifties(
    M4OSA_Void *pUserData, M4VIFI_ImagePlane *pPlaneIn,
    M4VIFI_ImagePlane *pPlaneOut, M4VSS3GPP_ExternalProgress *pProgress,
    M4OSA_UInt32 uiEffectKind )
{
    M4VIFI_UInt32 x, y, xShift;
    M4VIFI_UInt8 *pInY = pPlaneIn[0].pac_data;
    M4VIFI_UInt8 *pOutY, *pInYbegin;
    M4VIFI_UInt8 *pInCr,* pOutCr;
    M4VIFI_Int32 plane_number;

    /* Internal context: random values and timing state for this effect */
    M4xVSS_FiftiesStruct* p_FiftiesData = (M4xVSS_FiftiesStruct *)pUserData;

    /* Initialize input / output plane pointers */
    pInY += pPlaneIn[0].u_topleft;
    pOutY = pPlaneOut[0].pac_data;
    pInYbegin  = pInY;

    /* First call for this clip (previousClipTime < 0): seed the random
     * generator and draw the initial shift/stripe values */
    if(p_FiftiesData->previousClipTime < 0)
    {
        M4OSA_randInit();
        M4OSA_rand((M4OSA_Int32*)&(p_FiftiesData->shiftRandomValue), (pPlaneIn[0].u_height) >> 4);
        M4OSA_rand((M4OSA_Int32*)&(p_FiftiesData->stripeRandomValue), (pPlaneIn[0].u_width)<< 2);
        p_FiftiesData->previousClipTime = pProgress->uiOutputTime;
    }

    /* Choose new random values if we have reached the duration of a partial effect */
    else if( (pProgress->uiOutputTime - p_FiftiesData->previousClipTime) > p_FiftiesData->fiftiesEffectDuration)
    {
        M4OSA_rand((M4OSA_Int32*)&(p_FiftiesData->shiftRandomValue), (pPlaneIn[0].u_height) >> 4);
        M4OSA_rand((M4OSA_Int32*)&(p_FiftiesData->stripeRandomValue), (pPlaneIn[0].u_width)<< 2);
        p_FiftiesData->previousClipTime = pProgress->uiOutputTime;
    }

    /* Put the chrominance in sepia: fill U and V planes with constant values */
    for (plane_number = 1; plane_number < 3; plane_number++)
    {
        pInCr  = pPlaneIn[plane_number].pac_data  + pPlaneIn[plane_number].u_topleft;
        pOutCr = pPlaneOut[plane_number].pac_data + pPlaneOut[plane_number].u_topleft;

        for (x = 0; x < pPlaneOut[plane_number].u_height; x++)
        {
            if (1 == plane_number)
                memset((void *)pOutCr, 117,pPlaneIn[plane_number].u_width); /* U value */
            else
                memset((void *)pOutCr, 139,pPlaneIn[plane_number].u_width); /* V value */

            pInCr  += pPlaneIn[plane_number].u_stride;
            pOutCr += pPlaneOut[plane_number].u_stride;
        }
    }

    /* Compute the new luma pixel values, one output row at a time */
    for( x = 0 ; x < pPlaneIn[0].u_height ; x++)
    {
        M4VIFI_UInt8 *p_outYtmp, *p_inYtmp;

        /* Compute the xShift: random vertical displacement of the source row */
        if (0 == (p_FiftiesData->shiftRandomValue % 5 ))
            xShift = (x + p_FiftiesData->shiftRandomValue ) % (pPlaneIn[0].u_height - 1);
        else
            xShift = (x + (pPlaneIn[0].u_height - p_FiftiesData->shiftRandomValue) ) % (pPlaneIn[0].u_height - 1);

        /* Initialize the pointers */
        p_outYtmp = pOutY + 1;                                    /* yShift of 1 pixel */
        p_inYtmp  = pInYbegin + (xShift * pPlaneIn[0].u_stride);  /* Apply the xShift */

        for( y = 0 ; y < pPlaneIn[0].u_width ; y++)
        {
            /* Set Y value */
            if (xShift > (pPlaneIn[0].u_height - 4))
                *p_outYtmp = 40;        /* Add some horizontal black lines between the two parts of the image */
            else if ( y == p_FiftiesData->stripeRandomValue)
                *p_outYtmp = 90;        /* Add a random vertical line for the bulk */
            else
                *p_outYtmp = *p_inYtmp;


            /* Go to the next pixel */
            p_outYtmp++;
            p_inYtmp++;

            /* Wrap the output pointer back to the start of the line for the
             * last pixel: this implements the 1-pixel horizontal shift above */
            if (y == (pPlaneIn[0].u_width - 2))
                p_outYtmp = pOutY;
        }

        /* Go to the next line */
        pOutY += pPlaneOut[0].u_stride;
    }

    return M4VIFI_OK;
}
+
+unsigned char M4VFL_modifyLumaWithScale(M4ViComImagePlane *plane_in,
+                                        M4ViComImagePlane *plane_out,
+                                        unsigned long lum_factor,
+                                        void *user_data)
+{
+    unsigned short *p_src, *p_dest, *p_src_line, *p_dest_line;
+    unsigned char *p_csrc, *p_cdest, *p_csrc_line, *p_cdest_line;
+    unsigned long pix_src;
+    unsigned long u_outpx, u_outpx2;
+    unsigned long u_width, u_stride, u_stride_out,u_height, pix;
+    long i, j;
+
+    /* copy or filter chroma */
+    u_width = plane_in[1].u_width;
+    u_height = plane_in[1].u_height;
+    u_stride = plane_in[1].u_stride;
+    u_stride_out = plane_out[1].u_stride;
+    p_cdest_line = (unsigned char *) &plane_out[1].pac_data[plane_out[1].u_topleft];
+    p_csrc_line = (unsigned char *) &plane_in[1].pac_data[plane_in[1].u_topleft];
+
+    if (lum_factor > 256)
+    {
+        p_cdest = (unsigned char *) &plane_out[2].pac_data[plane_out[2].u_topleft];
+        p_csrc = (unsigned char *) &plane_in[2].pac_data[plane_in[2].u_topleft];
+        /* copy chroma */
+        for (j = u_height; j != 0; j--)
+        {
+            for (i = u_width; i != 0; i--)
+            {
+                memcpy((void *)p_cdest_line, (void *)p_csrc_line, u_width);
+                memcpy((void *)p_cdest, (void *)p_csrc, u_width);
+            }
+            p_cdest_line += u_stride_out;
+            p_cdest += u_stride_out;
+            p_csrc_line += u_stride;
+            p_csrc += u_stride;
+        }
+    }
+    else
+    {
+        /* filter chroma */
+        pix = (1024 - lum_factor) << 7;
+        for (j = u_height; j != 0; j--)
+        {
+            p_cdest = p_cdest_line;
+            p_csrc = p_csrc_line;
+            for (i = u_width; i != 0; i--)
+            {
+                *p_cdest++ = ((pix + (*p_csrc++ & 0xFF) * lum_factor) >> LUM_FACTOR_MAX);
+            }
+            p_cdest_line += u_stride_out;
+            p_csrc_line += u_stride;
+        }
+        p_cdest_line = (unsigned char *) &plane_out[2].pac_data[plane_out[2].u_topleft];
+        p_csrc_line = (unsigned char *) &plane_in[2].pac_data[plane_in[2].u_topleft];
+        for (j = u_height; j != 0; j--)
+        {
+            p_cdest = p_cdest_line;
+            p_csrc = p_csrc_line;
+            for (i = u_width; i != 0; i--)
+            {
+                *p_cdest++ = ((pix + (*p_csrc & 0xFF) * lum_factor) >> LUM_FACTOR_MAX);
+            }
+            p_cdest_line += u_stride_out;
+            p_csrc_line += u_stride;
+        }
+    }
+    /* apply luma factor */
+    u_width = plane_in[0].u_width;
+    u_height = plane_in[0].u_height;
+    u_stride = (plane_in[0].u_stride >> 1);
+    u_stride_out = (plane_out[0].u_stride >> 1);
+    p_dest = (unsigned short *) &plane_out[0].pac_data[plane_out[0].u_topleft];
+    p_src = (unsigned short *) &plane_in[0].pac_data[plane_in[0].u_topleft];
+    p_dest_line = p_dest;
+    p_src_line = p_src;
+
+    for (j = u_height; j != 0; j--)
+    {
+        p_dest = p_dest_line;
+        p_src = p_src_line;
+        for (i = (u_width >> 1); i != 0; i--)
+        {
+            pix_src = (unsigned long) *p_src++;
+            pix = pix_src & 0xFF;
+            u_outpx = ((pix * lum_factor) >> LUM_FACTOR_MAX);
+            pix = ((pix_src & 0xFF00) >> 8);
+            u_outpx2 = (((pix * lum_factor) >> LUM_FACTOR_MAX)<< 8) ;
+            *p_dest++ = (unsigned short) (u_outpx2 | u_outpx);
+        }
+        p_dest_line += u_stride_out;
+        p_src_line += u_stride;
+    }
+
+    return 0;
+}
+
+/**
+ ******************************************************************************
+ * unsigned char M4VFL_applyCurtain(M4ViComImagePlane *plane_in, M4ViComImagePlane *plane_out, M4VFL_CurtainParam *curtain_factor, void *user_data)
+ * @author  Beatrice Nezot (PHILIPS Software Vision)
+ * @brief   This function applies a black curtain onto a YUV420 image.
+ * @note    THis function writes black lines either at the top of the image or at
+ *          the bottom of the image. The other lines are copied from the source image.
+ *          First the number of black lines is compted and is rounded to an even integer.
+ * @param   plane_in: (IN) pointer to the 3 image planes of the source image
+ * @param   plane_out: (OUT) pointer to the 3 image planes of the destination image
+ * @param   user_data: (IN) pointer to some user_data
+ * @param   curtain_factor: (IN) structure with the parameters of the curtain (nb of black lines and if at the top/bottom of the image)
+ * @return  0: there is no error
+ ******************************************************************************
+*/
+unsigned char M4VFL_applyCurtain(M4ViComImagePlane *plane_in, M4ViComImagePlane *plane_out, M4VFL_CurtainParam *curtain_factor, void *user_data)
+{
+    unsigned char *p_src, *p_srcu, *p_srcv,*p_dest, *p_destu, *p_destv;
+    unsigned long u_width, u_widthuv, u_stride_out, u_stride_out_uv,u_stride, u_stride_uv,u_height;
+    long j;
+    unsigned long nb_black_lines;
+
+    u_width = plane_in[0].u_width;
+    u_height = plane_in[0].u_height;
+    u_stride_out = plane_out[0].u_stride ;
+    u_stride_out_uv = plane_out[1].u_stride;
+    p_dest = (unsigned char *) &plane_out[0].pac_data[plane_out[0].u_topleft];
+    p_destu = (unsigned char *) &plane_out[1].pac_data[plane_out[1].u_topleft];
+    p_destv = (unsigned char *) &plane_out[2].pac_data[plane_out[2].u_topleft];
+    u_widthuv = u_width >> 1;
+    u_stride = plane_in[0].u_stride ;
+    u_stride_uv = plane_in[1].u_stride;
+
+    /* nb_black_lines is even */
+    nb_black_lines = (unsigned long) ((curtain_factor->nb_black_lines >> 1) << 1);
+
+    if (curtain_factor->top_is_black)
+    {
+        /* black lines first */
+        /* compute index of of first source pixels (Y, U and V) to copy after the black lines */
+        p_src = (unsigned char *) &plane_in[0].pac_data[plane_in[0].u_topleft + ((nb_black_lines) * plane_in[0].u_stride)];
+        p_srcu = (unsigned char *) &plane_in[1].pac_data[plane_in[1].u_topleft + (((nb_black_lines) * plane_in[1].u_stride) >> 1)];
+        p_srcv = (unsigned char *) &plane_in[2].pac_data[plane_in[2].u_topleft+ (((nb_black_lines) * plane_in[2].u_stride) >> 1)];
+
+        /* write black lines */
+        for (j = (nb_black_lines >> 1); j != 0; j--)
+        {
+            memset((void *)p_dest, 0,u_width);
+            p_dest += u_stride_out;
+            memset((void *)p_dest, 0,u_width);
+            p_dest += u_stride_out;
+            memset((void *)p_destu, 128,u_widthuv);
+            memset((void *)p_destv, 128,u_widthuv);
+            p_destu += u_stride_out_uv;
+            p_destv += u_stride_out_uv;
+        }
+
+        /* copy from source image */
+        for (j = (u_height - nb_black_lines) >> 1; j != 0; j--)
+        {
+            memcpy((void *)p_dest, (void *)p_src, u_width);
+            p_dest += u_stride_out;
+            p_src += u_stride;
+            memcpy((void *)p_dest, (void *)p_src, u_width);
+            p_dest += u_stride_out;
+            p_src += u_stride;
+            memcpy((void *)p_destu, (void *)p_srcu, u_widthuv);
+            memcpy((void *)p_destv, (void *)p_srcv, u_widthuv);
+            p_destu += u_stride_out_uv;
+            p_destv += u_stride_out_uv;
+            p_srcu += u_stride_uv;
+            p_srcv += u_stride_uv;
+        }
+    }
+    else
+    {
+        /* black lines at the bottom of the image */
+        p_src = (unsigned char *) &plane_in[0].pac_data[plane_in[0].u_topleft];
+        p_srcu = (unsigned char *) &plane_in[1].pac_data[plane_in[1].u_topleft];
+        p_srcv = (unsigned char *) &plane_in[2].pac_data[plane_in[2].u_topleft];
+
+        /* copy from source image image */
+        for (j = (nb_black_lines >> 1); j != 0; j--)
+        {
+            memcpy((void *)p_dest, (void *)p_src, u_width);
+            p_dest += u_stride_out;
+            p_src += u_stride;
+            memcpy((void *)p_dest, (void *)p_src, u_width);
+            p_dest += u_stride_out;
+            p_src += u_stride;
+            memcpy((void *)p_destu, (void *)p_srcu, u_widthuv);
+            memcpy((void *)p_destv, (void *)p_srcv, u_widthuv);
+            p_destu += u_stride_out_uv;
+            p_destv += u_stride_out_uv;
+            p_srcu += u_stride_uv;
+            p_srcv += u_stride_uv;
+        }
+
+        /* write black lines*/
+        /* the pointers to p_dest, p_destu and p_destv are used through the two loops "for" */
+        for (j = (u_height - nb_black_lines) >> 1; j != 0; j--)
+        {
+            memset((void *)p_dest, 0,u_width);
+            p_dest += u_stride_out;
+            memset((void *)p_dest, 0,u_width);
+            p_dest += u_stride_out;
+            memset((void *)p_destu, 128,u_widthuv);
+            memset((void *)p_destv, 128,u_widthuv);
+            p_destu += u_stride_out_uv;
+            p_destv += u_stride_out_uv;
+        }
+    }
+
+    return 0;
+}
+
+
+/******************************************************************************
+ * prototype    M4OSA_ERR M4xVSS_internalConvertRGBtoYUV(M4xVSS_FramingStruct* framingCtx)
+ * @brief   This function converts an RGB565 plane to YUV420 planar
+ * @note    It is used only for framing effect
+ *          It allocates output YUV planes
+ * @param   framingCtx  (IN) The framing struct containing input RGB565 plane
+ *
+ * @return  M4NO_ERROR: No error
+ * @return  M4ERR_PARAMETER: At least one of the function parameters is null
+ * @return  M4ERR_ALLOC: Allocation error (no more memory)
+ ******************************************************************************
+*/
+M4OSA_ERR M4xVSS_internalConvertRGBtoYUV(M4xVSS_FramingStruct* framingCtx)
+{
+    M4OSA_ERR err;
+
+    /**
+     * Allocate output YUV planes */
+    framingCtx->FramingYuv = (M4VIFI_ImagePlane*)M4OSA_32bitAlignedMalloc(3*sizeof(M4VIFI_ImagePlane), M4VS, (M4OSA_Char*)"M4xVSS_internalConvertRGBtoYUV: Output plane YUV");
+    if(framingCtx->FramingYuv == M4OSA_NULL)
+    {
+        M4OSA_TRACE1_0("Allocation error in M4xVSS_internalConvertRGBtoYUV");
+        return M4ERR_ALLOC;
+    }
+    framingCtx->FramingYuv[0].u_width = framingCtx->FramingRgb->u_width;
+    framingCtx->FramingYuv[0].u_height = framingCtx->FramingRgb->u_height;
+    framingCtx->FramingYuv[0].u_topleft = 0;
+    framingCtx->FramingYuv[0].u_stride = framingCtx->FramingRgb->u_width;
+    framingCtx->FramingYuv[0].pac_data = (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc((framingCtx->FramingYuv[0].u_width*framingCtx->FramingYuv[0].u_height*3)>>1, M4VS, (M4OSA_Char*)"Alloc for the Convertion output YUV");;
+    if(framingCtx->FramingYuv[0].pac_data == M4OSA_NULL)
+    {
+        M4OSA_TRACE1_0("Allocation error in M4xVSS_internalConvertRGBtoYUV");
+        return M4ERR_ALLOC;
+    }
+    framingCtx->FramingYuv[1].u_width = (framingCtx->FramingRgb->u_width)>>1;
+    framingCtx->FramingYuv[1].u_height = (framingCtx->FramingRgb->u_height)>>1;
+    framingCtx->FramingYuv[1].u_topleft = 0;
+    framingCtx->FramingYuv[1].u_stride = (framingCtx->FramingRgb->u_width)>>1;
+    framingCtx->FramingYuv[1].pac_data = framingCtx->FramingYuv[0].pac_data + framingCtx->FramingYuv[0].u_width * framingCtx->FramingYuv[0].u_height;
+    framingCtx->FramingYuv[2].u_width = (framingCtx->FramingRgb->u_width)>>1;
+    framingCtx->FramingYuv[2].u_height = (framingCtx->FramingRgb->u_height)>>1;
+    framingCtx->FramingYuv[2].u_topleft = 0;
+    framingCtx->FramingYuv[2].u_stride = (framingCtx->FramingRgb->u_width)>>1;
+    framingCtx->FramingYuv[2].pac_data = framingCtx->FramingYuv[1].pac_data + framingCtx->FramingYuv[1].u_width * framingCtx->FramingYuv[1].u_height;
+
+    /**
+     * Convert input RGB 565 to YUV 420 to be able to merge it with output video in framing effect */
+    err = M4VIFI_xVSS_RGB565toYUV420(M4OSA_NULL, framingCtx->FramingRgb, framingCtx->FramingYuv);
+    if(err != M4NO_ERROR)
+    {
+        M4OSA_TRACE1_1("M4xVSS_internalConvertRGBtoYUV: error when converting from RGB to YUV: 0x%x\n", err);
+    }
+
+    framingCtx->duration = 0;
+    framingCtx->previousClipTime = -1;
+    framingCtx->previewOffsetClipTime = -1;
+
+    /**
+     * Only one element in the chained list (no animated image with RGB buffer...) */
+    framingCtx->pCurrent = framingCtx;
+    framingCtx->pNext = framingCtx;
+
+    return M4NO_ERROR;
+}
+
+/******************************************************************************
+ * prototype    M4OSA_ERR M4xVSS_internalConvertRGB888toYUV(M4xVSS_FramingStruct* framingCtx)
+ * @brief   This function converts an RGB888 plane to YUV420 planar
+ * @note    It is used only for framing effect
+ *          It allocates output YUV planes
+ * @param   framingCtx  (IN) The framing struct containing input RGB888 plane
+ *
+ * @return  M4NO_ERROR: No error
+ * @return  M4ERR_PARAMETER: At least one of the function parameters is null
+ * @return  M4ERR_ALLOC: Allocation error (no more memory)
+ ******************************************************************************
+*/
+M4OSA_ERR M4xVSS_internalConvertRGB888toYUV(M4xVSS_FramingStruct* framingCtx)
+{
+    M4OSA_ERR err;
+
+    /**
+     * Allocate output YUV planes */
+    framingCtx->FramingYuv = (M4VIFI_ImagePlane*)M4OSA_32bitAlignedMalloc(3*sizeof(M4VIFI_ImagePlane), M4VS, (M4OSA_Char*)"M4xVSS_internalConvertRGBtoYUV: Output plane YUV");
+    if(framingCtx->FramingYuv == M4OSA_NULL)
+    {
+        M4OSA_TRACE1_0("Allocation error in M4xVSS_internalConvertRGBtoYUV");
+        return M4ERR_ALLOC;
+    }
+    framingCtx->FramingYuv[0].u_width = framingCtx->FramingRgb->u_width;
+    framingCtx->FramingYuv[0].u_height = framingCtx->FramingRgb->u_height;
+    framingCtx->FramingYuv[0].u_topleft = 0;
+    framingCtx->FramingYuv[0].u_stride = framingCtx->FramingRgb->u_width;
+    framingCtx->FramingYuv[0].pac_data = (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc((framingCtx->FramingYuv[0].u_width*framingCtx->FramingYuv[0].u_height*3)>>1, M4VS, (M4OSA_Char*)"Alloc for the Convertion output YUV");;
+    if(framingCtx->FramingYuv[0].pac_data == M4OSA_NULL)
+    {
+        M4OSA_TRACE1_0("Allocation error in M4xVSS_internalConvertRGBtoYUV");
+        return M4ERR_ALLOC;
+    }
+    framingCtx->FramingYuv[1].u_width = (framingCtx->FramingRgb->u_width)>>1;
+    framingCtx->FramingYuv[1].u_height = (framingCtx->FramingRgb->u_height)>>1;
+    framingCtx->FramingYuv[1].u_topleft = 0;
+    framingCtx->FramingYuv[1].u_stride = (framingCtx->FramingRgb->u_width)>>1;
+    framingCtx->FramingYuv[1].pac_data = framingCtx->FramingYuv[0].pac_data + framingCtx->FramingYuv[0].u_width * framingCtx->FramingYuv[0].u_height;
+    framingCtx->FramingYuv[2].u_width = (framingCtx->FramingRgb->u_width)>>1;
+    framingCtx->FramingYuv[2].u_height = (framingCtx->FramingRgb->u_height)>>1;
+    framingCtx->FramingYuv[2].u_topleft = 0;
+    framingCtx->FramingYuv[2].u_stride = (framingCtx->FramingRgb->u_width)>>1;
+    framingCtx->FramingYuv[2].pac_data = framingCtx->FramingYuv[1].pac_data + framingCtx->FramingYuv[1].u_width * framingCtx->FramingYuv[1].u_height;
+
+    /**
+     * Convert input RGB888 to YUV 420 to be able to merge it with output video in framing effect */
+    err = M4VIFI_RGB888toYUV420(M4OSA_NULL, framingCtx->FramingRgb, framingCtx->FramingYuv);
+    if(err != M4NO_ERROR)
+    {
+        M4OSA_TRACE1_1("M4xVSS_internalConvertRGBtoYUV: error when converting from RGB to YUV: 0x%x\n", err);
+    }
+
+    framingCtx->duration = 0;
+    framingCtx->previousClipTime = -1;
+    framingCtx->previewOffsetClipTime = -1;
+
+    /**
+     * Only one element in the chained list (no animated image with RGB buffer...) */
+    framingCtx->pCurrent = framingCtx;
+    framingCtx->pNext = framingCtx;
+
+    return M4NO_ERROR;
+}
+
+/**
+ ******************************************************************************
+ * M4VIFI_UInt8 M4VIFI_RGB565toYUV420 (void *pUserData,
+ *                                   M4VIFI_ImagePlane *pPlaneIn,
+ *                                   M4VIFI_ImagePlane *pPlaneOut)
+ * @author  Patrice Martinez / Philips Digital Networks - MP4Net
+ * @brief   transform RGB565 image to a YUV420 image.
+ * @note    Convert RGB565 to YUV420,
+ *          Loop on each row ( 2 rows by 2 rows )
+ *              Loop on each column ( 2 col by 2 col )
+ *                  Get 4 RGB samples from input data and build 4 output Y samples
+ *                  and each single U & V data
+ *              end loop on col
+ *          end loop on row
+ * @param   pUserData: (IN) User Specific Data
+ * @param   pPlaneIn: (IN) Pointer to RGB565 Plane
+ * @param   pPlaneOut: (OUT) Pointer to  YUV420 buffer Plane
+ * @return  M4VIFI_OK: there is no error
+ * @return  M4VIFI_ILLEGAL_FRAME_HEIGHT: YUV Plane height is ODD
+ * @return  M4VIFI_ILLEGAL_FRAME_WIDTH:  YUV Plane width is ODD
+ ******************************************************************************
+*/
+M4VIFI_UInt8    M4VIFI_xVSS_RGB565toYUV420(void *pUserData, M4VIFI_ImagePlane *pPlaneIn,
+                                                      M4VIFI_ImagePlane *pPlaneOut)
+{
+    M4VIFI_UInt32   u32_width, u32_height;
+    M4VIFI_UInt32   u32_stride_Y, u32_stride2_Y, u32_stride_U, u32_stride_V;
+    M4VIFI_UInt32   u32_stride_rgb, u32_stride_2rgb;
+    M4VIFI_UInt32   u32_col, u32_row;
+
+    M4VIFI_Int32    i32_r00, i32_r01, i32_r10, i32_r11;
+    M4VIFI_Int32    i32_g00, i32_g01, i32_g10, i32_g11;
+    M4VIFI_Int32    i32_b00, i32_b01, i32_b10, i32_b11;
+    M4VIFI_Int32    i32_y00, i32_y01, i32_y10, i32_y11;
+    M4VIFI_Int32    i32_u00, i32_u01, i32_u10, i32_u11;
+    M4VIFI_Int32    i32_v00, i32_v01, i32_v10, i32_v11;
+    M4VIFI_UInt8    *pu8_yn, *pu8_ys, *pu8_u, *pu8_v;
+    M4VIFI_UInt8    *pu8_y_data, *pu8_u_data, *pu8_v_data;
+    M4VIFI_UInt8    *pu8_rgbn_data, *pu8_rgbn;
+    M4VIFI_UInt16   u16_pix1, u16_pix2, u16_pix3, u16_pix4;
+    M4VIFI_UInt8 count_null=0;
+
+    /* Check planes height are appropriate */
+    if( (pPlaneIn->u_height != pPlaneOut[0].u_height)           ||
+        (pPlaneOut[0].u_height != (pPlaneOut[1].u_height<<1))   ||
+        (pPlaneOut[0].u_height != (pPlaneOut[2].u_height<<1)))
+    {
+        return M4VIFI_ILLEGAL_FRAME_HEIGHT;
+    }
+
+    /* Check planes width are appropriate */
+    if( (pPlaneIn->u_width != pPlaneOut[0].u_width)         ||
+        (pPlaneOut[0].u_width != (pPlaneOut[1].u_width<<1)) ||
+        (pPlaneOut[0].u_width != (pPlaneOut[2].u_width<<1)))
+    {
+        return M4VIFI_ILLEGAL_FRAME_WIDTH;
+    }
+
+    /* Set the pointer to the beginning of the output data buffers */
+    pu8_y_data = pPlaneOut[0].pac_data + pPlaneOut[0].u_topleft;
+    pu8_u_data = pPlaneOut[1].pac_data + pPlaneOut[1].u_topleft;
+    pu8_v_data = pPlaneOut[2].pac_data + pPlaneOut[2].u_topleft;
+
+    /* Set the pointer to the beginning of the input data buffers */
+    pu8_rgbn_data   = pPlaneIn->pac_data + pPlaneIn->u_topleft;
+
+    /* Get the size of the output image */
+    u32_width = pPlaneOut[0].u_width;
+    u32_height = pPlaneOut[0].u_height;
+
+    /* Set the size of the memory jumps corresponding to row jump in each output plane */
+    u32_stride_Y = pPlaneOut[0].u_stride;
+    u32_stride2_Y = u32_stride_Y << 1;
+    u32_stride_U = pPlaneOut[1].u_stride;
+    u32_stride_V = pPlaneOut[2].u_stride;
+
+    /* Set the size of the memory jumps corresponding to row jump in input plane */
+    u32_stride_rgb = pPlaneIn->u_stride;
+    u32_stride_2rgb = u32_stride_rgb << 1;
+
+
+    /* Loop on each row of the output image, input coordinates are estimated from output ones */
+    /* Two YUV rows are computed at each pass */
+    for (u32_row = u32_height ;u32_row != 0; u32_row -=2)
+    {
+        /* Current Y plane row pointers */
+        pu8_yn = pu8_y_data;
+        /* Next Y plane row pointers */
+        pu8_ys = pu8_yn + u32_stride_Y;
+        /* Current U plane row pointer */
+        pu8_u = pu8_u_data;
+        /* Current V plane row pointer */
+        pu8_v = pu8_v_data;
+
+        pu8_rgbn = pu8_rgbn_data;
+
+        /* Loop on each column of the output image */
+        for (u32_col = u32_width; u32_col != 0 ; u32_col -=2)
+        {
+            /* Get four RGB 565 samples from input data */
+            u16_pix1 = *( (M4VIFI_UInt16 *) pu8_rgbn);
+            u16_pix2 = *( (M4VIFI_UInt16 *) (pu8_rgbn + CST_RGB_16_SIZE));
+            u16_pix3 = *( (M4VIFI_UInt16 *) (pu8_rgbn + u32_stride_rgb));
+            u16_pix4 = *( (M4VIFI_UInt16 *) (pu8_rgbn + u32_stride_rgb + CST_RGB_16_SIZE));
+
+            /* Unpack RGB565 to 8bit R, G, B */
+            /* (x,y) */
+            GET_RGB565(i32_b00,i32_g00,i32_r00,u16_pix1);
+            /* (x+1,y) */
+            GET_RGB565(i32_b10,i32_g10,i32_r10,u16_pix2);
+            /* (x,y+1) */
+            GET_RGB565(i32_b01,i32_g01,i32_r01,u16_pix3);
+            /* (x+1,y+1) */
+            GET_RGB565(i32_b11,i32_g11,i32_r11,u16_pix4);
+            /* If RGB is transparent color (0, 63, 0), we transform it to white (31,63,31) */
+            if(i32_b00 == 0 && i32_g00 == 63 && i32_r00 == 0)
+            {
+                i32_b00 = 31;
+                i32_r00 = 31;
+            }
+            if(i32_b10 == 0 && i32_g10 == 63 && i32_r10 == 0)
+            {
+                i32_b10 = 31;
+                i32_r10 = 31;
+            }
+            if(i32_b01 == 0 && i32_g01 == 63 && i32_r01 == 0)
+            {
+                i32_b01 = 31;
+                i32_r01 = 31;
+            }
+            if(i32_b11 == 0 && i32_g11 == 63 && i32_r11 == 0)
+            {
+                i32_b11 = 31;
+                i32_r11 = 31;
+            }
+            /* Convert RGB value to YUV */
+            i32_u00 = U16(i32_r00, i32_g00, i32_b00);
+            i32_v00 = V16(i32_r00, i32_g00, i32_b00);
+            /* luminance value */
+            i32_y00 = Y16(i32_r00, i32_g00, i32_b00);
+
+            i32_u10 = U16(i32_r10, i32_g10, i32_b10);
+            i32_v10 = V16(i32_r10, i32_g10, i32_b10);
+            /* luminance value */
+            i32_y10 = Y16(i32_r10, i32_g10, i32_b10);
+
+            i32_u01 = U16(i32_r01, i32_g01, i32_b01);
+            i32_v01 = V16(i32_r01, i32_g01, i32_b01);
+            /* luminance value */
+            i32_y01 = Y16(i32_r01, i32_g01, i32_b01);
+
+            i32_u11 = U16(i32_r11, i32_g11, i32_b11);
+            i32_v11 = V16(i32_r11, i32_g11, i32_b11);
+            /* luminance value */
+            i32_y11 = Y16(i32_r11, i32_g11, i32_b11);
+
+            /* Store luminance data */
+            pu8_yn[0] = (M4VIFI_UInt8)i32_y00;
+            pu8_yn[1] = (M4VIFI_UInt8)i32_y10;
+            pu8_ys[0] = (M4VIFI_UInt8)i32_y01;
+            pu8_ys[1] = (M4VIFI_UInt8)i32_y11;
+            *pu8_u = (M4VIFI_UInt8)((i32_u00 + i32_u01 + i32_u10 + i32_u11 + 2) >> 2);
+            *pu8_v = (M4VIFI_UInt8)((i32_v00 + i32_v01 + i32_v10 + i32_v11 + 2) >> 2);
+            /* Prepare for next column */
+            pu8_rgbn += (CST_RGB_16_SIZE<<1);
+            /* Update current Y plane line pointer*/
+            pu8_yn += 2;
+            /* Update next Y plane line pointer*/
+            pu8_ys += 2;
+            /* Update U plane line pointer*/
+            pu8_u ++;
+            /* Update V plane line pointer*/
+            pu8_v ++;
+        } /* End of horizontal scanning */
+
+        /* Prepare pointers for the next row */
+        pu8_y_data += u32_stride2_Y;
+        pu8_u_data += u32_stride_U;
+        pu8_v_data += u32_stride_V;
+        pu8_rgbn_data += u32_stride_2rgb;
+
+
+    } /* End of vertical scanning */
+
+    return M4VIFI_OK;
+}
+
+/***************************************************************************
+Proto:
+M4VIFI_UInt8    M4VIFI_RGB888toYUV420(void *pUserData, M4VIFI_ImagePlane *PlaneIn, M4VIFI_ImagePlane PlaneOut[3]);
+Author:     Patrice Martinez / Philips Digital Networks - MP4Net
+Purpose:    filling of the YUV420 plane from a BGR24 plane
+Abstract:   Loop on each row ( 2 rows by 2 rows )
+                Loop on each column ( 2 col by 2 col )
+                    Get 4 BGR samples from input data and build 4 output Y samples and each single U & V data
+                end loop on col
+            end loop on row
+
+In:         RGB24 plane
+InOut:      none
+Out:        array of 3 M4VIFI_ImagePlane structures
+Modified:   ML: RGB function modified to BGR.
+***************************************************************************/
+M4VIFI_UInt8 M4VIFI_RGB888toYUV420(void *pUserData, M4VIFI_ImagePlane *PlaneIn, M4VIFI_ImagePlane PlaneOut[3])
+{
+
+    M4VIFI_UInt32   u32_width, u32_height;
+    M4VIFI_UInt32   u32_stride_Y, u32_stride2_Y, u32_stride_U, u32_stride_V, u32_stride_rgb, u32_stride_2rgb;
+    M4VIFI_UInt32   u32_col, u32_row;
+
+    M4VIFI_Int32    i32_r00, i32_r01, i32_r10, i32_r11;
+    M4VIFI_Int32    i32_g00, i32_g01, i32_g10, i32_g11;
+    M4VIFI_Int32    i32_b00, i32_b01, i32_b10, i32_b11;
+    M4VIFI_Int32    i32_y00, i32_y01, i32_y10, i32_y11;
+    M4VIFI_Int32    i32_u00, i32_u01, i32_u10, i32_u11;
+    M4VIFI_Int32    i32_v00, i32_v01, i32_v10, i32_v11;
+    M4VIFI_UInt8    *pu8_yn, *pu8_ys, *pu8_u, *pu8_v;
+    M4VIFI_UInt8    *pu8_y_data, *pu8_u_data, *pu8_v_data;
+    M4VIFI_UInt8    *pu8_rgbn_data, *pu8_rgbn;
+
+    /* check sizes */
+    if( (PlaneIn->u_height != PlaneOut[0].u_height)         ||
+        (PlaneOut[0].u_height != (PlaneOut[1].u_height<<1)) ||
+        (PlaneOut[0].u_height != (PlaneOut[2].u_height<<1)))
+        return M4VIFI_ILLEGAL_FRAME_HEIGHT;
+
+    if( (PlaneIn->u_width != PlaneOut[0].u_width)       ||
+        (PlaneOut[0].u_width != (PlaneOut[1].u_width<<1))   ||
+        (PlaneOut[0].u_width != (PlaneOut[2].u_width<<1)))
+        return M4VIFI_ILLEGAL_FRAME_WIDTH;
+
+
+    /* set the pointer to the beginning of the output data buffers */
+    pu8_y_data  = PlaneOut[0].pac_data + PlaneOut[0].u_topleft;
+    pu8_u_data  = PlaneOut[1].pac_data + PlaneOut[1].u_topleft;
+    pu8_v_data  = PlaneOut[2].pac_data + PlaneOut[2].u_topleft;
+
+    /* idem for input buffer */
+    pu8_rgbn_data   = PlaneIn->pac_data + PlaneIn->u_topleft;
+
+    /* get the size of the output image */
+    u32_width   = PlaneOut[0].u_width;
+    u32_height  = PlaneOut[0].u_height;
+
+    /* set the size of the memory jumps corresponding to row jump in each output plane */
+    u32_stride_Y = PlaneOut[0].u_stride;
+    u32_stride2_Y= u32_stride_Y << 1;
+    u32_stride_U = PlaneOut[1].u_stride;
+    u32_stride_V = PlaneOut[2].u_stride;
+
+    /* idem for input plane */
+    u32_stride_rgb = PlaneIn->u_stride;
+    u32_stride_2rgb = u32_stride_rgb << 1;
+
+    /* loop on each row of the output image, input coordinates are estimated from output ones */
+    /* two YUV rows are computed at each pass */
+    for (u32_row = u32_height ;u32_row != 0; u32_row -=2)
+    {
+        /* update working pointers */
+        pu8_yn  = pu8_y_data;
+        pu8_ys  = pu8_yn + u32_stride_Y;
+
+        pu8_u   = pu8_u_data;
+        pu8_v   = pu8_v_data;
+
+        pu8_rgbn= pu8_rgbn_data;
+
+        /* loop on each column of the output image*/
+        for (u32_col = u32_width; u32_col != 0 ; u32_col -=2)
+        {
+            /* get RGB samples of 4 pixels */
+            GET_RGB24(i32_r00, i32_g00, i32_b00, pu8_rgbn, 0);
+            GET_RGB24(i32_r10, i32_g10, i32_b10, pu8_rgbn, CST_RGB_24_SIZE);
+            GET_RGB24(i32_r01, i32_g01, i32_b01, pu8_rgbn, u32_stride_rgb);
+            GET_RGB24(i32_r11, i32_g11, i32_b11, pu8_rgbn, u32_stride_rgb + CST_RGB_24_SIZE);
+
+            i32_u00 = U24(i32_r00, i32_g00, i32_b00);
+            i32_v00 = V24(i32_r00, i32_g00, i32_b00);
+            i32_y00 = Y24(i32_r00, i32_g00, i32_b00);       /* matrix luminance */
+            pu8_yn[0]= (M4VIFI_UInt8)i32_y00;
+
+            i32_u10 = U24(i32_r10, i32_g10, i32_b10);
+            i32_v10 = V24(i32_r10, i32_g10, i32_b10);
+            i32_y10 = Y24(i32_r10, i32_g10, i32_b10);
+            pu8_yn[1]= (M4VIFI_UInt8)i32_y10;
+
+            i32_u01 = U24(i32_r01, i32_g01, i32_b01);
+            i32_v01 = V24(i32_r01, i32_g01, i32_b01);
+            i32_y01 = Y24(i32_r01, i32_g01, i32_b01);
+            pu8_ys[0]= (M4VIFI_UInt8)i32_y01;
+
+            i32_u11 = U24(i32_r11, i32_g11, i32_b11);
+            i32_v11 = V24(i32_r11, i32_g11, i32_b11);
+            i32_y11 = Y24(i32_r11, i32_g11, i32_b11);
+            pu8_ys[1] = (M4VIFI_UInt8)i32_y11;
+
+            *pu8_u  = (M4VIFI_UInt8)((i32_u00 + i32_u01 + i32_u10 + i32_u11 + 2) >> 2);
+            *pu8_v  = (M4VIFI_UInt8)((i32_v00 + i32_v01 + i32_v10 + i32_v11 + 2) >> 2);
+
+            pu8_rgbn    +=  (CST_RGB_24_SIZE<<1);
+            pu8_yn      += 2;
+            pu8_ys      += 2;
+
+            pu8_u ++;
+            pu8_v ++;
+        } /* end of horizontal scanning */
+
+        pu8_y_data      += u32_stride2_Y;
+        pu8_u_data      += u32_stride_U;
+        pu8_v_data      += u32_stride_V;
+        pu8_rgbn_data   += u32_stride_2rgb;
+
+
+    } /* End of vertical scanning */
+
+    return M4VIFI_OK;
+}
+
+/** YUV420 to YUV420 */
+/**
+ *******************************************************************************************
+ * M4VIFI_UInt8 M4VIFI_YUV420toYUV420 (void *pUserData,
+ *                                     M4VIFI_ImagePlane *pPlaneIn,
+ *                                     M4VIFI_ImagePlane *pPlaneOut)
+ * @brief   Transform YUV420 image to a YUV420 image.
+ * @param   pUserData: (IN) User Specific Data (Unused - could be NULL)
+ * @param   pPlaneIn: (IN) Pointer to YUV plane buffer
+ * @param   pPlaneOut: (OUT) Pointer to YUV Plane
+ * @return  M4VIFI_OK: there is no error
+ * @return  M4VIFI_ILLEGAL_FRAME_HEIGHT: Error in plane height
+ * @return  M4VIFI_ILLEGAL_FRAME_WIDTH:  Error in plane width
+ *******************************************************************************************
+ */
+
+M4VIFI_UInt8 M4VIFI_YUV420toYUV420(void *user_data, M4VIFI_ImagePlane PlaneIn[3], M4VIFI_ImagePlane *PlaneOut )
+{
+    M4VIFI_Int32 plane_number;
+    M4VIFI_UInt32 i;
+    M4VIFI_UInt8 *p_buf_src, *p_buf_dest;
+
+    for (plane_number = 0; plane_number < 3; plane_number++)
+    {
+        p_buf_src = &(PlaneIn[plane_number].pac_data[PlaneIn[plane_number].u_topleft]);
+        p_buf_dest = &(PlaneOut[plane_number].pac_data[PlaneOut[plane_number].u_topleft]);
+        for (i = 0; i < PlaneOut[plane_number].u_height; i++)
+        {
+            memcpy((void *)p_buf_dest, (void *)p_buf_src ,PlaneOut[plane_number].u_width);
+            p_buf_src += PlaneIn[plane_number].u_stride;
+            p_buf_dest += PlaneOut[plane_number].u_stride;
+        }
+    }
+    return M4VIFI_OK;
+}
+
+/**
+ ***********************************************************************************************
+ * M4VIFI_UInt8 M4VIFI_ResizeBilinearYUV420toYUV420(void *pUserData, M4VIFI_ImagePlane *pPlaneIn,
+ *                                                                  M4VIFI_ImagePlane *pPlaneOut)
+ * @author  David Dana (PHILIPS Software)
+ * @brief   Resizes YUV420 Planar plane.
+ * @note    Basic structure of the function
+ *          Loop on each row (step 2)
+ *              Loop on each column (step 2)
+ *                  Get four Y samples and 1 U & V sample
+ *                  Resize the Y with corresponding U and V samples
+ *                  Place the YUV in the output plane
+ *              end loop column
+ *          end loop row
+ *          For resizing bilinear interpolation linearly interpolates along
+ *          each row, and then uses that result in a linear interpolation down each column.
+ *          Each estimated pixel in the output image is a weighted
+ *          combination of its four neighbours. The ratio of compression
+ *          or dilatation is estimated using input and output sizes.
+ * @param   pUserData: (IN) User Data
+ * @param   pPlaneIn: (IN) Pointer to YUV420 (Planar) plane buffer
+ * @param   pPlaneOut: (OUT) Pointer to YUV420 (Planar) plane
+ * @return  M4VIFI_OK: there is no error
+ * @return  M4VIFI_ILLEGAL_FRAME_HEIGHT: Error in height
+ * @return  M4VIFI_ILLEGAL_FRAME_WIDTH:  Error in width
+ ***********************************************************************************************
+*/
+M4VIFI_UInt8    M4VIFI_ResizeBilinearYUV420toYUV420(void *pUserData,
+                                                                M4VIFI_ImagePlane *pPlaneIn,
+                                                                M4VIFI_ImagePlane *pPlaneOut)
+{
+    M4VIFI_UInt8    *pu8_data_in, *pu8_data_out, *pu8dum;
+    M4VIFI_UInt32   u32_plane;
+    M4VIFI_UInt32   u32_width_in, u32_width_out, u32_height_in, u32_height_out;
+    M4VIFI_UInt32   u32_stride_in, u32_stride_out;
+    M4VIFI_UInt32   u32_x_inc, u32_y_inc;
+    M4VIFI_UInt32   u32_x_accum, u32_y_accum, u32_x_accum_start;
+    M4VIFI_UInt32   u32_width, u32_height;
+    M4VIFI_UInt32   u32_y_frac;
+    M4VIFI_UInt32   u32_x_frac;
+    M4VIFI_UInt32   u32_temp_value;
+    M4VIFI_UInt8    *pu8_src_top;
+    M4VIFI_UInt8    *pu8_src_bottom;
+
+    M4VIFI_UInt8    u8Wflag = 0;
+    M4VIFI_UInt8    u8Hflag = 0;
+    M4VIFI_UInt32   loop = 0;
+
+
+    /*
+     If input width is equal to output width and input height equal to
+     output height then M4VIFI_YUV420toYUV420 is called.
+    */
+    if ((pPlaneIn[0].u_height == pPlaneOut[0].u_height) &&
+              (pPlaneIn[0].u_width == pPlaneOut[0].u_width))
+    {
+        return M4VIFI_YUV420toYUV420(pUserData, pPlaneIn, pPlaneOut);
+    }
+
+    /* Check for the YUV width and height are even */
+    if ((IS_EVEN(pPlaneIn[0].u_height) == FALSE)    ||
+        (IS_EVEN(pPlaneOut[0].u_height) == FALSE))
+    {
+        return M4VIFI_ILLEGAL_FRAME_HEIGHT;
+    }
+
+    if ((IS_EVEN(pPlaneIn[0].u_width) == FALSE) ||
+        (IS_EVEN(pPlaneOut[0].u_width) == FALSE))
+    {
+        return M4VIFI_ILLEGAL_FRAME_WIDTH;
+    }
+
+    /* Loop on planes */
+    for(u32_plane = 0;u32_plane < PLANES;u32_plane++)
+    {
+        /* Set the working pointers at the beginning of the input/output data field */
+        pu8_data_in     = pPlaneIn[u32_plane].pac_data + pPlaneIn[u32_plane].u_topleft;
+        pu8_data_out    = pPlaneOut[u32_plane].pac_data + pPlaneOut[u32_plane].u_topleft;
+
+        /* Get the memory jump corresponding to a row jump */
+        u32_stride_in   = pPlaneIn[u32_plane].u_stride;
+        u32_stride_out  = pPlaneOut[u32_plane].u_stride;
+
+        /* Set the bounds of the active image */
+        u32_width_in    = pPlaneIn[u32_plane].u_width;
+        u32_height_in   = pPlaneIn[u32_plane].u_height;
+
+        u32_width_out   = pPlaneOut[u32_plane].u_width;
+        u32_height_out  = pPlaneOut[u32_plane].u_height;
+
+        /*
+        For the case , width_out = width_in , set the flag to avoid
+        accessing one column beyond the input width. In this case the last
+        column is replicated for processing
+        */
+        if (u32_width_out == u32_width_in) {
+            u32_width_out = u32_width_out-1;
+            u8Wflag = 1;
+        }
+
+        /* Compute horizontal ratio between src and destination width.*/
+        if (u32_width_out >= u32_width_in)
+        {
+            u32_x_inc   = ((u32_width_in-1) * MAX_SHORT) / (u32_width_out-1);
+        }
+        else
+        {
+            u32_x_inc   = (u32_width_in * MAX_SHORT) / (u32_width_out);
+        }
+
+        /*
+        For the case , height_out = height_in , set the flag to avoid
+        accessing one row beyond the input height. In this case the last
+        row is replicated for processing
+        */
+        if (u32_height_out == u32_height_in) {
+            u32_height_out = u32_height_out-1;
+            u8Hflag = 1;
+        }
+
+        /* Compute vertical ratio between src and destination height.*/
+        if (u32_height_out >= u32_height_in)
+        {
+            u32_y_inc   = ((u32_height_in - 1) * MAX_SHORT) / (u32_height_out-1);
+        }
+        else
+        {
+            u32_y_inc = (u32_height_in * MAX_SHORT) / (u32_height_out);
+        }
+
+        /*
+        Calculate initial accumulator value : u32_y_accum_start.
+        u32_y_accum_start is coded on 15 bits, and represents a value
+        between 0 and 0.5
+        */
+        if (u32_y_inc >= MAX_SHORT)
+        {
+        /*
+        Keep the fractional part, assuming that the integer part is coded
+        on the 16 high bits and the fractional on the 15 low bits
+        */
+            u32_y_accum = u32_y_inc & 0xffff;
+
+            if (!u32_y_accum)
+            {
+                u32_y_accum = MAX_SHORT;
+            }
+
+            u32_y_accum >>= 1;
+        }
+        else
+        {
+            u32_y_accum = 0;
+        }
+
+
+        /*
+        Calculate initial accumulator value : u32_x_accum_start.
+        u32_x_accum_start is coded on 15 bits, and represents a value
+        between 0 and 0.5
+        */
+        if (u32_x_inc >= MAX_SHORT)
+        {
+            u32_x_accum_start = u32_x_inc & 0xffff;
+
+            if (!u32_x_accum_start)
+            {
+                u32_x_accum_start = MAX_SHORT;
+            }
+
+            u32_x_accum_start >>= 1;
+        }
+        else
+        {
+            u32_x_accum_start = 0;
+        }
+
+        u32_height = u32_height_out;
+
+        /*
+        Bilinear interpolation linearly interpolates along each row, and
+        then uses that result in a linear interpolation down each column.
+        Each estimated pixel in the output image is a weighted combination
+        of its four neighbours according to the formula:
+        F(p',q')=f(p,q)R(-a)R(b)+f(p,q-1)R(-a)R(b-1)+f(p+1,q)R(1-a)R(b)+
+        f(p+1,q+1)R(1-a)R(b-1) with  R(x) = / x+1  -1 =< x =< 0 \ 1-x
+        0 =< x =< 1 and a (resp. b)weighting coefficient is the distance
+        from the nearest neighbor in the p (resp. q) direction
+        */
+
+        do { /* Scan all the row */
+
+            /* Vertical weight factor */
+            u32_y_frac = (u32_y_accum>>12)&15;
+
+            /* Reinit accumulator */
+            u32_x_accum = u32_x_accum_start;
+
+            u32_width = u32_width_out;
+
+            do { /* Scan along each row */
+                pu8_src_top = pu8_data_in + (u32_x_accum >> 16);
+                pu8_src_bottom = pu8_src_top + u32_stride_in;
+                u32_x_frac = (u32_x_accum >> 12)&15; /* Horizontal weight factor */
+
+                /* Weighted combination */
+                u32_temp_value = (M4VIFI_UInt8)(((pu8_src_top[0]*(16-u32_x_frac) +
+                                                 pu8_src_top[1]*u32_x_frac)*(16-u32_y_frac) +
+                                                (pu8_src_bottom[0]*(16-u32_x_frac) +
+                                                 pu8_src_bottom[1]*u32_x_frac)*u32_y_frac )>>8);
+
+                *pu8_data_out++ = (M4VIFI_UInt8)u32_temp_value;
+
+                /* Update horizontal accumulator */
+                u32_x_accum += u32_x_inc;
+            } while(--u32_width);
+
+            /*
+               This u8Wflag flag gets in to effect if input and output
+               width is same, and height may be different. So previous
+               pixel is replicated here
+            */
+            if (u8Wflag) {
+                *pu8_data_out = (M4VIFI_UInt8)u32_temp_value;
+            }
+
+            pu8dum = (pu8_data_out-u32_width_out);
+            pu8_data_out = pu8_data_out + u32_stride_out - u32_width_out;
+
+            /* Update vertical accumulator */
+            u32_y_accum += u32_y_inc;
+            if (u32_y_accum>>16) {
+                pu8_data_in = pu8_data_in + (u32_y_accum >> 16) * u32_stride_in;
+                u32_y_accum &= 0xffff;
+            }
+        } while(--u32_height);
+
+        /*
+        This u8Hflag flag gets in to effect if input and output height
+        is same, and width may be different. So previous pixel row is
+        replicated here
+        */
+        if (u8Hflag) {
+            for(loop =0; loop < (u32_width_out+u8Wflag); loop++) {
+                *pu8_data_out++ = (M4VIFI_UInt8)*pu8dum++;
+            }
+        }
+    }
+
+    return M4VIFI_OK;
+}
+
+M4OSA_ERR applyRenderingMode(M4VIFI_ImagePlane* pPlaneIn, M4VIFI_ImagePlane* pPlaneOut, M4xVSS_MediaRendering mediaRendering)
+{
+    M4OSA_ERR err = M4NO_ERROR;
+
+    if(mediaRendering == M4xVSS_kResizing)
+    {
+        /**
+         * Call the resize filter. From the intermediate frame to the encoder image plane */
+        err = M4VIFI_ResizeBilinearYUV420toYUV420(M4OSA_NULL, pPlaneIn, pPlaneOut);
+        if (M4NO_ERROR != err)
+        {
+            M4OSA_TRACE1_1("applyRenderingMode: M4ViFilResizeBilinearYUV420toYUV420 returns 0x%x!", err);
+            return err;
+        }
+    }
+    else
+    {
+        M4AIR_Params Params;
+        M4OSA_Context m_air_context;
+        M4VIFI_ImagePlane pImagePlanesTemp[3];
+        M4VIFI_ImagePlane* pPlaneTemp;
+        M4OSA_UInt8* pOutPlaneY = pPlaneOut[0].pac_data + pPlaneOut[0].u_topleft;
+        M4OSA_UInt8* pOutPlaneU = pPlaneOut[1].pac_data + pPlaneOut[1].u_topleft;
+        M4OSA_UInt8* pOutPlaneV = pPlaneOut[2].pac_data + pPlaneOut[2].u_topleft;
+        M4OSA_UInt8* pInPlaneY = NULL;
+        M4OSA_UInt8* pInPlaneU = NULL;
+        M4OSA_UInt8* pInPlaneV = NULL;
+        M4OSA_UInt32 i;
+
+        /*to keep media aspect ratio*/
+        /*Initialize AIR Params*/
+        Params.m_inputCoord.m_x = 0;
+        Params.m_inputCoord.m_y = 0;
+        Params.m_inputSize.m_height = pPlaneIn->u_height;
+        Params.m_inputSize.m_width = pPlaneIn->u_width;
+        Params.m_outputSize.m_width = pPlaneOut->u_width;
+        Params.m_outputSize.m_height = pPlaneOut->u_height;
+        Params.m_bOutputStripe = M4OSA_FALSE;
+        Params.m_outputOrientation = M4COMMON_kOrientationTopLeft;
+
+        /**
+        Media rendering: Black borders*/
+        if(mediaRendering == M4xVSS_kBlackBorders)
+        {
+            memset((void *)pPlaneOut[0].pac_data,Y_PLANE_BORDER_VALUE,(pPlaneOut[0].u_height*pPlaneOut[0].u_stride));
+            memset((void *)pPlaneOut[1].pac_data,U_PLANE_BORDER_VALUE,(pPlaneOut[1].u_height*pPlaneOut[1].u_stride));
+            memset((void *)pPlaneOut[2].pac_data,V_PLANE_BORDER_VALUE,(pPlaneOut[2].u_height*pPlaneOut[2].u_stride));
+
+            pImagePlanesTemp[0].u_width = pPlaneOut[0].u_width;
+            pImagePlanesTemp[0].u_height = pPlaneOut[0].u_height;
+            pImagePlanesTemp[0].u_stride = pPlaneOut[0].u_width;
+            pImagePlanesTemp[0].u_topleft = 0;
+            pImagePlanesTemp[0].pac_data = M4OSA_NULL;
+
+            pImagePlanesTemp[1].u_width = pPlaneOut[1].u_width;
+            pImagePlanesTemp[1].u_height = pPlaneOut[1].u_height;
+            pImagePlanesTemp[1].u_stride = pPlaneOut[1].u_width;
+            pImagePlanesTemp[1].u_topleft = 0;
+            pImagePlanesTemp[1].pac_data = M4OSA_NULL;
+
+            pImagePlanesTemp[2].u_width = pPlaneOut[2].u_width;
+            pImagePlanesTemp[2].u_height = pPlaneOut[2].u_height;
+            pImagePlanesTemp[2].u_stride = pPlaneOut[2].u_width;
+            pImagePlanesTemp[2].u_topleft = 0;
+            pImagePlanesTemp[2].pac_data = M4OSA_NULL;
+
+            /* Allocates plan in local image plane structure */
+            pImagePlanesTemp[0].pac_data = (M4OSA_UInt8*)M4OSA_32bitAlignedMalloc(pImagePlanesTemp[0].u_width * pImagePlanesTemp[0].u_height, M4VS, (M4OSA_Char*)"applyRenderingMode: temporary plane bufferY") ;
+            if(pImagePlanesTemp[0].pac_data == M4OSA_NULL)
+            {
+                M4OSA_TRACE1_0("Error alloc in applyRenderingMode");
+                return M4ERR_ALLOC;
+            }
+            pImagePlanesTemp[1].pac_data = (M4OSA_UInt8*)M4OSA_32bitAlignedMalloc(pImagePlanesTemp[1].u_width * pImagePlanesTemp[1].u_height, M4VS, (M4OSA_Char*)"applyRenderingMode: temporary plane bufferU") ;
+            if(pImagePlanesTemp[1].pac_data == M4OSA_NULL)
+            {
+
+                M4OSA_TRACE1_0("Error alloc in applyRenderingMode");
+                return M4ERR_ALLOC;
+            }
+            pImagePlanesTemp[2].pac_data = (M4OSA_UInt8*)M4OSA_32bitAlignedMalloc(pImagePlanesTemp[2].u_width * pImagePlanesTemp[2].u_height, M4VS, (M4OSA_Char*)"applyRenderingMode: temporary plane bufferV") ;
+            if(pImagePlanesTemp[2].pac_data == M4OSA_NULL)
+            {
+
+                M4OSA_TRACE1_0("Error alloc in applyRenderingMode");
+                return M4ERR_ALLOC;
+            }
+
+            pInPlaneY = pImagePlanesTemp[0].pac_data ;
+            pInPlaneU = pImagePlanesTemp[1].pac_data ;
+            pInPlaneV = pImagePlanesTemp[2].pac_data ;
+
+            memset((void *)pImagePlanesTemp[0].pac_data,Y_PLANE_BORDER_VALUE,(pImagePlanesTemp[0].u_height*pImagePlanesTemp[0].u_stride));
+            memset((void *)pImagePlanesTemp[1].pac_data,U_PLANE_BORDER_VALUE,(pImagePlanesTemp[1].u_height*pImagePlanesTemp[1].u_stride));
+            memset((void *)pImagePlanesTemp[2].pac_data,V_PLANE_BORDER_VALUE,(pImagePlanesTemp[2].u_height*pImagePlanesTemp[2].u_stride));
+
+            if((M4OSA_UInt32)((pPlaneIn->u_height * pPlaneOut->u_width) /pPlaneIn->u_width) <= pPlaneOut->u_height)//Params.m_inputSize.m_height < Params.m_inputSize.m_width)
+            {
+                /*it is height so black borders will be on the top and on the bottom side*/
+                Params.m_outputSize.m_width = pPlaneOut->u_width;
+                Params.m_outputSize.m_height = (M4OSA_UInt32)((pPlaneIn->u_height * pPlaneOut->u_width) /pPlaneIn->u_width);
+                /*number of lines at the top*/
+                pImagePlanesTemp[0].u_topleft = (M4xVSS_ABS((M4OSA_Int32)(pImagePlanesTemp[0].u_height-Params.m_outputSize.m_height)>>1))*pImagePlanesTemp[0].u_stride;
+                pImagePlanesTemp[0].u_height = Params.m_outputSize.m_height;
+                pImagePlanesTemp[1].u_topleft = (M4xVSS_ABS((M4OSA_Int32)(pImagePlanesTemp[1].u_height-(Params.m_outputSize.m_height>>1)))>>1)*pImagePlanesTemp[1].u_stride;
+                pImagePlanesTemp[1].u_height = Params.m_outputSize.m_height>>1;
+                pImagePlanesTemp[2].u_topleft = (M4xVSS_ABS((M4OSA_Int32)(pImagePlanesTemp[2].u_height-(Params.m_outputSize.m_height>>1)))>>1)*pImagePlanesTemp[2].u_stride;
+                pImagePlanesTemp[2].u_height = Params.m_outputSize.m_height>>1;
+            }
+            else
+            {
+                /*it is width so black borders will be on the left and right side*/
+                Params.m_outputSize.m_height = pPlaneOut->u_height;
+                Params.m_outputSize.m_width = (M4OSA_UInt32)((pPlaneIn->u_width * pPlaneOut->u_height) /pPlaneIn->u_height);
+
+                pImagePlanesTemp[0].u_topleft = (M4xVSS_ABS((M4OSA_Int32)(pImagePlanesTemp[0].u_width-Params.m_outputSize.m_width)>>1));
+                pImagePlanesTemp[0].u_width = Params.m_outputSize.m_width;
+                pImagePlanesTemp[1].u_topleft = (M4xVSS_ABS((M4OSA_Int32)(pImagePlanesTemp[1].u_width-(Params.m_outputSize.m_width>>1)))>>1);
+                pImagePlanesTemp[1].u_width = Params.m_outputSize.m_width>>1;
+                pImagePlanesTemp[2].u_topleft = (M4xVSS_ABS((M4OSA_Int32)(pImagePlanesTemp[2].u_width-(Params.m_outputSize.m_width>>1)))>>1);
+                pImagePlanesTemp[2].u_width = Params.m_outputSize.m_width>>1;
+            }
+
+            /*Width and height have to be even*/
+            Params.m_outputSize.m_width = (Params.m_outputSize.m_width>>1)<<1;
+            Params.m_outputSize.m_height = (Params.m_outputSize.m_height>>1)<<1;
+            Params.m_inputSize.m_width = (Params.m_inputSize.m_width>>1)<<1;
+            Params.m_inputSize.m_height = (Params.m_inputSize.m_height>>1)<<1;
+            pImagePlanesTemp[0].u_width = (pImagePlanesTemp[0].u_width>>1)<<1;
+            pImagePlanesTemp[1].u_width = (pImagePlanesTemp[1].u_width>>1)<<1;
+            pImagePlanesTemp[2].u_width = (pImagePlanesTemp[2].u_width>>1)<<1;
+            pImagePlanesTemp[0].u_height = (pImagePlanesTemp[0].u_height>>1)<<1;
+            pImagePlanesTemp[1].u_height = (pImagePlanesTemp[1].u_height>>1)<<1;
+            pImagePlanesTemp[2].u_height = (pImagePlanesTemp[2].u_height>>1)<<1;
+
+            /*Check that values are coherent*/
+            if(Params.m_inputSize.m_height == Params.m_outputSize.m_height)
+            {
+                Params.m_inputSize.m_width = Params.m_outputSize.m_width;
+            }
+            else if(Params.m_inputSize.m_width == Params.m_outputSize.m_width)
+            {
+                Params.m_inputSize.m_height = Params.m_outputSize.m_height;
+            }
+            pPlaneTemp = pImagePlanesTemp;
+
+
+        }
+
+        /**
+        Media rendering: Cropping*/
+        if(mediaRendering == M4xVSS_kCropping)
+        {
+            Params.m_outputSize.m_height = pPlaneOut->u_height;
+            Params.m_outputSize.m_width = pPlaneOut->u_width;
+            if((Params.m_outputSize.m_height * Params.m_inputSize.m_width) /Params.m_outputSize.m_width<Params.m_inputSize.m_height)
+            {
+                /*height will be cropped*/
+                Params.m_inputSize.m_height = (M4OSA_UInt32)((Params.m_outputSize.m_height * Params.m_inputSize.m_width) /Params.m_outputSize.m_width);
+                Params.m_inputSize.m_height = (Params.m_inputSize.m_height>>1)<<1;
+                Params.m_inputCoord.m_y = (M4OSA_Int32)((M4OSA_Int32)((pPlaneIn->u_height - Params.m_inputSize.m_height))>>1);
+            }
+            else
+            {
+                /*width will be cropped*/
+                Params.m_inputSize.m_width = (M4OSA_UInt32)((Params.m_outputSize.m_width * Params.m_inputSize.m_height) /Params.m_outputSize.m_height);
+                Params.m_inputSize.m_width = (Params.m_inputSize.m_width>>1)<<1;
+                Params.m_inputCoord.m_x = (M4OSA_Int32)((M4OSA_Int32)((pPlaneIn->u_width - Params.m_inputSize.m_width))>>1);
+            }
+            pPlaneTemp = pPlaneOut;
+        }
+
+        /**
+         * Call AIR functions */
+        err = M4AIR_create(&m_air_context, M4AIR_kYUV420P);
+        if(err != M4NO_ERROR)
+        {
+
+            M4OSA_TRACE1_1("applyRenderingMode: Error when initializing AIR: 0x%x", err);
+            for(i=0; i<3; i++)
+            {
+                if(pImagePlanesTemp[i].pac_data != M4OSA_NULL)
+                {
+                    free(pImagePlanesTemp[i].pac_data);
+                    pImagePlanesTemp[i].pac_data = M4OSA_NULL;
+                }
+            }
+            return err;
+        }
+
+
+        err = M4AIR_configure(m_air_context, &Params);
+        if(err != M4NO_ERROR)
+        {
+
+            M4OSA_TRACE1_1("applyRenderingMode: Error when configuring AIR: 0x%x", err);
+            M4AIR_cleanUp(m_air_context);
+            for(i=0; i<3; i++)
+            {
+                if(pImagePlanesTemp[i].pac_data != M4OSA_NULL)
+                {
+                    free(pImagePlanesTemp[i].pac_data);
+                    pImagePlanesTemp[i].pac_data = M4OSA_NULL;
+                }
+            }
+            return err;
+        }
+
+        err = M4AIR_get(m_air_context, pPlaneIn, pPlaneTemp);
+        if(err != M4NO_ERROR)
+        {
+            M4OSA_TRACE1_1("applyRenderingMode: Error when getting AIR plane: 0x%x", err);
+            M4AIR_cleanUp(m_air_context);
+            for(i=0; i<3; i++)
+            {
+                if(pImagePlanesTemp[i].pac_data != M4OSA_NULL)
+                {
+                    free(pImagePlanesTemp[i].pac_data);
+                    pImagePlanesTemp[i].pac_data = M4OSA_NULL;
+                }
+            }
+            return err;
+        }
+
+        if(mediaRendering == M4xVSS_kBlackBorders)
+        {
+            for(i=0; i<pPlaneOut[0].u_height; i++)
+            {
+                memcpy((void *)pOutPlaneY, (void *)pInPlaneY, pPlaneOut[0].u_width);
+                pInPlaneY += pPlaneOut[0].u_width;
+                pOutPlaneY += pPlaneOut[0].u_stride;
+            }
+            for(i=0; i<pPlaneOut[1].u_height; i++)
+            {
+                memcpy((void *)pOutPlaneU, (void *)pInPlaneU, pPlaneOut[1].u_width);
+                pInPlaneU += pPlaneOut[1].u_width;
+                pOutPlaneU += pPlaneOut[1].u_stride;
+            }
+            for(i=0; i<pPlaneOut[2].u_height; i++)
+            {
+                memcpy((void *)pOutPlaneV, (void *)pInPlaneV, pPlaneOut[2].u_width);
+                pInPlaneV += pPlaneOut[2].u_width;
+                pOutPlaneV += pPlaneOut[2].u_stride;
+            }
+
+            for(i=0; i<3; i++)
+            {
+                if(pImagePlanesTemp[i].pac_data != M4OSA_NULL)
+                {
+                    free(pImagePlanesTemp[i].pac_data);
+                    pImagePlanesTemp[i].pac_data = M4OSA_NULL;
+                }
+            }
+        }
+
+        if (m_air_context != M4OSA_NULL) {
+            M4AIR_cleanUp(m_air_context);
+            m_air_context = M4OSA_NULL;
+        }
+    }
+
+    return err;
+}
+
+//TODO: remove this code after link with videoartist lib
+/* M4AIR code*/
+#define M4AIR_YUV420_FORMAT_SUPPORTED
+#define M4AIR_YUV420A_FORMAT_SUPPORTED
+
+/************************* COMPILATION CHECKS ***************************/
+#ifndef M4AIR_YUV420_FORMAT_SUPPORTED
+#ifndef M4AIR_BGR565_FORMAT_SUPPORTED
+#ifndef M4AIR_RGB565_FORMAT_SUPPORTED
+#ifndef M4AIR_BGR888_FORMAT_SUPPORTED
+#ifndef M4AIR_RGB888_FORMAT_SUPPORTED
+#ifndef M4AIR_JPG_FORMAT_SUPPORTED
+
+#error "Please define at least one input format for the AIR component"
+
+#endif
+#endif
+#endif
+#endif
+#endif
+#endif
+
+/************************ M4AIR INTERNAL TYPES DEFINITIONS ***********************/
+
+/**
+ ******************************************************************************
+ * enum         M4AIR_States
+ * @brief       The following enumeration defines the internal states of the AIR.
+ ******************************************************************************
+*/
+typedef enum
+{
+    M4AIR_kCreated,         /**< State after M4AIR_create has been called */
+    M4AIR_kConfigured           /**< State after M4AIR_configure has been called */
+}M4AIR_States;
+
+
+/**
+ ******************************************************************************
+ * struct       M4AIR_InternalContext
+ * @brief       The following structure is the internal context of the AIR.
+ ******************************************************************************
+*/
+typedef struct
+{
+    M4AIR_States                m_state;            /**< Internal state */
+    M4AIR_InputFormatType   m_inputFormat;      /**< Input format like YUV420Planar, RGB565, JPG, etc ... */
+    M4AIR_Params            m_params;           /**< Current input Parameter of  the processing */
+    M4OSA_UInt32            u32_x_inc[4];       /**< ratio between input and output width for YUV */
+    M4OSA_UInt32            u32_y_inc[4];       /**< ratio between input and output height for YUV */
+    M4OSA_UInt32            u32_x_accum_start[4];   /**< horizontal initial accumulator value */
+    M4OSA_UInt32            u32_y_accum_start[4];   /**< Vertical initial accumulator value */
+    M4OSA_UInt32            u32_x_accum[4];     /**< save of horizontal accumulator value */
+    M4OSA_UInt32            u32_y_accum[4];     /**< save of vertical accumulator value */
+    M4OSA_UInt8*            pu8_data_in[4];         /**< Save of input plane pointers in case of stripe mode */
+    M4OSA_UInt32            m_procRows;         /**< Number of processed rows, used in stripe mode only */
+    M4OSA_Bool              m_bOnlyCopy;            /**< Flag to know if we just perform a copy or a bilinear interpolation */
+    M4OSA_Bool              m_bFlipX;               /**< Depend on output orientation, used during processing to revert processing order in X coordinates */
+    M4OSA_Bool              m_bFlipY;               /**< Depend on output orientation, used during processing to revert processing order in Y coordinates */
+    M4OSA_Bool              m_bRevertXY;            /**< Depend on output orientation, used during processing to revert X and Y processing order (+-90° rotation) */
+}M4AIR_InternalContext;
+
+/********************************* MACROS *******************************/
+/* NOTE(review): expands to a bare, unbraced if/return statement. It is only
+   safe where a full statement is expected; using it as the body of an
+   unbraced if/else in caller code would silently rebind the else branch. */
+#define M4ERR_CHECK_NULL_RETURN_VALUE(retval, pointer) if ((pointer) == M4OSA_NULL) return ((M4OSA_ERR)(retval));
+
+
+/********************** M4AIR PUBLIC API IMPLEMENTATION ********************/
+/**
+ ******************************************************************************
+ * M4OSA_ERR M4AIR_create(M4OSA_Context* pContext,M4AIR_InputFormatType inputFormat)
+ * @author  Arnaud Collard
+ * @brief       This function initialize an instance of the AIR.
+ * @param   pContext:   (IN/OUT) Address of the context to create
+ * @param   inputFormat:    (IN) input format type.
+ * @return  M4NO_ERROR: there is no error
+ * @return  M4ERR_PARAMETER: pContext is M4OSA_NULL (debug only). Invalid formatType
+ * @return  M4ERR_ALLOC: No more memory is available
+ * @return  M4ERR_AIR_FORMAT_NOT_SUPPORTED: inputFormat is not among the formats
+ *          compiled in (see the M4AIR_*_FORMAT_SUPPORTED switches below).
+ ******************************************************************************
+*/
+M4OSA_ERR M4AIR_create(M4OSA_Context* pContext,M4AIR_InputFormatType inputFormat)
+{
+    M4OSA_ERR err = M4NO_ERROR ;
+    M4AIR_InternalContext* pC = M4OSA_NULL ;
+    /* Check that the address on the context is not NULL */
+    M4ERR_CHECK_NULL_RETURN_VALUE(M4ERR_PARAMETER, pContext) ;
+
+    /* Pre-set the out-parameter so the caller never sees a stale pointer on failure */
+    *pContext = M4OSA_NULL ;
+
+    /* Internal Context creation */
+    pC = (M4AIR_InternalContext*)M4OSA_32bitAlignedMalloc(sizeof(M4AIR_InternalContext), M4AIR, (M4OSA_Char*)"AIR internal context") ;
+    M4ERR_CHECK_NULL_RETURN_VALUE(M4ERR_ALLOC, pC) ;
+
+
+    /* Check if the input format is supported (only compiled-in YUV420 variants pass) */
+    switch(inputFormat)
+    {
+#ifdef M4AIR_YUV420_FORMAT_SUPPORTED
+        case M4AIR_kYUV420P:
+        break ;
+#endif
+#ifdef M4AIR_YUV420A_FORMAT_SUPPORTED
+        case M4AIR_kYUV420AP:
+        break ;
+#endif
+        default:
+            err = M4ERR_AIR_FORMAT_NOT_SUPPORTED;
+            goto M4AIR_create_cleanup ;
+    }
+
+    /**< Save input format and update state */
+    pC->m_inputFormat = inputFormat;
+    pC->m_state = M4AIR_kCreated;
+
+    /* Return the context to the caller */
+    *pContext = pC ;
+
+    return M4NO_ERROR ;
+
+M4AIR_create_cleanup:
+    /* Error management : we destroy the context if needed */
+    if(M4OSA_NULL != pC)
+    {
+        free(pC) ;
+    }
+
+    *pContext = M4OSA_NULL ;
+
+    return err ;
+}
+
+
+
+/**
+ ******************************************************************************
+ * M4OSA_ERR M4AIR_cleanUp(M4OSA_Context pContext)
+ * @author  Arnaud Collard
+ * @brief       This function destroys an instance of the AIR component
+ * @note    On success pContext is freed and must not be used again by the caller.
+ * @param   pContext:   (IN) Context identifying the instance to destroy
+ * @return  M4NO_ERROR: there is no error
+ * @return  M4ERR_PARAMETER: pContext is M4OSA_NULL (debug only).
+ * @return  M4ERR_STATE: Internal state is incompatible with this function call.
+******************************************************************************
+*/
+M4OSA_ERR M4AIR_cleanUp(M4OSA_Context pContext)
+{
+    M4AIR_InternalContext* pC = (M4AIR_InternalContext*)pContext ;
+
+    M4ERR_CHECK_NULL_RETURN_VALUE(M4ERR_PARAMETER, pContext) ;
+
+    /**< Check state: only a created or configured instance may be destroyed */
+    if((M4AIR_kCreated != pC->m_state)&&(M4AIR_kConfigured != pC->m_state))
+    {
+        return M4ERR_STATE;
+    }
+    free(pC) ;
+
+    return M4NO_ERROR ;
+
+}
+
+
+/**
+ ******************************************************************************
+ * M4OSA_ERR M4AIR_configure(M4OSA_Context pContext, M4AIR_Params* pParams)
+ * @brief       This function will configure the AIR.
+ * @note    It will set the input and output coordinates and sizes,
+ *          and indicates if we will proceed in stripe or not.
+ *          In case a M4AIR_get in stripe mode was on going, it will cancel this previous processing
+ *          and reset the get process.
+ * @param   pContext:               (IN) Context identifying the instance
+ * @param   pParams->m_bOutputStripe:(IN) Stripe mode.
+ * @param   pParams->m_inputCoord:  (IN) X,Y coordinates of the first valid pixel in input.
+ * @param   pParams->m_inputSize:   (IN) input ROI size.
+ * @param   pParams->m_outputSize:  (IN) output size.
+ * @return  M4NO_ERROR: there is no error
+ * @return  M4ERR_ALLOC: No more memory space to add a new effect.
+ * @return  M4ERR_PARAMETER: pContext is M4OSA_NULL (debug only).
+ * @return  M4ERR_AIR_FORMAT_NOT_SUPPORTED: the requested input format is not supported.
+ * @return  M4ERR_AIR_ILLEGAL_FRAME_SIZE: an input or output dimension is odd.
+ * @return  M4ERR_STATE: the instance is neither in created nor configured state.
+ ******************************************************************************
+*/
+M4OSA_ERR M4AIR_configure(M4OSA_Context pContext, M4AIR_Params* pParams)
+{
+    M4AIR_InternalContext* pC = (M4AIR_InternalContext*)pContext ;
+    M4OSA_UInt32    i,u32_width_in, u32_width_out, u32_height_in, u32_height_out;
+    M4OSA_UInt32    nb_planes;
+
+    M4ERR_CHECK_NULL_RETURN_VALUE(M4ERR_PARAMETER, pContext) ;
+
+    /**< YUV420 with alpha carries a 4th (alpha) plane, plain YUV420 only 3 */
+    if(M4AIR_kYUV420AP == pC->m_inputFormat)
+    {
+        nb_planes = 4;
+    }
+    else
+    {
+        nb_planes = 3;
+    }
+
+    /**< Check state */
+    if((M4AIR_kCreated != pC->m_state)&&(M4AIR_kConfigured != pC->m_state))
+    {
+        return M4ERR_STATE;
+    }
+
+    /** Save parameters */
+    pC->m_params = *pParams;
+
+    /* Check that both the input AND output width and height are even.
+       BUGFIX(review): the previous code compared m_inputSize against itself
+       twice, so an odd OUTPUT dimension was never rejected even though the
+       comment stated the intent to check both; the output dimensions feed the
+       (u32_*_out - 1) divisors below. */
+    if( ((pC->m_params.m_inputSize.m_height)&0x1)    ||
+        ((pC->m_params.m_outputSize.m_height)&0x1))
+    {
+        return M4ERR_AIR_ILLEGAL_FRAME_SIZE;
+    }
+
+    if( ((pC->m_params.m_inputSize.m_width)&0x1)    ||
+        ((pC->m_params.m_outputSize.m_width)&0x1))
+    {
+        return M4ERR_AIR_ILLEGAL_FRAME_SIZE;
+    }
+
+    if(((pC->m_params.m_inputSize.m_width) == (pC->m_params.m_outputSize.m_width))
+        &&((pC->m_params.m_inputSize.m_height) == (pC->m_params.m_outputSize.m_height)))
+    {
+        /**< No resize in this case, we will just copy input in output */
+        pC->m_bOnlyCopy = M4OSA_TRUE;
+    }
+    else
+    {
+        pC->m_bOnlyCopy = M4OSA_FALSE;
+
+        /**< Initialize internal variables used for resize filter.
+             Planes 0 (luma) and 3 (alpha) are full size; planes 1 and 2
+             (chroma) are 2x subsampled in both directions. */
+        for(i=0;i<nb_planes;i++)
+        {
+
+            u32_width_in = ((i==0)||(i==3))?pC->m_params.m_inputSize.m_width:(pC->m_params.m_inputSize.m_width+1)>>1;
+            u32_height_in = ((i==0)||(i==3))?pC->m_params.m_inputSize.m_height:(pC->m_params.m_inputSize.m_height+1)>>1;
+            u32_width_out = ((i==0)||(i==3))?pC->m_params.m_outputSize.m_width:(pC->m_params.m_outputSize.m_width+1)>>1;
+            u32_height_out = ((i==0)||(i==3))?pC->m_params.m_outputSize.m_height:(pC->m_params.m_outputSize.m_height+1)>>1;
+
+                /* Compute horizontal ratio between src and destination width
+                   as a 16.16 fixed-point value.
+                   NOTE(review): for upscaling, a chroma output width of 1
+                   (output width 2) would make the (u32_width_out-1) divisor
+                   zero - TODO confirm minimum supported output size. */
+                if (u32_width_out >= u32_width_in)
+                {
+                    pC->u32_x_inc[i]   = ((u32_width_in-1) * 0x10000) / (u32_width_out-1);
+                }
+                else
+                {
+                    pC->u32_x_inc[i]   = (u32_width_in * 0x10000) / (u32_width_out);
+                }
+
+                /* Compute vertical ratio between src and destination height (16.16).*/
+                if (u32_height_out >= u32_height_in)
+                {
+                    pC->u32_y_inc[i]   = ((u32_height_in - 1) * 0x10000) / (u32_height_out-1);
+                }
+                else
+                {
+                    pC->u32_y_inc[i] = (u32_height_in * 0x10000) / (u32_height_out);
+                }
+
+                /*
+                Calculate initial accumulator value : u32_y_accum_start.
+                u32_y_accum_start is coded on 15 bits, and represents a value between 0 and 0.5
+                */
+                if (pC->u32_y_inc[i] >= 0x10000)
+                {
+                    /*
+                        Keep the fractional part, assuming that the integer part is coded
+                        on the 16 high bits and the fractional part on the 15 low bits
+                    */
+                    pC->u32_y_accum_start[i] = pC->u32_y_inc[i] & 0xffff;
+
+                    if (!pC->u32_y_accum_start[i])
+                    {
+                        pC->u32_y_accum_start[i] = 0x10000;
+                    }
+
+                    pC->u32_y_accum_start[i] >>= 1;
+                }
+                else
+                {
+                    pC->u32_y_accum_start[i] = 0;
+                }
+                /**< Take into account that Y coordinate can be odd
+                    in this case we have to put a 0.5 offset
+                    for U and V plane as there a 2 times sub-sampled vs Y*/
+                if((pC->m_params.m_inputCoord.m_y&0x1)&&((i==1)||(i==2)))
+                {
+                    pC->u32_y_accum_start[i] += 0x8000;
+                }
+
+                /*
+                    Calculate initial accumulator value : u32_x_accum_start.
+                    u32_x_accum_start is coded on 15 bits, and represents a value between 0 and 0.5
+                */
+
+                if (pC->u32_x_inc[i] >= 0x10000)
+                {
+                    pC->u32_x_accum_start[i] = pC->u32_x_inc[i] & 0xffff;
+
+                    if (!pC->u32_x_accum_start[i])
+                    {
+                        pC->u32_x_accum_start[i] = 0x10000;
+                    }
+
+                    pC->u32_x_accum_start[i] >>= 1;
+                }
+                else
+                {
+                    pC->u32_x_accum_start[i] = 0;
+                }
+                /**< Take into account that X coordinate can be odd
+                    in this case we have to put a 0.5 offset
+                    for U and V plane as there a 2 times sub-sampled vs Y*/
+                if((pC->m_params.m_inputCoord.m_x&0x1)&&((i==1)||(i==2)))
+                {
+                    pC->u32_x_accum_start[i] += 0x8000;
+                }
+        }
+    }
+
+    /**< Reset variable used for stripe mode */
+    pC->m_procRows = 0;
+
+    /**< Initialize var for X/Y processing order according to orientation */
+    pC->m_bFlipX = M4OSA_FALSE;
+    pC->m_bFlipY = M4OSA_FALSE;
+    pC->m_bRevertXY = M4OSA_FALSE;
+    switch(pParams->m_outputOrientation)
+    {
+        case M4COMMON_kOrientationTopLeft:
+            break;
+        case M4COMMON_kOrientationTopRight:
+            pC->m_bFlipX = M4OSA_TRUE;
+            break;
+        case M4COMMON_kOrientationBottomRight:
+            pC->m_bFlipX = M4OSA_TRUE;
+            pC->m_bFlipY = M4OSA_TRUE;
+            break;
+        case M4COMMON_kOrientationBottomLeft:
+            pC->m_bFlipY = M4OSA_TRUE;
+            break;
+        case M4COMMON_kOrientationLeftTop:
+            pC->m_bRevertXY = M4OSA_TRUE;
+            break;
+        case M4COMMON_kOrientationRightTop:
+            pC->m_bRevertXY = M4OSA_TRUE;
+            pC->m_bFlipY = M4OSA_TRUE;
+            break;
+        case M4COMMON_kOrientationRightBottom:
+            pC->m_bRevertXY = M4OSA_TRUE;
+            pC->m_bFlipX = M4OSA_TRUE;
+            pC->m_bFlipY = M4OSA_TRUE;
+            break;
+        case M4COMMON_kOrientationLeftBottom:
+            pC->m_bRevertXY = M4OSA_TRUE;
+            pC->m_bFlipX = M4OSA_TRUE;
+            break;
+        default:
+            return M4ERR_PARAMETER;
+    }
+    /**< Update state */
+    pC->m_state = M4AIR_kConfigured;
+
+    return M4NO_ERROR ;
+}
+
+
+/**
+ ******************************************************************************
+ * M4OSA_ERR M4AIR_get(M4OSA_Context pContext, M4VIFI_ImagePlane* pIn, M4VIFI_ImagePlane* pOut)
+ * @brief   This function will provide the requested resized area of interest according to settings
+ *          provided in M4AIR_configure.
+ * @note    In case the input format type is JPEG, input plane(s)
+ *          in pIn is not used. In normal mode, dimension specified in output plane(s) structure must be the
+ *          same than the one specified in M4AIR_configure. In stripe mode, only the width will be the same,
+ *          height will be taken as the stripe height (typically 16).
+ *          In normal mode, this function is call once to get the full output picture. In stripe mode, it is called
+ *          for each stripe till the whole picture has been retrieved,and  the position of the output stripe in the output picture
+ *          is internally incremented at each step.
+ *          Any call to M4AIR_configure during stripe process will reset this one to the beginning of the output picture.
+ * @param   pContext:   (IN) Context identifying the instance
+ * @param   pIn:            (IN) Plane structure containing input Plane(s).
+ * @param   pOut:       (IN/OUT)  Plane structure containing output Plane(s).
+ * @return  M4NO_ERROR: there is no error
+ * @return  M4ERR_ALLOC: No more memory space to add a new effect.
+ * @return  M4ERR_PARAMETER: pContext is M4OSA_NULL (debug only).
+ ******************************************************************************
+*/
+M4OSA_ERR M4AIR_get(M4OSA_Context pContext, M4VIFI_ImagePlane* pIn, M4VIFI_ImagePlane* pOut)
+{
+    M4AIR_InternalContext* pC = (M4AIR_InternalContext*)pContext ;
+    M4OSA_UInt32 i,j,k,u32_x_frac,u32_y_frac,u32_x_accum,u32_y_accum,u32_shift;
+        M4OSA_UInt8    *pu8_data_in, *pu8_data_in_org, *pu8_data_in_tmp, *pu8_data_out;
+        M4OSA_UInt8    *pu8_src_top;
+        M4OSA_UInt8    *pu8_src_bottom;
+    M4OSA_UInt32    u32_temp_value;
+    M4OSA_Int32 i32_tmp_offset;
+    M4OSA_UInt32    nb_planes;
+
+
+
+    M4ERR_CHECK_NULL_RETURN_VALUE(M4ERR_PARAMETER, pContext) ;
+
+    /**< Check state */
+    if(M4AIR_kConfigured != pC->m_state)
+    {
+        return M4ERR_STATE;
+    }
+
+    if(M4AIR_kYUV420AP == pC->m_inputFormat)
+    {
+        nb_planes = 4;
+    }
+    else
+    {
+        nb_planes = 3;
+    }
+
+    /**< Loop on each Plane */
+    for(i=0;i<nb_planes;i++)
+    {
+
+         /* Set the working pointers at the beginning of the input/output data field */
+
+        u32_shift = ((i==0)||(i==3))?0:1; /**< Depend on Luma or Chroma */
+
+        if((M4OSA_FALSE == pC->m_params.m_bOutputStripe)||((M4OSA_TRUE == pC->m_params.m_bOutputStripe)&&(0 == pC->m_procRows)))
+        {
+            /**< For input, take care about ROI */
+            pu8_data_in     = pIn[i].pac_data + pIn[i].u_topleft + (pC->m_params.m_inputCoord.m_x>>u32_shift)
+                        + (pC->m_params.m_inputCoord.m_y >> u32_shift) * pIn[i].u_stride;
+
+            /** Go at end of line/column in case X/Y scanning is flipped */
+            if(M4OSA_TRUE == pC->m_bFlipX)
+            {
+                pu8_data_in += ((pC->m_params.m_inputSize.m_width)>>u32_shift) -1 ;
+            }
+            if(M4OSA_TRUE == pC->m_bFlipY)
+            {
+                pu8_data_in += ((pC->m_params.m_inputSize.m_height>>u32_shift) -1) * pIn[i].u_stride;
+            }
+
+            /**< Initialize accumulators in case we are using it (bilinear interpolation) */
+            if( M4OSA_FALSE == pC->m_bOnlyCopy)
+            {
+                pC->u32_x_accum[i] = pC->u32_x_accum_start[i];
+                pC->u32_y_accum[i] = pC->u32_y_accum_start[i];
+            }
+
+        }
+        else
+        {
+            /**< In case of stripe mode for other than first stripe, we need to recover input pointer from internal context */
+            pu8_data_in = pC->pu8_data_in[i];
+        }
+
+        /**< In every mode, output data are at the beginning of the output plane */
+        pu8_data_out    = pOut[i].pac_data + pOut[i].u_topleft;
+
+        /**< Initialize input offset applied after each pixel */
+        if(M4OSA_FALSE == pC->m_bFlipY)
+        {
+            i32_tmp_offset = pIn[i].u_stride;
+        }
+        else
+        {
+            i32_tmp_offset = -pIn[i].u_stride;
+        }
+
+        /**< In this case, no bilinear interpolation is needed as input and output dimensions are the same */
+        if( M4OSA_TRUE == pC->m_bOnlyCopy)
+        {
+            /**< No +-90° rotation */
+            if(M4OSA_FALSE == pC->m_bRevertXY)
+            {
+                /**< No flip on X abscissa */
+                if(M4OSA_FALSE == pC->m_bFlipX)
+                {
+                     M4OSA_UInt32 loc_height = pOut[i].u_height;
+                     M4OSA_UInt32 loc_width = pOut[i].u_width;
+                     M4OSA_UInt32 loc_stride = pIn[i].u_stride;
+                    /**< Loop on each row */
+                    for (j=0; j<loc_height; j++)
+                    {
+                        /**< Copy one whole line */
+                        memcpy((void *)pu8_data_out, (void *)pu8_data_in, loc_width);
+
+                        /**< Update pointers */
+                        pu8_data_out += pOut[i].u_stride;
+                        if(M4OSA_FALSE == pC->m_bFlipY)
+                        {
+                            pu8_data_in += loc_stride;
+                        }
+                        else
+                        {
+                            pu8_data_in -= loc_stride;
+                        }
+                    }
+                }
+                else
+                {
+                    /**< Loop on each row */
+                    for(j=0;j<pOut[i].u_height;j++)
+                    {
+                        /**< Loop on each pixel of 1 row */
+                        for(k=0;k<pOut[i].u_width;k++)
+                        {
+                            *pu8_data_out++ = *pu8_data_in--;
+                        }
+
+                        /**< Update pointers */
+                        pu8_data_out += (pOut[i].u_stride - pOut[i].u_width);
+
+                        pu8_data_in += pOut[i].u_width + i32_tmp_offset;
+
+                    }
+                }
+            }
+            /**< Here we have a +-90° rotation */
+            else
+            {
+
+                /**< Loop on each row */
+                for(j=0;j<pOut[i].u_height;j++)
+                {
+                    pu8_data_in_tmp = pu8_data_in;
+
+                    /**< Loop on each pixel of 1 row */
+                    for(k=0;k<pOut[i].u_width;k++)
+                    {
+                        *pu8_data_out++ = *pu8_data_in_tmp;
+
+                        /**< Update input pointer in order to go to next/past line */
+                        pu8_data_in_tmp += i32_tmp_offset;
+                    }
+
+                    /**< Update pointers */
+                    pu8_data_out += (pOut[i].u_stride - pOut[i].u_width);
+                    if(M4OSA_FALSE == pC->m_bFlipX)
+                    {
+                        pu8_data_in ++;
+                    }
+                    else
+                    {
+                        pu8_data_in --;
+                    }
+                }
+            }
+        }
+        /**< Bilinear interpolation */
+        else
+        {
+
+        if(3 != i)  /**< other than alpha plane */
+        {
+            /**No +-90° rotation */
+            if(M4OSA_FALSE == pC->m_bRevertXY)
+            {
+
+                /**< Loop on each row */
+                for(j=0;j<pOut[i].u_height;j++)
+                {
+                    /* Vertical weight factor */
+                    u32_y_frac = (pC->u32_y_accum[i]>>12)&15;
+
+                    /* Reinit horizontal weight factor */
+                    u32_x_accum = pC->u32_x_accum_start[i];
+
+
+
+                        if(M4OSA_TRUE ==  pC->m_bFlipX)
+                        {
+
+                            /**< Loop on each output pixel in a row */
+                            for(k=0;k<pOut[i].u_width;k++)
+                            {
+
+                                u32_x_frac = (u32_x_accum >> 12)&15; /* Fraction of Horizontal weight factor */
+
+                                pu8_src_top = (pu8_data_in - (u32_x_accum >> 16)) -1 ;
+
+                                pu8_src_bottom = pu8_src_top + i32_tmp_offset;
+
+                                /* Weighted combination */
+                                u32_temp_value = (M4VIFI_UInt8)(((pu8_src_top[1]*(16-u32_x_frac) +
+                                                                 pu8_src_top[0]*u32_x_frac)*(16-u32_y_frac) +
+                                                                (pu8_src_bottom[1]*(16-u32_x_frac) +
+                                                                 pu8_src_bottom[0]*u32_x_frac)*u32_y_frac )>>8);
+
+                                *pu8_data_out++ = (M4VIFI_UInt8)u32_temp_value;
+
+                                /* Update horizontal accumulator */
+                                u32_x_accum += pC->u32_x_inc[i];
+                            }
+                        }
+
+                        else
+                        {
+                            /**< Loop on each output pixel in a row */
+                            for(k=0;k<pOut[i].u_width;k++)
+                            {
+                                u32_x_frac = (u32_x_accum >> 12)&15; /* Fraction of Horizontal weight factor */
+
+                                pu8_src_top = pu8_data_in + (u32_x_accum >> 16);
+
+                                pu8_src_bottom = pu8_src_top + i32_tmp_offset;
+
+                                /* Weighted combination */
+                                u32_temp_value = (M4VIFI_UInt8)(((pu8_src_top[0]*(16-u32_x_frac) +
+                                                                 pu8_src_top[1]*u32_x_frac)*(16-u32_y_frac) +
+                                                                (pu8_src_bottom[0]*(16-u32_x_frac) +
+                                                                 pu8_src_bottom[1]*u32_x_frac)*u32_y_frac )>>8);
+
+                                    *pu8_data_out++ = (M4VIFI_UInt8)u32_temp_value;
+
+                                /* Update horizontal accumulator */
+                                u32_x_accum += pC->u32_x_inc[i];
+                            }
+
+                        }
+
+                    pu8_data_out += pOut[i].u_stride - pOut[i].u_width;
+
+                    /* Update vertical accumulator */
+                    pC->u32_y_accum[i] += pC->u32_y_inc[i];
+                    if (pC->u32_y_accum[i]>>16)
+                    {
+                        pu8_data_in = pu8_data_in + (pC->u32_y_accum[i] >> 16) * i32_tmp_offset;
+                        pC->u32_y_accum[i] &= 0xffff;
+                    }
+                }
+        }
+            /** +-90° rotation */
+            else
+            {
+                pu8_data_in_org = pu8_data_in;
+
+                /**< Loop on each output row */
+                for(j=0;j<pOut[i].u_height;j++)
+                {
+                    /* horizontal weight factor */
+                    u32_x_frac = (pC->u32_x_accum[i]>>12)&15;
+
+                    /* Reinit accumulator */
+                    u32_y_accum = pC->u32_y_accum_start[i];
+
+                    if(M4OSA_TRUE ==  pC->m_bFlipX)
+                    {
+
+                        /**< Loop on each output pixel in a row */
+                        for(k=0;k<pOut[i].u_width;k++)
+                        {
+
+                            u32_y_frac = (u32_y_accum >> 12)&15; /* Vertical weight factor */
+
+
+                            pu8_src_top = (pu8_data_in - (pC->u32_x_accum[i] >> 16)) - 1;
+
+                            pu8_src_bottom = pu8_src_top + i32_tmp_offset;
+
+                            /* Weighted combination */
+                            u32_temp_value = (M4VIFI_UInt8)(((pu8_src_top[1]*(16-u32_x_frac) +
+                                                                 pu8_src_top[0]*u32_x_frac)*(16-u32_y_frac) +
+                                                                (pu8_src_bottom[1]*(16-u32_x_frac) +
+                                                                 pu8_src_bottom[0]*u32_x_frac)*u32_y_frac )>>8);
+
+                            *pu8_data_out++ = (M4VIFI_UInt8)u32_temp_value;
+
+                            /* Update vertical accumulator */
+                            u32_y_accum += pC->u32_y_inc[i];
+                            if (u32_y_accum>>16)
+                            {
+                                pu8_data_in = pu8_data_in + (u32_y_accum >> 16) * i32_tmp_offset;
+                                u32_y_accum &= 0xffff;
+                            }
+
+                        }
+                    }
+                    else
+                    {
+                        /**< Loop on each output pixel in a row */
+                        for(k=0;k<pOut[i].u_width;k++)
+                        {
+
+                            u32_y_frac = (u32_y_accum >> 12)&15; /* Vertical weight factor */
+
+                            pu8_src_top = pu8_data_in + (pC->u32_x_accum[i] >> 16);
+
+                            pu8_src_bottom = pu8_src_top + i32_tmp_offset;
+
+                            /* Weighted combination */
+                            u32_temp_value = (M4VIFI_UInt8)(((pu8_src_top[0]*(16-u32_x_frac) +
+                                                                 pu8_src_top[1]*u32_x_frac)*(16-u32_y_frac) +
+                                                                (pu8_src_bottom[0]*(16-u32_x_frac) +
+                                                                 pu8_src_bottom[1]*u32_x_frac)*u32_y_frac )>>8);
+
+                            *pu8_data_out++ = (M4VIFI_UInt8)u32_temp_value;
+
+                            /* Update vertical accumulator */
+                            u32_y_accum += pC->u32_y_inc[i];
+                            if (u32_y_accum>>16)
+                            {
+                                pu8_data_in = pu8_data_in + (u32_y_accum >> 16) * i32_tmp_offset;
+                                u32_y_accum &= 0xffff;
+                            }
+                        }
+                    }
+                    pu8_data_out += pOut[i].u_stride - pOut[i].u_width;
+
+                    /* Update horizontal accumulator */
+                    pC->u32_x_accum[i] += pC->u32_x_inc[i];
+
+                    pu8_data_in = pu8_data_in_org;
+                }
+
+            }
+            }/** 3 != i */
+            else
+            {
+            /**No +-90° rotation */
+            if(M4OSA_FALSE == pC->m_bRevertXY)
+            {
+
+                /**< Loop on each row */
+                for(j=0;j<pOut[i].u_height;j++)
+                {
+                    /* Vertical weight factor */
+                    u32_y_frac = (pC->u32_y_accum[i]>>12)&15;
+
+                    /* Reinit horizontal weight factor */
+                    u32_x_accum = pC->u32_x_accum_start[i];
+
+
+
+                        if(M4OSA_TRUE ==  pC->m_bFlipX)
+                        {
+
+                            /**< Loop on each output pixel in a row */
+                            for(k=0;k<pOut[i].u_width;k++)
+                            {
+
+                                u32_x_frac = (u32_x_accum >> 12)&15; /* Fraction of Horizontal weight factor */
+
+                                pu8_src_top = (pu8_data_in - (u32_x_accum >> 16)) -1 ;
+
+                                pu8_src_bottom = pu8_src_top + i32_tmp_offset;
+
+                                /* Weighted combination */
+                                u32_temp_value = (M4VIFI_UInt8)(((pu8_src_top[1]*(16-u32_x_frac) +
+                                                                 pu8_src_top[0]*u32_x_frac)*(16-u32_y_frac) +
+                                                                (pu8_src_bottom[1]*(16-u32_x_frac) +
+                                                                 pu8_src_bottom[0]*u32_x_frac)*u32_y_frac )>>8);
+
+                                u32_temp_value= (u32_temp_value >> 7)*0xff;
+
+                                *pu8_data_out++ = (M4VIFI_UInt8)u32_temp_value;
+
+                                /* Update horizontal accumulator */
+                                u32_x_accum += pC->u32_x_inc[i];
+                            }
+                        }
+
+                        else
+                        {
+                            /**< Loop on each output pixel in a row */
+                            for(k=0;k<pOut[i].u_width;k++)
+                            {
+                                u32_x_frac = (u32_x_accum >> 12)&15; /* Fraction of Horizontal weight factor */
+
+                                pu8_src_top = pu8_data_in + (u32_x_accum >> 16);
+
+                                pu8_src_bottom = pu8_src_top + i32_tmp_offset;
+
+                                /* Weighted combination */
+                                u32_temp_value = (M4VIFI_UInt8)(((pu8_src_top[0]*(16-u32_x_frac) +
+                                                                 pu8_src_top[1]*u32_x_frac)*(16-u32_y_frac) +
+                                                                (pu8_src_bottom[0]*(16-u32_x_frac) +
+                                                                 pu8_src_bottom[1]*u32_x_frac)*u32_y_frac )>>8);
+
+                                u32_temp_value= (u32_temp_value >> 7)*0xff;
+
+                                *pu8_data_out++ = (M4VIFI_UInt8)u32_temp_value;
+
+                                /* Update horizontal accumulator */
+                                u32_x_accum += pC->u32_x_inc[i];
+                            }
+
+                        }
+
+                    pu8_data_out += pOut[i].u_stride - pOut[i].u_width;
+
+                    /* Update vertical accumulator */
+                    pC->u32_y_accum[i] += pC->u32_y_inc[i];
+                    if (pC->u32_y_accum[i]>>16)
+                    {
+                        pu8_data_in = pu8_data_in + (pC->u32_y_accum[i] >> 16) * i32_tmp_offset;
+                        pC->u32_y_accum[i] &= 0xffff;
+                    }
+                }
+
+            } /**< M4OSA_FALSE == pC->m_bRevertXY */
+            /** +-90° rotation */
+            else
+            {
+                pu8_data_in_org = pu8_data_in;
+
+                /**< Loop on each output row */
+                for(j=0;j<pOut[i].u_height;j++)
+                {
+                    /* horizontal weight factor */
+                    u32_x_frac = (pC->u32_x_accum[i]>>12)&15;
+
+                    /* Reinit accumulator */
+                    u32_y_accum = pC->u32_y_accum_start[i];
+
+                    if(M4OSA_TRUE ==  pC->m_bFlipX)
+                    {
+
+                        /**< Loop on each output pixel in a row */
+                        for(k=0;k<pOut[i].u_width;k++)
+                        {
+
+                            u32_y_frac = (u32_y_accum >> 12)&15; /* Vertical weight factor */
+
+
+                            pu8_src_top = (pu8_data_in - (pC->u32_x_accum[i] >> 16)) - 1;
+
+                            pu8_src_bottom = pu8_src_top + i32_tmp_offset;
+
+                            /* Weighted combination */
+                            u32_temp_value = (M4VIFI_UInt8)(((pu8_src_top[1]*(16-u32_x_frac) +
+                                                                 pu8_src_top[0]*u32_x_frac)*(16-u32_y_frac) +
+                                                                (pu8_src_bottom[1]*(16-u32_x_frac) +
+                                                                 pu8_src_bottom[0]*u32_x_frac)*u32_y_frac )>>8);
+
+                            u32_temp_value= (u32_temp_value >> 7)*0xff;
+
+                            *pu8_data_out++ = (M4VIFI_UInt8)u32_temp_value;
+
+                            /* Update vertical accumulator */
+                            u32_y_accum += pC->u32_y_inc[i];
+                            if (u32_y_accum>>16)
+                            {
+                                pu8_data_in = pu8_data_in + (u32_y_accum >> 16) * i32_tmp_offset;
+                                u32_y_accum &= 0xffff;
+                            }
+
+                        }
+                    }
+                    else
+                    {
+                        /**< Loop on each output pixel in a row */
+                        for(k=0;k<pOut[i].u_width;k++)
+                        {
+
+                            u32_y_frac = (u32_y_accum >> 12)&15; /* Vertical weight factor */
+
+                            pu8_src_top = pu8_data_in + (pC->u32_x_accum[i] >> 16);
+
+                            pu8_src_bottom = pu8_src_top + i32_tmp_offset;
+
+                            /* Weighted combination */
+                            u32_temp_value = (M4VIFI_UInt8)(((pu8_src_top[0]*(16-u32_x_frac) +
+                                                                 pu8_src_top[1]*u32_x_frac)*(16-u32_y_frac) +
+                                                                (pu8_src_bottom[0]*(16-u32_x_frac) +
+                                                                 pu8_src_bottom[1]*u32_x_frac)*u32_y_frac )>>8);
+
+                            u32_temp_value= (u32_temp_value >> 7)*0xff;
+
+                            *pu8_data_out++ = (M4VIFI_UInt8)u32_temp_value;
+
+                            /* Update vertical accumulator */
+                            u32_y_accum += pC->u32_y_inc[i];
+                            if (u32_y_accum>>16)
+                            {
+                                pu8_data_in = pu8_data_in + (u32_y_accum >> 16) * i32_tmp_offset;
+                                u32_y_accum &= 0xffff;
+                            }
+                        }
+                    }
+                    pu8_data_out += pOut[i].u_stride - pOut[i].u_width;
+
+                    /* Update horizontal accumulator */
+                    pC->u32_x_accum[i] += pC->u32_x_inc[i];
+
+                    pu8_data_in = pu8_data_in_org;
+
+                }
+                } /**< M4OSA_TRUE == pC->m_bRevertXY */
+        }/** 3 == i */
+            }
+        /**< In case of stripe mode, save current input pointer */
+        if(M4OSA_TRUE == pC->m_params.m_bOutputStripe)
+        {
+            pC->pu8_data_in[i] = pu8_data_in;
+        }
+    }
+
+    /**< Update number of processed rows, reset it if we have finished with the whole processing */
+    pC->m_procRows += pOut[0].u_height;
+    if(M4OSA_FALSE == pC->m_bRevertXY)
+    {
+        if(pC->m_params.m_outputSize.m_height <= pC->m_procRows)    pC->m_procRows = 0;
+    }
+    else
+    {
+        if(pC->m_params.m_outputSize.m_width <= pC->m_procRows) pC->m_procRows = 0;
+    }
+
+    return M4NO_ERROR ;
+
+}
+/*+ Handle the image files here */
+
+/**
+ ******************************************************************************
+ * M4OSA_ERR LvGetImageThumbNail(M4OSA_UChar *fileName, M4OSA_Void **pBuffer)
+ * @brief   This function gives YUV420 buffer of a given image file (in argb888 format)
+ * @Note: The caller of the function is responsible to free the yuv buffer allocated
+ * @param   fileName:       (IN) Path to the filename of the image argb data
+ * @param   height:     (IN) Height of the image
+ * @param     width:             (OUT) pBuffer pointer to the address where the yuv data address needs to be returned.
+ * @return  M4NO_ERROR: there is no error
+ * @return  M4ERR_ALLOC: No more memory space to add a new effect.
+ * @return  M4ERR_FILE_NOT_FOUND: if the file passed does not exists.
+ ******************************************************************************
+*/
+M4OSA_ERR LvGetImageThumbNail(const char *fileName, M4OSA_UInt32 height, M4OSA_UInt32 width, M4OSA_Void **pBuffer) {
+
+    M4VIFI_ImagePlane rgbPlane, *yuvPlane;
+    M4OSA_UInt32 frameSize_argb = (width * height * 4); // argb data
+    M4OSA_Context lImageFileFp  = M4OSA_NULL;
+    M4OSA_ERR err = M4NO_ERROR;
+
+    M4OSA_UInt8 *pTmpData = (M4OSA_UInt8*) M4OSA_32bitAlignedMalloc(frameSize_argb, M4VS, (M4OSA_Char*)"Image argb data");
+    if(pTmpData == M4OSA_NULL) {
+        LOGE("Failed to allocate memory for Image clip");
+        return M4ERR_ALLOC;
+    }
+
+       /** Read the argb data from the passed file. */
+    M4OSA_ERR lerr = M4OSA_fileReadOpen(&lImageFileFp, (M4OSA_Void *) fileName, M4OSA_kFileRead);
+
+    if((lerr != M4NO_ERROR) || (lImageFileFp == M4OSA_NULL))
+    {
+        LOGE("LVPreviewController: Can not open the file ");
+        free(pTmpData);
+        return M4ERR_FILE_NOT_FOUND;
+    }
+    lerr = M4OSA_fileReadData(lImageFileFp, (M4OSA_MemAddr8)pTmpData, &frameSize_argb);
+    if(lerr != M4NO_ERROR)
+    {
+        LOGE("LVPreviewController: can not read the data ");
+        M4OSA_fileReadClose(lImageFileFp);
+        free(pTmpData);
+        return lerr;
+    }
+    M4OSA_fileReadClose(lImageFileFp);
+
+    M4OSA_UInt32 frameSize = (width * height * 3); //Size of YUV420 data.
+    rgbPlane.pac_data = (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc(frameSize, M4VS, (M4OSA_Char*)"Image clip RGB888 data");
+    if(rgbPlane.pac_data == M4OSA_NULL)
+    {
+        LOGE("Failed to allocate memory for Image clip");
+        free(pTmpData);
+        return M4ERR_ALLOC;
+    }
+
+    /** Remove the alpha channel */
+    for (M4OSA_UInt32 i=0, j = 0; i < frameSize_argb; i++) {
+        if ((i % 4) == 0) continue;
+        rgbPlane.pac_data[j] = pTmpData[i];
+        j++;
+    }
+    free(pTmpData);
+
+#ifdef FILE_DUMP
+    FILE *fp = fopen("/sdcard/Input/test_rgb.raw", "wb");
+    if(fp == NULL)
+        LOGE("Errors file can not be created");
+    else {
+        fwrite(rgbPlane.pac_data, frameSize, 1, fp);
+        fclose(fp);
+    }
+#endif
+        rgbPlane.u_height = height;
+        rgbPlane.u_width = width;
+        rgbPlane.u_stride = width*3;
+        rgbPlane.u_topleft = 0;
+
+        yuvPlane = (M4VIFI_ImagePlane*)M4OSA_32bitAlignedMalloc(3*sizeof(M4VIFI_ImagePlane),
+                M4VS, (M4OSA_Char*)"M4xVSS_internalConvertRGBtoYUV: Output plane YUV");
+        yuvPlane[0].u_height = height;
+        yuvPlane[0].u_width = width;
+        yuvPlane[0].u_stride = width;
+        yuvPlane[0].u_topleft = 0;
+        yuvPlane[0].pac_data = (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc(yuvPlane[0].u_height * yuvPlane[0].u_width * 1.5, M4VS, (M4OSA_Char*)"imageClip YUV data");
+
+        yuvPlane[1].u_height = yuvPlane[0].u_height >>1;
+        yuvPlane[1].u_width = yuvPlane[0].u_width >> 1;
+        yuvPlane[1].u_stride = yuvPlane[1].u_width;
+        yuvPlane[1].u_topleft = 0;
+        yuvPlane[1].pac_data = (M4VIFI_UInt8*)(yuvPlane[0].pac_data + yuvPlane[0].u_height * yuvPlane[0].u_width);
+
+        yuvPlane[2].u_height = yuvPlane[0].u_height >>1;
+        yuvPlane[2].u_width = yuvPlane[0].u_width >> 1;
+        yuvPlane[2].u_stride = yuvPlane[2].u_width;
+        yuvPlane[2].u_topleft = 0;
+        yuvPlane[2].pac_data = (M4VIFI_UInt8*)(yuvPlane[1].pac_data + yuvPlane[1].u_height * yuvPlane[1].u_width);
+
+
+        err = M4VIFI_RGB888toYUV420(M4OSA_NULL, &rgbPlane, yuvPlane);
+        //err = M4VIFI_BGR888toYUV420(M4OSA_NULL, &rgbPlane, yuvPlane);
+        if(err != M4NO_ERROR)
+        {
+            LOGE("error when converting from RGB to YUV: 0x%x\n", (unsigned int)err);
+        }
+        free(rgbPlane.pac_data);
+
+        //LOGE("RGB to YUV done");
+#ifdef FILE_DUMP
+        FILE *fp1 = fopen("/sdcard/Input/test_yuv.raw", "wb");
+        if(fp1 == NULL)
+            LOGE("Errors file can not be created");
+        else {
+            fwrite(yuvPlane[0].pac_data, yuvPlane[0].u_height * yuvPlane[0].u_width * 1.5, 1, fp1);
+            fclose(fp1);
+        }
+#endif
+        *pBuffer = yuvPlane[0].pac_data;
+        free(yuvPlane);
+        return M4NO_ERROR;
+
+}
+M4OSA_Void prepareYUV420ImagePlane(M4VIFI_ImagePlane *plane,
+    M4OSA_UInt32 width, M4OSA_UInt32 height, M4VIFI_UInt8 *buffer,
+    M4OSA_UInt32 reportedWidth, M4OSA_UInt32 reportedHeight) {
+
+    //Y plane
+    plane[0].u_width = width;
+    plane[0].u_height = height;
+    plane[0].u_stride = reportedWidth;
+    plane[0].u_topleft = 0;
+    plane[0].pac_data = buffer;
+
+    // U plane
+    plane[1].u_width = width/2;
+    plane[1].u_height = height/2;
+    plane[1].u_stride = reportedWidth >> 1;
+    plane[1].u_topleft = 0;
+    plane[1].pac_data = buffer+(reportedWidth*reportedHeight);
+
+    // V Plane
+    plane[2].u_width = width/2;
+    plane[2].u_height = height/2;
+    plane[2].u_stride = reportedWidth >> 1;
+    plane[2].u_topleft = 0;
+    plane[2].pac_data = plane[1].pac_data + ((reportedWidth/2)*(reportedHeight/2));
+}
+
+M4OSA_Void prepareYV12ImagePlane(M4VIFI_ImagePlane *plane,
+    M4OSA_UInt32 width, M4OSA_UInt32 height, M4OSA_UInt32 stride,
+    M4VIFI_UInt8 *buffer) {
+
+    //Y plane
+    plane[0].u_width = width;
+    plane[0].u_height = height;
+    plane[0].u_stride = stride;
+    plane[0].u_topleft = 0;
+    plane[0].pac_data = buffer;
+
+    // U plane
+    plane[1].u_width = width/2;
+    plane[1].u_height = height/2;
+    plane[1].u_stride = android::PreviewRenderer::ALIGN(plane[0].u_stride/2, 16);
+    plane[1].u_topleft = 0;
+    plane[1].pac_data = (buffer
+                + plane[0].u_height * plane[0].u_stride
+                + (plane[0].u_height/2) * android::PreviewRenderer::ALIGN((
+                 plane[0].u_stride / 2), 16));
+
+    // V Plane
+    plane[2].u_width = width/2;
+    plane[2].u_height = height/2;
+    plane[2].u_stride = android::PreviewRenderer::ALIGN(plane[0].u_stride/2, 16);
+    plane[2].u_topleft = 0;
+    plane[2].pac_data = (buffer +
+     plane[0].u_height * android::PreviewRenderer::ALIGN(plane[0].u_stride, 16));
+
+
+}
+
+M4OSA_Void swapImagePlanes(
+    M4VIFI_ImagePlane *planeIn, M4VIFI_ImagePlane *planeOut,
+    M4VIFI_UInt8 *buffer1, M4VIFI_UInt8 *buffer2) {
+
+    planeIn[0].u_height = planeOut[0].u_height;
+    planeIn[0].u_width = planeOut[0].u_width;
+    planeIn[0].u_stride = planeOut[0].u_stride;
+    planeIn[0].u_topleft = planeOut[0].u_topleft;
+    planeIn[0].pac_data = planeOut[0].pac_data;
+
+    /**
+     * U plane */
+    planeIn[1].u_width = planeOut[1].u_width;
+    planeIn[1].u_height = planeOut[1].u_height;
+    planeIn[1].u_stride = planeOut[1].u_stride;
+    planeIn[1].u_topleft = planeOut[1].u_topleft;
+    planeIn[1].pac_data = planeOut[1].pac_data;
+    /**
+     * V Plane */
+    planeIn[2].u_width = planeOut[2].u_width;
+    planeIn[2].u_height = planeOut[2].u_height;
+    planeIn[2].u_stride = planeOut[2].u_stride;
+    planeIn[2].u_topleft = planeOut[2].u_topleft;
+    planeIn[2].pac_data = planeOut[2].pac_data;
+
+    if(planeOut[0].pac_data == (M4VIFI_UInt8*)buffer1)
+    {
+        planeOut[0].pac_data = (M4VIFI_UInt8*)buffer2;
+        planeOut[1].pac_data = (M4VIFI_UInt8*)(buffer2 +
+         planeOut[0].u_width*planeOut[0].u_height);
+
+        planeOut[2].pac_data = (M4VIFI_UInt8*)(buffer2 +
+         planeOut[0].u_width*planeOut[0].u_height +
+         planeOut[1].u_width*planeOut[1].u_height);
+    }
+    else
+    {
+        planeOut[0].pac_data = (M4VIFI_UInt8*)buffer1;
+        planeOut[1].pac_data = (M4VIFI_UInt8*)(buffer1 +
+         planeOut[0].u_width*planeOut[0].u_height);
+
+        planeOut[2].pac_data = (M4VIFI_UInt8*)(buffer1 +
+         planeOut[0].u_width*planeOut[0].u_height +
+         planeOut[1].u_width*planeOut[1].u_height);
+    }
+
+}
+
+M4OSA_Void computePercentageDone(
+    M4OSA_UInt32 ctsMs, M4OSA_UInt32 effectStartTimeMs,
+    M4OSA_UInt32 effectDuration, M4OSA_Double *percentageDone) {
+
+    M4OSA_Double videoEffectTime =0;
+
+    // Compute how far from the beginning of the effect we are, in clip-base time.
+    videoEffectTime =
+     (M4OSA_Int32)(ctsMs+ 0.5) - effectStartTimeMs;
+
+    // To calculate %, substract timeIncrement
+    // because effect should finish on the last frame
+    // which is from CTS = (eof-timeIncrement) till CTS = eof
+    *percentageDone =
+     videoEffectTime / ((M4OSA_Float)effectDuration);
+
+    if(*percentageDone < 0.0) *percentageDone = 0.0;
+    if(*percentageDone > 1.0) *percentageDone = 1.0;
+
+}
+
+
+M4OSA_Void computeProgressForVideoEffect(
+    M4OSA_UInt32 ctsMs, M4OSA_UInt32 effectStartTimeMs,
+    M4OSA_UInt32 effectDuration, M4VSS3GPP_ExternalProgress* extProgress) {
+
+    M4OSA_Double percentageDone =0;
+
+    computePercentageDone(ctsMs, effectStartTimeMs, effectDuration, &percentageDone);
+
+    extProgress->uiProgress = (M4OSA_UInt32)( percentageDone * 1000 );
+    extProgress->uiOutputTime = (M4OSA_UInt32)(ctsMs + 0.5);
+    extProgress->uiClipTime = extProgress->uiOutputTime;
+    extProgress->bIsLast = M4OSA_FALSE;
+}
+
+M4OSA_ERR prepareFramingStructure(
+    M4xVSS_FramingStruct* framingCtx,
+    M4VSS3GPP_EffectSettings* effectsSettings, M4OSA_UInt32 index,
+    M4VIFI_UInt8* overlayRGB, M4VIFI_UInt8* overlayYUV) {
+
+    M4OSA_ERR err = M4NO_ERROR;
+
+    // Force input RGB buffer to even size to avoid errors in YUV conversion
+    framingCtx->FramingRgb = effectsSettings[index].xVSS.pFramingBuffer;
+    framingCtx->FramingRgb->u_width = framingCtx->FramingRgb->u_width & ~1;
+    framingCtx->FramingRgb->u_height = framingCtx->FramingRgb->u_height & ~1;
+    framingCtx->FramingYuv = NULL;
+
+    framingCtx->duration = effectsSettings[index].uiDuration;
+    framingCtx->topleft_x = effectsSettings[index].xVSS.topleft_x;
+    framingCtx->topleft_y = effectsSettings[index].xVSS.topleft_y;
+    framingCtx->pCurrent = framingCtx;
+    framingCtx->pNext = framingCtx;
+    framingCtx->previousClipTime = -1;
+
+    framingCtx->alphaBlendingStruct =
+     (M4xVSS_internalEffectsAlphaBlending*)M4OSA_32bitAlignedMalloc(
+      sizeof(M4xVSS_internalEffectsAlphaBlending), M4VS,
+      (M4OSA_Char*)"alpha blending struct");
+
+    framingCtx->alphaBlendingStruct->m_fadeInTime =
+     effectsSettings[index].xVSS.uialphaBlendingFadeInTime;
+
+    framingCtx->alphaBlendingStruct->m_fadeOutTime =
+     effectsSettings[index].xVSS.uialphaBlendingFadeOutTime;
+
+    framingCtx->alphaBlendingStruct->m_end =
+     effectsSettings[index].xVSS.uialphaBlendingEnd;
+
+    framingCtx->alphaBlendingStruct->m_middle =
+     effectsSettings[index].xVSS.uialphaBlendingMiddle;
+
+    framingCtx->alphaBlendingStruct->m_start =
+     effectsSettings[index].xVSS.uialphaBlendingStart;
+
+    // If new Overlay buffer, convert from RGB to YUV
+    if((overlayRGB != framingCtx->FramingRgb->pac_data) || (overlayYUV == NULL) ) {
+
+        // If YUV buffer exists, delete it
+        if(overlayYUV != NULL) {
+           free(overlayYUV);
+           overlayYUV = NULL;
+        }
+    if(effectsSettings[index].xVSS.rgbType == M4VSS3GPP_kRGB565) {
+        // Input RGB565 plane is provided,
+        // let's convert it to YUV420, and update framing structure
+        err = M4xVSS_internalConvertRGBtoYUV(framingCtx);
+    }
+    else if(effectsSettings[index].xVSS.rgbType == M4VSS3GPP_kRGB888) {
+        // Input RGB888 plane is provided,
+        // let's convert it to YUV420, and update framing structure
+        err = M4xVSS_internalConvertRGB888toYUV(framingCtx);
+    }
+    else {
+        err = M4ERR_PARAMETER;
+    }
+        overlayYUV = framingCtx->FramingYuv[0].pac_data;
+        overlayRGB = framingCtx->FramingRgb->pac_data;
+
+    }
+    else {
+        LOGV(" YUV buffer reuse");
+        framingCtx->FramingYuv = (M4VIFI_ImagePlane*)M4OSA_32bitAlignedMalloc(
+            3*sizeof(M4VIFI_ImagePlane), M4VS, (M4OSA_Char*)"YUV");
+
+        if(framingCtx->FramingYuv == M4OSA_NULL) {
+            return M4ERR_ALLOC;
+        }
+
+        framingCtx->FramingYuv[0].u_width = framingCtx->FramingRgb->u_width;
+        framingCtx->FramingYuv[0].u_height = framingCtx->FramingRgb->u_height;
+        framingCtx->FramingYuv[0].u_topleft = 0;
+        framingCtx->FramingYuv[0].u_stride = framingCtx->FramingRgb->u_width;
+        framingCtx->FramingYuv[0].pac_data = (M4VIFI_UInt8*)overlayYUV;
+
+        framingCtx->FramingYuv[1].u_width = (framingCtx->FramingRgb->u_width)>>1;
+        framingCtx->FramingYuv[1].u_height = (framingCtx->FramingRgb->u_height)>>1;
+        framingCtx->FramingYuv[1].u_topleft = 0;
+        framingCtx->FramingYuv[1].u_stride = (framingCtx->FramingRgb->u_width)>>1;
+        framingCtx->FramingYuv[1].pac_data = framingCtx->FramingYuv[0].pac_data +
+            framingCtx->FramingYuv[0].u_width * framingCtx->FramingYuv[0].u_height;
+
+        framingCtx->FramingYuv[2].u_width = (framingCtx->FramingRgb->u_width)>>1;
+        framingCtx->FramingYuv[2].u_height = (framingCtx->FramingRgb->u_height)>>1;
+        framingCtx->FramingYuv[2].u_topleft = 0;
+        framingCtx->FramingYuv[2].u_stride = (framingCtx->FramingRgb->u_width)>>1;
+        framingCtx->FramingYuv[2].pac_data = framingCtx->FramingYuv[1].pac_data +
+            framingCtx->FramingYuv[1].u_width * framingCtx->FramingYuv[1].u_height;
+
+        framingCtx->duration = 0;
+        framingCtx->previousClipTime = -1;
+        framingCtx->previewOffsetClipTime = -1;
+
+    }
+    return err;
+}
+
+M4OSA_ERR applyColorEffect(M4xVSS_VideoEffectType colorEffect,
+    M4VIFI_ImagePlane *planeIn, M4VIFI_ImagePlane *planeOut,
+    M4VIFI_UInt8 *buffer1, M4VIFI_UInt8 *buffer2, M4OSA_UInt16 rgbColorData) {
+
+    M4xVSS_ColorStruct colorContext;
+    M4OSA_ERR err = M4NO_ERROR;
+
+    colorContext.colorEffectType = colorEffect;
+    colorContext.rgb16ColorData = rgbColorData;
+
+    err = M4VSS3GPP_externalVideoEffectColor(
+     (M4OSA_Void *)&colorContext, planeIn, planeOut, NULL,
+     colorEffect);
+
+    if(err != M4NO_ERROR) {
+        LOGV("M4VSS3GPP_externalVideoEffectColor(%d) error %d",
+            colorEffect, err);
+
+        if(NULL != buffer1) {
+            free(buffer1);
+            buffer1 = NULL;
+        }
+        if(NULL != buffer2) {
+            free(buffer2);
+            buffer2 = NULL;
+        }
+        return err;
+    }
+
+    // The out plane now becomes the in plane for adding other effects
+    swapImagePlanes(planeIn, planeOut, buffer1, buffer2);
+
+    return err;
+}
+
+M4OSA_ERR applyLumaEffect(M4VSS3GPP_VideoEffectType videoEffect,
+    M4VIFI_ImagePlane *planeIn, M4VIFI_ImagePlane *planeOut,
+    M4VIFI_UInt8 *buffer1, M4VIFI_UInt8 *buffer2, M4OSA_Int32 lum_factor) {
+
+    M4OSA_ERR err = M4NO_ERROR;
+
+    err = M4VFL_modifyLumaWithScale(
+         (M4ViComImagePlane*)planeIn,(M4ViComImagePlane*)planeOut,
+         lum_factor, NULL);
+
+    if(err != M4NO_ERROR) {
+        LOGE("M4VFL_modifyLumaWithScale(%d) error %d", videoEffect, (int)err);
+
+        if(NULL != buffer1) {
+            free(buffer1);
+            buffer1= NULL;
+        }
+        if(NULL != buffer2) {
+            free(buffer2);
+            buffer2= NULL;
+        }
+        return err;
+    }
+
+    // The out plane now becomes the in plane for adding other effects
+    swapImagePlanes(planeIn, planeOut,(M4VIFI_UInt8 *)buffer1,
+     (M4VIFI_UInt8 *)buffer2);
+
+    return err;
+}
+
+M4OSA_ERR applyCurtainEffect(M4VSS3GPP_VideoEffectType videoEffect,
+    M4VIFI_ImagePlane *planeIn, M4VIFI_ImagePlane *planeOut,
+    M4VIFI_UInt8 *buffer1, M4VIFI_UInt8 *buffer2,
+    M4VFL_CurtainParam* curtainParams) {
+
+    M4OSA_ERR err = M4NO_ERROR;
+
+    // Apply the curtain effect
+    err = M4VFL_applyCurtain( (M4ViComImagePlane*)planeIn,
+          (M4ViComImagePlane*)planeOut, curtainParams, NULL);
+    if(err != M4NO_ERROR) {
+        LOGE("M4VFL_applyCurtain(%d) error %d", videoEffect, (int)err);
+
+        if(NULL != buffer1) {
+            free(buffer1);
+            buffer1= NULL;
+        }
+        if(NULL != buffer2) {
+            free(buffer2);
+            buffer2 = NULL;
+        }
+        return err;
+    }
+
+    // The out plane now becomes the in plane for adding other effects
+    swapImagePlanes(planeIn, planeOut,(M4VIFI_UInt8 *)buffer1,
+    (M4VIFI_UInt8 *)buffer2);
+
+    return err;
+}
+
+M4OSA_ERR applyEffectsAndRenderingMode(vePostProcessParams *params,
+    M4OSA_UInt32 reportedWidth, M4OSA_UInt32 reportedHeight) {
+
+    M4OSA_ERR err = M4NO_ERROR;
+    M4VIFI_ImagePlane planeIn[3], planeOut[3];
+    M4VIFI_UInt8 *finalOutputBuffer = NULL, *tempOutputBuffer= NULL;
+    M4OSA_Double percentageDone =0;
+    M4OSA_Int32 lum_factor;
+    M4VFL_CurtainParam curtainParams;
+    M4VSS3GPP_ExternalProgress extProgress;
+    M4xVSS_FiftiesStruct fiftiesCtx;
+    M4OSA_UInt32 frameSize = 0, i=0;
+
+    frameSize = (params->videoWidth*params->videoHeight*3) >> 1;
+
+    finalOutputBuffer = (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc(frameSize, M4VS,
+     (M4OSA_Char*)("lvpp finalOutputBuffer"));
+
+    if(finalOutputBuffer == NULL) {
+        LOGE("applyEffectsAndRenderingMode: malloc error");
+        return M4ERR_ALLOC;
+    }
+
+    // allocate the tempOutputBuffer
+    tempOutputBuffer = (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc(
+     ((params->videoHeight*params->videoWidth*3)>>1), M4VS, (M4OSA_Char*)("lvpp colorBuffer"));
+
+    if(tempOutputBuffer == NULL) {
+        LOGE("applyEffectsAndRenderingMode: malloc error tempOutputBuffer");
+        if(NULL != finalOutputBuffer) {
+            free(finalOutputBuffer);
+            finalOutputBuffer = NULL;
+        }
+        return M4ERR_ALLOC;
+    }
+
+    // Initialize the In plane
+    prepareYUV420ImagePlane(planeIn, params->videoWidth, params->videoHeight,
+       params->vidBuffer, reportedWidth, reportedHeight);
+
+    // Initialize the Out plane
+    prepareYUV420ImagePlane(planeOut, params->videoWidth, params->videoHeight,
+       (M4VIFI_UInt8 *)tempOutputBuffer, params->videoWidth, params->videoHeight);
+
+    // The planeIn contains the YUV420 input data to postprocessing node
+    // and planeOut will contain the YUV420 data with effect
+    // In each successive if condition, apply filter to successive
+    // output YUV frame so that concurrent effects are both applied
+
+    if(params->currentVideoEffect & VIDEO_EFFECT_BLACKANDWHITE) {
+        err = applyColorEffect(M4xVSS_kVideoEffectType_BlackAndWhite,
+              planeIn, planeOut, (M4VIFI_UInt8 *)finalOutputBuffer,
+              (M4VIFI_UInt8 *)tempOutputBuffer, 0);
+        if(err != M4NO_ERROR) {
+            return err;
+        }
+    }
+
+    if(params->currentVideoEffect & VIDEO_EFFECT_PINK) {
+        err = applyColorEffect(M4xVSS_kVideoEffectType_Pink,
+              planeIn, planeOut, (M4VIFI_UInt8 *)finalOutputBuffer,
+              (M4VIFI_UInt8 *)tempOutputBuffer, 0);
+        if(err != M4NO_ERROR) {
+            return err;
+        }
+    }
+
+    if(params->currentVideoEffect & VIDEO_EFFECT_GREEN) {
+        err = applyColorEffect(M4xVSS_kVideoEffectType_Green,
+              planeIn, planeOut, (M4VIFI_UInt8 *)finalOutputBuffer,
+              (M4VIFI_UInt8 *)tempOutputBuffer, 0);
+        if(err != M4NO_ERROR) {
+            return err;
+        }
+    }
+
+    if(params->currentVideoEffect & VIDEO_EFFECT_SEPIA) {
+        err = applyColorEffect(M4xVSS_kVideoEffectType_Sepia,
+              planeIn, planeOut, (M4VIFI_UInt8 *)finalOutputBuffer,
+              (M4VIFI_UInt8 *)tempOutputBuffer, 0);
+        if(err != M4NO_ERROR) {
+            return err;
+        }
+    }
+
+    if(params->currentVideoEffect & VIDEO_EFFECT_NEGATIVE) {
+        err = applyColorEffect(M4xVSS_kVideoEffectType_Negative,
+              planeIn, planeOut, (M4VIFI_UInt8 *)finalOutputBuffer,
+              (M4VIFI_UInt8 *)tempOutputBuffer, 0);
+        if(err != M4NO_ERROR) {
+            return err;
+        }
+    }
+
+    if(params->currentVideoEffect & VIDEO_EFFECT_GRADIENT) {
+        // find the effect in effectSettings array
+        for(i=0;i<params->numberEffects;i++) {
+            if(params->effectsSettings[i].VideoEffectType ==
+             (M4VSS3GPP_VideoEffectType)M4xVSS_kVideoEffectType_Gradient)
+                break;
+        }
+        err = applyColorEffect(M4xVSS_kVideoEffectType_Gradient,
+              planeIn, planeOut, (M4VIFI_UInt8 *)finalOutputBuffer,
+              (M4VIFI_UInt8 *)tempOutputBuffer,
+              params->effectsSettings[i].xVSS.uiRgb16InputColor);
+        if(err != M4NO_ERROR) {
+            return err;
+        }
+    }
+
+    if(params->currentVideoEffect & VIDEO_EFFECT_COLOR_RGB16) {
+        // Find the effect in effectSettings array
+        for(i=0;i<params->numberEffects;i++) {
+            if(params->effectsSettings[i].VideoEffectType ==
+             (M4VSS3GPP_VideoEffectType)M4xVSS_kVideoEffectType_ColorRGB16)
+                break;
+        }
+        err = applyColorEffect(M4xVSS_kVideoEffectType_ColorRGB16,
+              planeIn, planeOut, (M4VIFI_UInt8 *)finalOutputBuffer,
+              (M4VIFI_UInt8 *)tempOutputBuffer,
+              params->effectsSettings[i].xVSS.uiRgb16InputColor);
+        if(err != M4NO_ERROR) {
+            return err;
+        }
+    }
+
+    if(params->currentVideoEffect & VIDEO_EFFECT_FIFTIES) {
+        // Find the effect in effectSettings array
+        for(i=0;i<params->numberEffects;i++) {
+            if(params->effectsSettings[i].VideoEffectType ==
+             (M4VSS3GPP_VideoEffectType)M4xVSS_kVideoEffectType_Fifties)
+                break;
+        }
+        if(i < params->numberEffects) {
+            computeProgressForVideoEffect(params->timeMs,
+             params->effectsSettings[i].uiStartTime,
+             params->effectsSettings[i].uiDuration, &extProgress);
+
+            if(params->isFiftiesEffectStarted) {
+                fiftiesCtx.previousClipTime = -1;
+            }
+            fiftiesCtx.fiftiesEffectDuration =
+             1000/params->effectsSettings[i].xVSS.uiFiftiesOutFrameRate;
+
+            fiftiesCtx.shiftRandomValue = 0;
+            fiftiesCtx.stripeRandomValue = 0;
+
+            err = M4VSS3GPP_externalVideoEffectFifties(
+             (M4OSA_Void *)&fiftiesCtx, planeIn, planeOut, &extProgress,
+             M4xVSS_kVideoEffectType_Fifties);
+
+            if(err != M4NO_ERROR) {
+                LOGE("M4VSS3GPP_externalVideoEffectFifties error 0x%x", (unsigned int)err);
+
+                if(NULL != finalOutputBuffer) {
+                    free(finalOutputBuffer);
+                    finalOutputBuffer = NULL;
+                }
+                if(NULL != tempOutputBuffer) {
+                    free(tempOutputBuffer);
+                    tempOutputBuffer = NULL;
+                }
+                return err;
+            }
+
+            // The out plane now becomes the in plane for adding other effects
+            swapImagePlanes(planeIn, planeOut,(M4VIFI_UInt8 *)finalOutputBuffer,
+             (M4VIFI_UInt8 *)tempOutputBuffer);
+        }
+    }
+
+    if(params->currentVideoEffect & VIDEO_EFFECT_FRAMING) {
+
+        M4xVSS_FramingStruct framingCtx;
+        // Find the effect in effectSettings array
+        for(i=0;i<params->numberEffects;i++) {
+            if(params->effectsSettings[i].VideoEffectType ==
+             (M4VSS3GPP_VideoEffectType)M4xVSS_kVideoEffectType_Framing) {
+                if((params->effectsSettings[i].uiStartTime <= params->timeMs + params->timeOffset) &&
+                   ((params->effectsSettings[i].uiStartTime+
+                     params->effectsSettings[i].uiDuration) >= params->timeMs + params->timeOffset))
+                {
+                        break;
+                }
+            }
+        }
+        if(i < params->numberEffects) {
+            computeProgressForVideoEffect(params->timeMs,
+             params->effectsSettings[i].uiStartTime,
+             params->effectsSettings[i].uiDuration, &extProgress);
+
+            err = prepareFramingStructure(&framingCtx,
+                  params->effectsSettings, i, params->overlayFrameRGBBuffer,
+                  params->overlayFrameYUVBuffer);
+
+            if(err == M4NO_ERROR) {
+                err = M4VSS3GPP_externalVideoEffectFraming(
+                      (M4OSA_Void *)&framingCtx, planeIn, planeOut, &extProgress,
+                      M4xVSS_kVideoEffectType_Framing);
+            }
+
+            free(framingCtx.alphaBlendingStruct);
+
+            if(framingCtx.FramingYuv != NULL) {
+                free(framingCtx.FramingYuv);
+                framingCtx.FramingYuv = NULL;
+            }
+            //If prepareFramingStructure / M4VSS3GPP_externalVideoEffectFraming
+            // returned error, then return from function
+            if(err != M4NO_ERROR) {
+
+                if(NULL != finalOutputBuffer) {
+                    free(finalOutputBuffer);
+                    finalOutputBuffer = NULL;
+                }
+                if(NULL != tempOutputBuffer) {
+                    free(tempOutputBuffer);
+                    tempOutputBuffer = NULL;
+                }
+                return err;
+            }
+
+            // The out plane now becomes the in plane for adding other effects
+            swapImagePlanes(planeIn, planeOut,(M4VIFI_UInt8 *)finalOutputBuffer,
+             (M4VIFI_UInt8 *)tempOutputBuffer);
+        }
+    }
+
+    if(params->currentVideoEffect & VIDEO_EFFECT_FADEFROMBLACK) {
+        /* find the effect in effectSettings array*/
+        for(i=0;i<params->numberEffects;i++) {
+            if(params->effectsSettings[i].VideoEffectType ==
+             M4VSS3GPP_kVideoEffectType_FadeFromBlack)
+                break;
+        }
+
+        if(i < params->numberEffects) {
+            computePercentageDone(params->timeMs,
+             params->effectsSettings[i].uiStartTime,
+             params->effectsSettings[i].uiDuration, &percentageDone);
+
+            // Compute where we are in the effect (scale is 0->1024)
+            lum_factor = (M4OSA_Int32)( percentageDone * 1024 );
+            // Apply the darkening effect
+            err = applyLumaEffect(M4VSS3GPP_kVideoEffectType_FadeFromBlack,
+                  planeIn, planeOut, (M4VIFI_UInt8 *)finalOutputBuffer,
+                  (M4VIFI_UInt8 *)tempOutputBuffer, lum_factor);
+            if(err != M4NO_ERROR) {
+                return err;
+            }
+        }
+    }
+
+    if(params->currentVideoEffect & VIDEO_EFFECT_FADETOBLACK) {
+        // Find the effect in effectSettings array
+        for(i=0;i<params->numberEffects;i++) {
+            if(params->effectsSettings[i].VideoEffectType ==
+             M4VSS3GPP_kVideoEffectType_FadeToBlack)
+                break;
+        }
+        if(i < params->numberEffects) {
+            computePercentageDone(params->timeMs,
+             params->effectsSettings[i].uiStartTime,
+             params->effectsSettings[i].uiDuration, &percentageDone);
+
+            // Compute where we are in the effect (scale is 0->1024)
+            lum_factor = (M4OSA_Int32)( (1.0-percentageDone) * 1024 );
+            // Apply the darkening effect
+            err = applyLumaEffect(M4VSS3GPP_kVideoEffectType_FadeToBlack,
+                  planeIn, planeOut, (M4VIFI_UInt8 *)finalOutputBuffer,
+                  (M4VIFI_UInt8 *)tempOutputBuffer, lum_factor);
+            if(err != M4NO_ERROR) {
+                return err;
+            }
+        }
+    }
+
+    if(params->currentVideoEffect & VIDEO_EFFECT_CURTAINOPEN) {
+        // Find the effect in effectSettings array
+        for(i=0;i<params->numberEffects;i++) {
+            if(params->effectsSettings[i].VideoEffectType ==
+             M4VSS3GPP_kVideoEffectType_CurtainOpening)
+                break;
+        }
+        if(i < params->numberEffects) {
+            computePercentageDone(params->timeMs,
+             params->effectsSettings[i].uiStartTime,
+             params->effectsSettings[i].uiDuration, &percentageDone);
+
+            // Compute where we are in the effect (scale is 0->height).
+            // It is done with floats because tmp x height
+            // can be very large (with long clips).
+            curtainParams.nb_black_lines =
+             (M4OSA_UInt16)((1.0 - percentageDone) * planeIn[0].u_height );
+            // The curtain is hanged on the ceiling
+            curtainParams.top_is_black = 1;
+
+            // Apply the curtain effect
+            err = applyCurtainEffect(M4VSS3GPP_kVideoEffectType_CurtainOpening,
+                  planeIn, planeOut, (M4VIFI_UInt8 *)finalOutputBuffer,
+                  (M4VIFI_UInt8 *)tempOutputBuffer, &curtainParams);
+            if(err != M4NO_ERROR) {
+                return err;
+            }
+        }
+    }
+
+    if(params->currentVideoEffect & VIDEO_EFFECT_CURTAINCLOSE) {
+        // Find the effect in effectSettings array
+        for(i=0;i<params->numberEffects;i++) {
+            if(params->effectsSettings[i].VideoEffectType ==
+             M4VSS3GPP_kVideoEffectType_CurtainClosing)
+                break;
+        }
+        if(i < params->numberEffects) {
+            computePercentageDone(params->timeMs,
+             params->effectsSettings[i].uiStartTime,
+             params->effectsSettings[i].uiDuration, &percentageDone);
+
+            // Compute where we are in the effect (scale is 0->height).
+            // It is done with floats because
+            // tmp x height can be very large (with long clips).
+            curtainParams.nb_black_lines =
+             (M4OSA_UInt16)(percentageDone * planeIn[0].u_height );
+
+            // The curtain is hanged on the ceiling
+            curtainParams.top_is_black = 1;
+
+            // Apply the curtain effect
+            err = applyCurtainEffect(M4VSS3GPP_kVideoEffectType_CurtainClosing,
+                  planeIn, planeOut, (M4VIFI_UInt8 *)finalOutputBuffer,
+                  (M4VIFI_UInt8 *)tempOutputBuffer, &curtainParams);
+            if(err != M4NO_ERROR) {
+                return err;
+            }
+        }
+    }
+
+    LOGV("doMediaRendering CALL getBuffer()");
+    // Set the output YUV420 plane to be compatible with YV12 format
+    // W & H even
+    // YVU instead of YUV
+    // align buffers on 32 bits
+
+    // Y plane
+    //in YV12 format, sizes must be even
+    M4OSA_UInt32 yv12PlaneWidth = ((params->outVideoWidth +1)>>1)<<1;
+    M4OSA_UInt32 yv12PlaneHeight = ((params->outVideoHeight+1)>>1)<<1;
+
+    prepareYV12ImagePlane(planeOut, yv12PlaneWidth, yv12PlaneHeight,
+     (M4OSA_UInt32)params->outBufferStride, (M4VIFI_UInt8 *)params->pOutBuffer);
+
+    err = applyRenderingMode(planeIn, planeOut, params->renderingMode);
+
+    if(M4OSA_NULL != finalOutputBuffer) {
+        free(finalOutputBuffer);
+        finalOutputBuffer= M4OSA_NULL;
+    }
+    if(M4OSA_NULL != tempOutputBuffer) {
+        free(tempOutputBuffer);
+        tempOutputBuffer = M4OSA_NULL;
+    }
+    if(err != M4NO_ERROR) {
+        LOGV("doVideoPostProcessing: applyRenderingMode returned err=%d",err);
+        return err;
+    }
+    return M4NO_ERROR;
+}
diff --git a/libvideoeditor/lvpp/VideoEditorTools.h b/libvideoeditor/lvpp/VideoEditorTools.h
index 6d70473..36d286e 100755
--- a/libvideoeditor/lvpp/VideoEditorTools.h
+++ b/libvideoeditor/lvpp/VideoEditorTools.h
@@ -1,147 +1,147 @@
-/*

- * Copyright (C) 2011 The Android Open Source Project

- *

- * Licensed under the Apache License, Version 2.0 (the "License");

- * you may not use this file except in compliance with the License.

- * You may obtain a copy of the License at

- *

- *      http://www.apache.org/licenses/LICENSE-2.0

- *

- * Unless required by applicable law or agreed to in writing, software

- * distributed under the License is distributed on an "AS IS" BASIS,

- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

- * See the License for the specific language governing permissions and

- * limitations under the License.

- */

-

-#ifndef ANDROID_VE_TOOLS_H

-#define ANDROID_VE_TOOLS_H

-

-#include "M4OSA_Types.h"

-#include "M4OSA_Memory.h"

-#include "M4OSA_Debug.h"

-#include "M4VIFI_FiltersAPI.h"

-/* Macro definitions */

-#include "M4VIFI_Defines.h"

-/* Clip table declaration */

-#include "M4VIFI_Clip.h"

-#include "M4VFL_transition.h"

-#include "M4VSS3GPP_API.h"

-#include "M4xVSS_API.h"

-#include "M4xVSS_Internal.h"

-

-#include "M4AIR_API.h"

-#include "PreviewRenderer.h"

-#define MEDIA_RENDERING_INVALID 255

-

-#define TRANSPARENT_COLOR 0x7E0

-#define LUM_FACTOR_MAX 10

-enum {

-    VIDEO_EFFECT_NONE               = 0,

-    VIDEO_EFFECT_BLACKANDWHITE      = 1,

-    VIDEO_EFFECT_PINK               = 2,

-    VIDEO_EFFECT_GREEN              = 4,

-    VIDEO_EFFECT_SEPIA              = 8,

-    VIDEO_EFFECT_NEGATIVE           = 16,

-    VIDEO_EFFECT_FRAMING            = 32,

-    VIDEO_EFFECT_FIFTIES            = 64,

-    VIDEO_EFFECT_COLOR_RGB16        = 128,

-    VIDEO_EFFECT_GRADIENT           = 256,

-    VIDEO_EFFECT_FADEFROMBLACK      = 512,

-    VIDEO_EFFECT_CURTAINOPEN        = 1024,

-    VIDEO_EFFECT_FADETOBLACK        = 2048,

-    VIDEO_EFFECT_CURTAINCLOSE       = 4096,

-};

-

-typedef struct {

-    M4VIFI_UInt8 *vidBuffer;

-    M4OSA_UInt32 videoWidth;

-    M4OSA_UInt32 videoHeight;

-    M4OSA_UInt32 timeMs;

-    M4OSA_UInt32 timeOffset; //has the duration of clips played.

-                             //The flag shall be used for Framing.

-    M4VSS3GPP_EffectSettings* effectsSettings;

-    M4OSA_UInt32 numberEffects;

-    M4OSA_UInt32 outVideoWidth;

-    M4OSA_UInt32 outVideoHeight;

-    M4OSA_UInt32 currentVideoEffect;

-    M4OSA_Bool isFiftiesEffectStarted;

-    M4xVSS_MediaRendering renderingMode;

-    uint8_t *pOutBuffer;

-    size_t outBufferStride;

-    M4VIFI_UInt8*  overlayFrameRGBBuffer;

-    M4VIFI_UInt8*  overlayFrameYUVBuffer;

-} vePostProcessParams;

-

-M4VIFI_UInt8 M4VIFI_YUV420PlanarToYUV420Semiplanar(void *user_data, M4VIFI_ImagePlane *PlaneIn, M4VIFI_ImagePlane *PlaneOut );

-M4VIFI_UInt8 M4VIFI_SemiplanarYUV420toYUV420(void *user_data, M4VIFI_ImagePlane *PlaneIn, M4VIFI_ImagePlane *PlaneOut );

-

-M4OSA_ERR M4VSS3GPP_externalVideoEffectColor(M4OSA_Void *pFunctionContext, M4VIFI_ImagePlane *PlaneIn,

-                                                    M4VIFI_ImagePlane *PlaneOut,M4VSS3GPP_ExternalProgress *pProgress, M4OSA_UInt32 uiEffectKind);

-

-M4OSA_ERR M4VSS3GPP_externalVideoEffectFraming( M4OSA_Void *userData, M4VIFI_ImagePlane PlaneIn[3], M4VIFI_ImagePlane *PlaneOut, M4VSS3GPP_ExternalProgress *pProgress, M4OSA_UInt32 uiEffectKind );

-

-M4OSA_ERR M4VSS3GPP_externalVideoEffectFifties( M4OSA_Void *pUserData, M4VIFI_ImagePlane *pPlaneIn, M4VIFI_ImagePlane *pPlaneOut, M4VSS3GPP_ExternalProgress *pProgress, M4OSA_UInt32 uiEffectKind );

-

-unsigned char M4VFL_modifyLumaWithScale(M4ViComImagePlane *plane_in, M4ViComImagePlane *plane_out, unsigned long lum_factor, void *user_data);

-unsigned char M4VFL_applyCurtain(M4ViComImagePlane *plane_in, M4ViComImagePlane *plane_out, M4VFL_CurtainParam *curtain_factor, void *user_data);

-

-M4OSA_ERR M4xVSS_internalConvertRGBtoYUV(M4xVSS_FramingStruct* framingCtx);

-M4VIFI_UInt8    M4VIFI_xVSS_RGB565toYUV420(void *pUserData, M4VIFI_ImagePlane *pPlaneIn,

-                                                      M4VIFI_ImagePlane *pPlaneOut);

-

-M4OSA_ERR M4xVSS_internalConvertRGB888toYUV(M4xVSS_FramingStruct* framingCtx);

-M4VIFI_UInt8 M4VIFI_RGB888toYUV420(void *pUserData, M4VIFI_ImagePlane *PlaneIn, M4VIFI_ImagePlane PlaneOut[3]);

-

-/*+ Handle the image files here */

-M4OSA_ERR LvGetImageThumbNail(const char *fileName, M4OSA_UInt32 height, M4OSA_UInt32 width, M4OSA_Void **pBuffer);

-/*- Handle the image files here */

-

-M4OSA_ERR applyRenderingMode(M4VIFI_ImagePlane* pPlaneIn, M4VIFI_ImagePlane* pPlaneOut, M4xVSS_MediaRendering mediaRendering);

-

-

-M4VIFI_UInt8 M4VIFI_YUV420toYUV420(void *user_data, M4VIFI_ImagePlane PlaneIn[3], M4VIFI_ImagePlane *PlaneOut );

-M4VIFI_UInt8    M4VIFI_ResizeBilinearYUV420toYUV420(void *pUserData,

-                                                                M4VIFI_ImagePlane *pPlaneIn,

-                                                                M4VIFI_ImagePlane *pPlaneOut);

-

-M4OSA_Void prepareYUV420ImagePlane(M4VIFI_ImagePlane *plane,

-    M4OSA_UInt32 width, M4OSA_UInt32 height, M4VIFI_UInt8 *buffer,

-    M4OSA_UInt32 reportedWidth, M4OSA_UInt32 reportedHeight);

-

-M4OSA_Void prepareYV12ImagePlane(M4VIFI_ImagePlane *plane,

-    M4OSA_UInt32 width, M4OSA_UInt32 height, M4OSA_UInt32 stride, M4VIFI_UInt8 *buffer);

-

-M4OSA_Void swapImagePlanes(

-    M4VIFI_ImagePlane *planeIn, M4VIFI_ImagePlane *planeOut,

-    M4VIFI_UInt8 *buffer1, M4VIFI_UInt8 *buffer2);

-

-M4OSA_Void computePercentageDone(

-     M4OSA_UInt32 ctsMs, M4OSA_UInt32 effectStartTimeMs,

-     M4OSA_UInt32 effectDuration, M4OSA_Double *percentageDone);

-

-M4OSA_Void computeProgressForVideoEffect(

-     M4OSA_UInt32 ctsMs, M4OSA_UInt32 effectStartTimeMs,

-     M4OSA_UInt32 effectDuration, M4VSS3GPP_ExternalProgress* extProgress);

-

-M4OSA_ERR prepareFramingStructure(

-    M4xVSS_FramingStruct* framingCtx,

-    M4VSS3GPP_EffectSettings* effectsSettings, M4OSA_UInt32 index,

-    M4VIFI_UInt8* overlayRGB, M4VIFI_UInt8* overlayYUV);

-

-M4OSA_ERR applyColorEffect(M4xVSS_VideoEffectType colorEffect,

-    M4VIFI_ImagePlane *planeIn, M4VIFI_ImagePlane *planeOut,

-    M4VIFI_UInt8 *buffer1, M4VIFI_UInt8 *buffer2, M4OSA_UInt16 rgbColorData);

-

-M4OSA_ERR applyLumaEffect(M4VSS3GPP_VideoEffectType videoEffect,

-    M4VIFI_ImagePlane *planeIn, M4VIFI_ImagePlane *planeOut,

-    M4VIFI_UInt8 *buffer1, M4VIFI_UInt8 *buffer2, M4OSA_Int32 lum_factor);

-

-M4OSA_ERR applyCurtainEffect(M4VSS3GPP_VideoEffectType videoEffect,

-    M4VIFI_ImagePlane *planeIn, M4VIFI_ImagePlane *planeOut,

-    M4VIFI_UInt8 *buffer1, M4VIFI_UInt8 *buffer2, M4VFL_CurtainParam* curtainParams);

-

-M4OSA_ERR applyEffectsAndRenderingMode(vePostProcessParams *params,

-    M4OSA_UInt32 reportedWidth, M4OSA_UInt32 reportedHeight);

-

-#endif // ANDROID_VE_TOOLS_H

+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_VE_TOOLS_H
+#define ANDROID_VE_TOOLS_H
+
+#include "M4OSA_Types.h"
+#include "M4OSA_Memory.h"
+#include "M4OSA_Debug.h"
+#include "M4VIFI_FiltersAPI.h"
+/* Macro definitions */
+#include "M4VIFI_Defines.h"
+/* Clip table declaration */
+#include "M4VIFI_Clip.h"
+#include "M4VFL_transition.h"
+#include "M4VSS3GPP_API.h"
+#include "M4xVSS_API.h"
+#include "M4xVSS_Internal.h"
+
+#include "M4AIR_API.h"
+#include "PreviewRenderer.h"
+#define MEDIA_RENDERING_INVALID 255
+
+#define TRANSPARENT_COLOR 0x7E0
+#define LUM_FACTOR_MAX 10
+enum {
+    VIDEO_EFFECT_NONE               = 0,
+    VIDEO_EFFECT_BLACKANDWHITE      = 1,
+    VIDEO_EFFECT_PINK               = 2,
+    VIDEO_EFFECT_GREEN              = 4,
+    VIDEO_EFFECT_SEPIA              = 8,
+    VIDEO_EFFECT_NEGATIVE           = 16,
+    VIDEO_EFFECT_FRAMING            = 32,
+    VIDEO_EFFECT_FIFTIES            = 64,
+    VIDEO_EFFECT_COLOR_RGB16        = 128,
+    VIDEO_EFFECT_GRADIENT           = 256,
+    VIDEO_EFFECT_FADEFROMBLACK      = 512,
+    VIDEO_EFFECT_CURTAINOPEN        = 1024,
+    VIDEO_EFFECT_FADETOBLACK        = 2048,
+    VIDEO_EFFECT_CURTAINCLOSE       = 4096,
+};
+
+typedef struct {
+    M4VIFI_UInt8 *vidBuffer;
+    M4OSA_UInt32 videoWidth;
+    M4OSA_UInt32 videoHeight;
+    M4OSA_UInt32 timeMs;
+    M4OSA_UInt32 timeOffset; //has the duration of clips played.
+                             //The flag shall be used for Framing.
+    M4VSS3GPP_EffectSettings* effectsSettings;
+    M4OSA_UInt32 numberEffects;
+    M4OSA_UInt32 outVideoWidth;
+    M4OSA_UInt32 outVideoHeight;
+    M4OSA_UInt32 currentVideoEffect;
+    M4OSA_Bool isFiftiesEffectStarted;
+    M4xVSS_MediaRendering renderingMode;
+    uint8_t *pOutBuffer;
+    size_t outBufferStride;
+    M4VIFI_UInt8*  overlayFrameRGBBuffer;
+    M4VIFI_UInt8*  overlayFrameYUVBuffer;
+} vePostProcessParams;
+
+M4VIFI_UInt8 M4VIFI_YUV420PlanarToYUV420Semiplanar(void *user_data, M4VIFI_ImagePlane *PlaneIn, M4VIFI_ImagePlane *PlaneOut );
+M4VIFI_UInt8 M4VIFI_SemiplanarYUV420toYUV420(void *user_data, M4VIFI_ImagePlane *PlaneIn, M4VIFI_ImagePlane *PlaneOut );
+
+M4OSA_ERR M4VSS3GPP_externalVideoEffectColor(M4OSA_Void *pFunctionContext, M4VIFI_ImagePlane *PlaneIn,
+                                                    M4VIFI_ImagePlane *PlaneOut,M4VSS3GPP_ExternalProgress *pProgress, M4OSA_UInt32 uiEffectKind);
+
+M4OSA_ERR M4VSS3GPP_externalVideoEffectFraming( M4OSA_Void *userData, M4VIFI_ImagePlane PlaneIn[3], M4VIFI_ImagePlane *PlaneOut, M4VSS3GPP_ExternalProgress *pProgress, M4OSA_UInt32 uiEffectKind );
+
+M4OSA_ERR M4VSS3GPP_externalVideoEffectFifties( M4OSA_Void *pUserData, M4VIFI_ImagePlane *pPlaneIn, M4VIFI_ImagePlane *pPlaneOut, M4VSS3GPP_ExternalProgress *pProgress, M4OSA_UInt32 uiEffectKind );
+
+unsigned char M4VFL_modifyLumaWithScale(M4ViComImagePlane *plane_in, M4ViComImagePlane *plane_out, unsigned long lum_factor, void *user_data);
+unsigned char M4VFL_applyCurtain(M4ViComImagePlane *plane_in, M4ViComImagePlane *plane_out, M4VFL_CurtainParam *curtain_factor, void *user_data);
+
+M4OSA_ERR M4xVSS_internalConvertRGBtoYUV(M4xVSS_FramingStruct* framingCtx);
+M4VIFI_UInt8    M4VIFI_xVSS_RGB565toYUV420(void *pUserData, M4VIFI_ImagePlane *pPlaneIn,
+                                                      M4VIFI_ImagePlane *pPlaneOut);
+
+M4OSA_ERR M4xVSS_internalConvertRGB888toYUV(M4xVSS_FramingStruct* framingCtx);
+M4VIFI_UInt8 M4VIFI_RGB888toYUV420(void *pUserData, M4VIFI_ImagePlane *PlaneIn, M4VIFI_ImagePlane PlaneOut[3]);
+
+/*+ Handle the image files here */
+M4OSA_ERR LvGetImageThumbNail(const char *fileName, M4OSA_UInt32 height, M4OSA_UInt32 width, M4OSA_Void **pBuffer);
+/*- Handle the image files here */
+
+M4OSA_ERR applyRenderingMode(M4VIFI_ImagePlane* pPlaneIn, M4VIFI_ImagePlane* pPlaneOut, M4xVSS_MediaRendering mediaRendering);
+
+
+M4VIFI_UInt8 M4VIFI_YUV420toYUV420(void *user_data, M4VIFI_ImagePlane PlaneIn[3], M4VIFI_ImagePlane *PlaneOut );
+M4VIFI_UInt8    M4VIFI_ResizeBilinearYUV420toYUV420(void *pUserData,
+                                                                M4VIFI_ImagePlane *pPlaneIn,
+                                                                M4VIFI_ImagePlane *pPlaneOut);
+
+M4OSA_Void prepareYUV420ImagePlane(M4VIFI_ImagePlane *plane,
+    M4OSA_UInt32 width, M4OSA_UInt32 height, M4VIFI_UInt8 *buffer,
+    M4OSA_UInt32 reportedWidth, M4OSA_UInt32 reportedHeight);
+
+M4OSA_Void prepareYV12ImagePlane(M4VIFI_ImagePlane *plane,
+    M4OSA_UInt32 width, M4OSA_UInt32 height, M4OSA_UInt32 stride, M4VIFI_UInt8 *buffer);
+
+M4OSA_Void swapImagePlanes(
+    M4VIFI_ImagePlane *planeIn, M4VIFI_ImagePlane *planeOut,
+    M4VIFI_UInt8 *buffer1, M4VIFI_UInt8 *buffer2);
+
+M4OSA_Void computePercentageDone(
+     M4OSA_UInt32 ctsMs, M4OSA_UInt32 effectStartTimeMs,
+     M4OSA_UInt32 effectDuration, M4OSA_Double *percentageDone);
+
+M4OSA_Void computeProgressForVideoEffect(
+     M4OSA_UInt32 ctsMs, M4OSA_UInt32 effectStartTimeMs,
+     M4OSA_UInt32 effectDuration, M4VSS3GPP_ExternalProgress* extProgress);
+
+M4OSA_ERR prepareFramingStructure(
+    M4xVSS_FramingStruct* framingCtx,
+    M4VSS3GPP_EffectSettings* effectsSettings, M4OSA_UInt32 index,
+    M4VIFI_UInt8* overlayRGB, M4VIFI_UInt8* overlayYUV);
+
+M4OSA_ERR applyColorEffect(M4xVSS_VideoEffectType colorEffect,
+    M4VIFI_ImagePlane *planeIn, M4VIFI_ImagePlane *planeOut,
+    M4VIFI_UInt8 *buffer1, M4VIFI_UInt8 *buffer2, M4OSA_UInt16 rgbColorData);
+
+M4OSA_ERR applyLumaEffect(M4VSS3GPP_VideoEffectType videoEffect,
+    M4VIFI_ImagePlane *planeIn, M4VIFI_ImagePlane *planeOut,
+    M4VIFI_UInt8 *buffer1, M4VIFI_UInt8 *buffer2, M4OSA_Int32 lum_factor);
+
+M4OSA_ERR applyCurtainEffect(M4VSS3GPP_VideoEffectType videoEffect,
+    M4VIFI_ImagePlane *planeIn, M4VIFI_ImagePlane *planeOut,
+    M4VIFI_UInt8 *buffer1, M4VIFI_UInt8 *buffer2, M4VFL_CurtainParam* curtainParams);
+
+M4OSA_ERR applyEffectsAndRenderingMode(vePostProcessParams *params,
+    M4OSA_UInt32 reportedWidth, M4OSA_UInt32 reportedHeight);
+
+#endif // ANDROID_VE_TOOLS_H