Merge remote-tracking branch 'goog/ics-aah-exp'

empty merge -s ours

Change-Id: Icaa928cc62272dad788a80b769b8a745e08cf2ca
diff --git a/libvideoeditor/include/II420ColorConverter.h b/libvideoeditor/include/II420ColorConverter.h
deleted file mode 100644
index 33af61f..0000000
--- a/libvideoeditor/include/II420ColorConverter.h
+++ /dev/null
@@ -1,126 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef II420_COLOR_CONVERTER_H
-
-#define II420_COLOR_CONVERTER_H
-
-#include <stdint.h>
-#include <android/rect.h>
-
-#ifdef __cplusplus
-extern "C" {
-#endif
-
-typedef struct II420ColorConverter {
-
-    /*
-     * getDecoderOutputFormat
-     * Returns the color format (OMX_COLOR_FORMATTYPE) of the decoder output.
-     * If it is I420 (OMX_COLOR_FormatYUV420Planar), no conversion is needed,
-     * and convertDecoderOutputToI420() can be a no-op.
-     */
-    int (*getDecoderOutputFormat)();
-
-    /*
-     * convertDecoderOutputToI420
-     * @Desc     Converts from the decoder output format to I420 format.
-     * @note     Caller (e.g. VideoEditor) owns the buffers
-     * @param    decoderBits   (IN) Pointer to the buffer containing the decoder output
-     * @param    decoderWidth  (IN) Buffer width, as reported by the decoder
-     *                              metadata (kKeyWidth)
-     * @param    decoderHeight (IN) Buffer height, as reported by the decoder
-     *                              metadata (kKeyHeight)
-     * @param    decoderRect   (IN) The rectangle of the actual frame, as
-     *                              reported by decoder metadata (kKeyCropRect)
-     * @param    dstBits      (OUT) Pointer to the output I420 buffer
-     * @return   -1 Any error
-     * @return   0  No Error
-     */
-    int (*convertDecoderOutputToI420)(
-        void* decoderBits, int decoderWidth, int decoderHeight,
-        ARect decoderRect, void* dstBits);
-
-    /*
-     * getEncoderInputFormat
-     * Returns the color format (OMX_COLOR_FORMATTYPE) of the encoder input.
-     * If it is I420 (OMX_COLOR_FormatYUV420Planar), no conversion is needed,
-     * and convertI420ToEncoderInput() and getEncoderInputBufferInfo() can
-     * be no-ops.
-     */
-    int (*getEncoderInputFormat)();
-
-    /* convertI420ToEncoderInput
-     * @Desc     This function converts from I420 to the encoder input format
-     * @note     Caller (e.g. VideoEditor) owns the buffers
-     * @param    srcBits       (IN) Pointer to the input I420 buffer
-     * @param    srcWidth      (IN) Width of the I420 frame
-     * @param    srcHeight     (IN) Height of the I420 frame
-     * @param    encoderWidth  (IN) Encoder buffer width, as calculated by
-     *                              getEncoderBufferInfo()
-     * @param    encoderHeight (IN) Encoder buffer height, as calculated by
-     *                              getEncoderBufferInfo()
-     * @param    encoderRect   (IN) Rect coordinates of the actual frame inside
-     *                              the encoder buffer, as calculated by
-     *                              getEncoderBufferInfo().
-     * @param    encoderBits  (OUT) Pointer to the output buffer. The size of
-     *                              this buffer is calculated by
-     *                              getEncoderBufferInfo()
-     * @return   -1 Any error
-     * @return   0  No Error
-     */
-    int (*convertI420ToEncoderInput)(
-        void* srcBits, int srcWidth, int srcHeight,
-        int encoderWidth, int encoderHeight, ARect encoderRect,
-        void* encoderBits);
-
-    /* getEncoderInputBufferInfo
-     * @Desc     This function returns metadata for the encoder input buffer
-     *           based on the actual I420 frame width and height.
-     * @note     This API should be used to obtain the necessary information
-     *           before calling convertI420ToEncoderInput().
-     *           VideoEditor knows only the width and height of the I420 buffer,
-     *           but it also needs to know the width, height, and size of the
-     *           encoder input buffer. The encoder input buffer width and height
-     *           are used to set the metadata for the encoder.
-     * @param    srcWidth      (IN) Width of the I420 frame
-     * @param    srcHeight     (IN) Height of the I420 frame
-     * @param    encoderWidth  (OUT) Encoder buffer width needed
-     * @param    encoderHeight (OUT) Encoder buffer height needed
-     * @param    encoderRect   (OUT) Rect coordinates of the actual frame inside
-     *                               the encoder buffer
-     * @param    encoderBufferSize  (OUT) The size of the buffer that needs to be
-     *                              allocated by the caller before invoking
-     *                              convertI420ToEncoderInput().
-     * @return   -1 Any error
-     * @return   0  No Error
-     */
-    int (*getEncoderInputBufferInfo)(
-        int srcWidth, int srcHeight,
-        int* encoderWidth, int* encoderHeight,
-        ARect* encoderRect, int* encoderBufferSize);
-
-} II420ColorConverter;
-
-/* The only function that the shared library needs to expose: It fills the
-   function pointers in II420ColorConverter */
-void getI420ColorConverter(II420ColorConverter *converter);
-
-#if defined(__cplusplus)
-}
-#endif
-
-#endif  // II420_COLOR_CONVERTER_H
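
For context on the interface removed above: callers were expected to dlopen() a vendor-supplied
libI420colorconvert.so and let its single exported symbol, getI420ColorConverter(), fill in the
function-pointer table (I420ColorConverter.cpp, further down in this patch, still does exactly that).
The following is a minimal, illustrative C sketch of that usage; the frame dimensions, buffer sizes,
and error handling are assumptions for the example, not taken from this patch.

    #include <dlfcn.h>
    #include <stdio.h>
    #include <stdlib.h>
    #include "II420ColorConverter.h"   /* the header deleted above */

    int main(void) {
        /* Load the vendor library and resolve its only exported symbol. */
        void *handle = dlopen("libI420colorconvert.so", RTLD_NOW);
        if (handle == NULL) {
            fprintf(stderr, "cannot load libI420colorconvert.so\n");
            return 1;
        }

        void (*getConverter)(II420ColorConverter *) =
            (void (*)(II420ColorConverter *)) dlsym(handle, "getI420ColorConverter");
        if (getConverter == NULL) {
            dlclose(handle);
            return 1;
        }

        /* The library fills in the function-pointer table. */
        II420ColorConverter converter;
        getConverter(&converter);

        /* If the decoder already outputs I420, no conversion is needed. */
        printf("decoder output format: 0x%x\n", converter.getDecoderOutputFormat());

        /* Illustrative 640x480 frame; real code would use the decoder's
           kKeyWidth/kKeyHeight/kKeyCropRect metadata instead. */
        int width = 640, height = 480;
        ARect crop = { 0, 0, width, height };
        void *decoderBits = malloc(width * height * 3 / 2);
        void *i420Bits    = malloc(width * height * 3 / 2);

        if (converter.convertDecoderOutputToI420(
                decoderBits, width, height, crop, i420Bits) != 0) {
            fprintf(stderr, "conversion failed\n");
        }

        free(decoderBits);
        free(i420Bits);
        dlclose(handle);
        return 0;
    }
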
diff --git a/libvideoeditor/lvpp/Android.mk b/libvideoeditor/lvpp/Android.mk
index fdcd316..636beae 100755
--- a/libvideoeditor/lvpp/Android.mk
+++ b/libvideoeditor/lvpp/Android.mk
@@ -34,8 +34,6 @@
     DummyAudioSource.cpp \
     DummyVideoSource.cpp \
     VideoEditorBGAudioProcessing.cpp \
-    AudioPlayerBase.cpp \
-    PreviewPlayerBase.cpp \
     PreviewRenderer.cpp \
     I420ColorConverter.cpp \
     NativeWindowRenderer.cpp
@@ -49,10 +47,12 @@
 
 
 LOCAL_SHARED_LIBRARIES := \
+    libaudioutils      \
     libbinder          \
     libutils           \
     libcutils          \
     libmedia           \
+    libmedia_native    \
     libdrmframework    \
     libstagefright  \
     libstagefright_omx  \
@@ -65,24 +65,24 @@
 
 
 LOCAL_C_INCLUDES += \
+    $(TOP)/system/media/audio_utils/include \
     $(TOP)/frameworks/base/core/jni \
-    $(TOP)/frameworks/base/include \
-    $(TOP)/frameworks/base/include/media \
     $(TOP)/frameworks/base/media/libmediaplayerservice \
     $(TOP)/frameworks/base/media/libstagefright \
     $(TOP)/frameworks/base/media/libstagefright/include \
     $(TOP)/frameworks/base/media/libstagefright/rtsp \
     $(JNI_H_INCLUDE) \
     $(call include-path-for, corecg graphics) \
-    $(TOP)/frameworks/base/include/media/stagefright/openmax \
+    $(TOP)/frameworks/native/include/media/openmax \
     $(TOP)/frameworks/media/libvideoeditor/osal/inc \
     $(TOP)/frameworks/media/libvideoeditor/vss/common/inc \
     $(TOP)/frameworks/media/libvideoeditor/vss/mcs/inc \
     $(TOP)/frameworks/media/libvideoeditor/vss/inc \
     $(TOP)/frameworks/media/libvideoeditor/vss/stagefrightshells/inc \
     $(TOP)/frameworks/media/libvideoeditor/lvpp \
-    $(TOP)/frameworks/media/libvideoeditor/include \
+    $(TOP)/frameworks/native/include/media/editor \
     $(TOP)/frameworks/base/media/jni/mediaeditor \
+    $(TOP)/frameworks/native/services/audioflinger \
     $(TOP)/frameworks/base/services/audioflinger
 
 
diff --git a/libvideoeditor/lvpp/AudioPlayerBase.cpp b/libvideoeditor/lvpp/AudioPlayerBase.cpp
deleted file mode 100644
index 26c6a63..0000000
--- a/libvideoeditor/lvpp/AudioPlayerBase.cpp
+++ /dev/null
@@ -1,510 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "AudioPlayerBase"
-#include <utils/Log.h>
-
-#include <binder/IPCThreadState.h>
-#include <media/AudioTrack.h>
-#include <media/stagefright/MediaDebug.h>
-#include <media/stagefright/MediaDefs.h>
-#include <media/stagefright/MediaErrors.h>
-#include <media/stagefright/MediaSource.h>
-#include <media/stagefright/MetaData.h>
-
-#include "AudioPlayerBase.h"
-#include "PreviewPlayerBase.h"
-
-namespace android {
-
-AudioPlayerBase::AudioPlayerBase(
-        const sp<MediaPlayerBase::AudioSink> &audioSink,
-        PreviewPlayerBase *observer)
-    : mAudioTrack(NULL),
-      mInputBuffer(NULL),
-      mSampleRate(0),
-      mLatencyUs(0),
-      mFrameSize(0),
-      mNumFramesPlayed(0),
-      mPositionTimeMediaUs(-1),
-      mPositionTimeRealUs(-1),
-      mSeeking(false),
-      mReachedEOS(false),
-      mFinalStatus(OK),
-      mStarted(false),
-      mIsFirstBuffer(false),
-      mFirstBufferResult(OK),
-      mFirstBuffer(NULL),
-      mAudioSink(audioSink),
-      mObserver(observer) {
-}
-
-AudioPlayerBase::~AudioPlayerBase() {
-    if (mStarted) {
-        reset();
-    }
-}
-
-void AudioPlayerBase::setSource(const sp<MediaSource> &source) {
-    CHECK_EQ(mSource, NULL);
-    mSource = source;
-}
-
-status_t AudioPlayerBase::start(bool sourceAlreadyStarted) {
-    CHECK(!mStarted);
-    CHECK(mSource != NULL);
-
-    status_t err;
-    if (!sourceAlreadyStarted) {
-        err = mSource->start();
-
-        if (err != OK) {
-            return err;
-        }
-    }
-
-    // We allow an optional INFO_FORMAT_CHANGED at the very beginning
-    // of playback, if there is one, getFormat below will retrieve the
-    // updated format, if there isn't, we'll stash away the valid buffer
-    // of data to be used on the first audio callback.
-
-    CHECK(mFirstBuffer == NULL);
-
-    mFirstBufferResult = mSource->read(&mFirstBuffer);
-    if (mFirstBufferResult == INFO_FORMAT_CHANGED) {
-        LOGV("INFO_FORMAT_CHANGED!!!");
-
-        CHECK(mFirstBuffer == NULL);
-        mFirstBufferResult = OK;
-        mIsFirstBuffer = false;
-    } else {
-        mIsFirstBuffer = true;
-    }
-
-    sp<MetaData> format = mSource->getFormat();
-    const char *mime;
-    bool success = format->findCString(kKeyMIMEType, &mime);
-    CHECK(success);
-    CHECK(!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_RAW));
-
-    success = format->findInt32(kKeySampleRate, &mSampleRate);
-    CHECK(success);
-
-    int32_t numChannels;
-    success = format->findInt32(kKeyChannelCount, &numChannels);
-    CHECK(success);
-
-    if (mAudioSink.get() != NULL) {
-        status_t err = mAudioSink->open(
-                mSampleRate, numChannels, AUDIO_FORMAT_PCM_16_BIT,
-                DEFAULT_AUDIOSINK_BUFFERCOUNT,
-                &AudioPlayerBase::AudioSinkCallback, this);
-        if (err != OK) {
-            if (mFirstBuffer != NULL) {
-                mFirstBuffer->release();
-                mFirstBuffer = NULL;
-            }
-
-            if (!sourceAlreadyStarted) {
-                mSource->stop();
-            }
-
-            return err;
-        }
-
-        mLatencyUs = (int64_t)mAudioSink->latency() * 1000;
-        mFrameSize = mAudioSink->frameSize();
-
-        mAudioSink->start();
-    } else {
-        mAudioTrack = new AudioTrack(
-                AUDIO_STREAM_MUSIC, mSampleRate, AUDIO_FORMAT_PCM_16_BIT,
-                (numChannels == 2)
-                    ? AUDIO_CHANNEL_OUT_STEREO
-                    : AUDIO_CHANNEL_OUT_MONO,
-                0, 0, &AudioCallback, this, 0);
-
-        if ((err = mAudioTrack->initCheck()) != OK) {
-            delete mAudioTrack;
-            mAudioTrack = NULL;
-
-            if (mFirstBuffer != NULL) {
-                mFirstBuffer->release();
-                mFirstBuffer = NULL;
-            }
-
-            if (!sourceAlreadyStarted) {
-                mSource->stop();
-            }
-
-            return err;
-        }
-
-        mLatencyUs = (int64_t)mAudioTrack->latency() * 1000;
-        mFrameSize = mAudioTrack->frameSize();
-
-        mAudioTrack->start();
-    }
-
-    mStarted = true;
-
-    return OK;
-}
-
-void AudioPlayerBase::pause(bool playPendingSamples) {
-    CHECK(mStarted);
-
-    if (playPendingSamples) {
-        if (mAudioSink.get() != NULL) {
-            mAudioSink->stop();
-        } else {
-            mAudioTrack->stop();
-        }
-    } else {
-        if (mAudioSink.get() != NULL) {
-            mAudioSink->pause();
-        } else {
-            mAudioTrack->pause();
-        }
-    }
-}
-
-void AudioPlayerBase::resume() {
-    CHECK(mStarted);
-
-    if (mAudioSink.get() != NULL) {
-        mAudioSink->start();
-    } else {
-        mAudioTrack->start();
-    }
-}
-
-void AudioPlayerBase::reset() {
-    CHECK(mStarted);
-
-    if (mAudioSink.get() != NULL) {
-        mAudioSink->stop();
-        mAudioSink->close();
-    } else {
-        mAudioTrack->stop();
-
-        delete mAudioTrack;
-        mAudioTrack = NULL;
-    }
-
-    // Make sure to release any buffer we hold onto so that the
-    // source is able to stop().
-
-    if (mFirstBuffer != NULL) {
-        mFirstBuffer->release();
-        mFirstBuffer = NULL;
-    }
-
-    if (mInputBuffer != NULL) {
-        LOGV("AudioPlayerBase releasing input buffer.");
-
-        mInputBuffer->release();
-        mInputBuffer = NULL;
-    }
-
-    mSource->stop();
-
-    // The following hack is necessary to ensure that the OMX
-    // component is completely released by the time we may try
-    // to instantiate it again.
-    wp<MediaSource> tmp = mSource;
-    mSource.clear();
-    while (tmp.promote() != NULL) {
-        usleep(1000);
-    }
-    IPCThreadState::self()->flushCommands();
-
-    mNumFramesPlayed = 0;
-    mPositionTimeMediaUs = -1;
-    mPositionTimeRealUs = -1;
-    mSeeking = false;
-    mReachedEOS = false;
-    mFinalStatus = OK;
-    mStarted = false;
-}
-
-// static
-void AudioPlayerBase::AudioCallback(int event, void *user, void *info) {
-    static_cast<AudioPlayerBase *>(user)->AudioCallback(event, info);
-}
-
-bool AudioPlayerBase::isSeeking() {
-    Mutex::Autolock autoLock(mLock);
-    return mSeeking;
-}
-
-bool AudioPlayerBase::reachedEOS(status_t *finalStatus) {
-    *finalStatus = OK;
-
-    Mutex::Autolock autoLock(mLock);
-    *finalStatus = mFinalStatus;
-    return mReachedEOS;
-}
-
-// static
-size_t AudioPlayerBase::AudioSinkCallback(
-        MediaPlayerBase::AudioSink *audioSink,
-        void *buffer, size_t size, void *cookie) {
-    AudioPlayerBase *me = (AudioPlayerBase *)cookie;
-
-    return me->fillBuffer(buffer, size);
-}
-
-void AudioPlayerBase::AudioCallback(int event, void *info) {
-    if (event != AudioTrack::EVENT_MORE_DATA) {
-        return;
-    }
-
-    AudioTrack::Buffer *buffer = (AudioTrack::Buffer *)info;
-    size_t numBytesWritten = fillBuffer(buffer->raw, buffer->size);
-
-    buffer->size = numBytesWritten;
-}
-
-uint32_t AudioPlayerBase::getNumFramesPendingPlayout() const {
-    uint32_t numFramesPlayedOut;
-    status_t err;
-
-    if (mAudioSink != NULL) {
-        err = mAudioSink->getPosition(&numFramesPlayedOut);
-    } else {
-        err = mAudioTrack->getPosition(&numFramesPlayedOut);
-    }
-
-    if (err != OK || mNumFramesPlayed < numFramesPlayedOut) {
-        return 0;
-    }
-
-    // mNumFramesPlayed is the number of frames submitted
-    // to the audio sink for playback, but not all of them
-    // may have played out by now.
-    return mNumFramesPlayed - numFramesPlayedOut;
-}
-
-size_t AudioPlayerBase::fillBuffer(void *data, size_t size) {
-    if (mNumFramesPlayed == 0) {
-        LOGV("AudioCallback");
-    }
-
-    if (mReachedEOS) {
-        return 0;
-    }
-
-    bool postSeekComplete = false;
-    bool postEOS = false;
-    int64_t postEOSDelayUs = 0;
-
-    size_t size_done = 0;
-    size_t size_remaining = size;
-    while (size_remaining > 0) {
-        MediaSource::ReadOptions options;
-
-        {
-            Mutex::Autolock autoLock(mLock);
-
-            if (mSeeking) {
-                if (mIsFirstBuffer) {
-                    if (mFirstBuffer != NULL) {
-                        mFirstBuffer->release();
-                        mFirstBuffer = NULL;
-                    }
-                    mIsFirstBuffer = false;
-                }
-
-                options.setSeekTo(mSeekTimeUs);
-
-                if (mInputBuffer != NULL) {
-                    mInputBuffer->release();
-                    mInputBuffer = NULL;
-                }
-
-                mSeeking = false;
-                if (mObserver) {
-                    postSeekComplete = true;
-                }
-            }
-        }
-
-        if (mInputBuffer == NULL) {
-            status_t err;
-
-            if (mIsFirstBuffer) {
-                mInputBuffer = mFirstBuffer;
-                mFirstBuffer = NULL;
-                err = mFirstBufferResult;
-
-                mIsFirstBuffer = false;
-            } else {
-                err = mSource->read(&mInputBuffer, &options);
-            }
-
-            CHECK((err == OK && mInputBuffer != NULL)
-                   || (err != OK && mInputBuffer == NULL));
-
-            Mutex::Autolock autoLock(mLock);
-
-            if (err != OK) {
-                if (mObserver && !mReachedEOS) {
-                    // We don't want to post EOS right away but only
-                    // after all frames have actually been played out.
-
-                    // These are the number of frames submitted to the
-                    // AudioTrack that you haven't heard yet.
-                    uint32_t numFramesPendingPlayout =
-                        getNumFramesPendingPlayout();
-
-                    // These are the number of frames we're going to
-                    // submit to the AudioTrack by returning from this
-                    // callback.
-                    uint32_t numAdditionalFrames = size_done / mFrameSize;
-
-                    numFramesPendingPlayout += numAdditionalFrames;
-
-                    int64_t timeToCompletionUs =
-                        (1000000ll * numFramesPendingPlayout) / mSampleRate;
-
-                    LOGV("total number of frames played: %lld (%lld us)",
-                            (mNumFramesPlayed + numAdditionalFrames),
-                            1000000ll * (mNumFramesPlayed + numAdditionalFrames)
-                                / mSampleRate);
-
-                    LOGV("%d frames left to play, %lld us (%.2f secs)",
-                         numFramesPendingPlayout,
-                         timeToCompletionUs, timeToCompletionUs / 1E6);
-
-                    postEOS = true;
-                    postEOSDelayUs = timeToCompletionUs + mLatencyUs;
-                }
-
-                mReachedEOS = true;
-                mFinalStatus = err;
-                break;
-            }
-
-            CHECK(mInputBuffer->meta_data()->findInt64(
-                        kKeyTime, &mPositionTimeMediaUs));
-
-            mPositionTimeRealUs =
-                ((mNumFramesPlayed + size_done / mFrameSize) * 1000000)
-                    / mSampleRate;
-
-            LOGV("buffer->size() = %d, "
-                 "mPositionTimeMediaUs=%.2f mPositionTimeRealUs=%.2f",
-                 mInputBuffer->range_length(),
-                 mPositionTimeMediaUs / 1E6, mPositionTimeRealUs / 1E6);
-        }
-
-        if (mInputBuffer->range_length() == 0) {
-            mInputBuffer->release();
-            mInputBuffer = NULL;
-
-            continue;
-        }
-
-        size_t copy = size_remaining;
-        if (copy > mInputBuffer->range_length()) {
-            copy = mInputBuffer->range_length();
-        }
-
-        memcpy((char *)data + size_done,
-               (const char *)mInputBuffer->data() + mInputBuffer->range_offset(),
-               copy);
-
-        mInputBuffer->set_range(mInputBuffer->range_offset() + copy,
-                                mInputBuffer->range_length() - copy);
-
-        size_done += copy;
-        size_remaining -= copy;
-    }
-
-    {
-        Mutex::Autolock autoLock(mLock);
-        mNumFramesPlayed += size_done / mFrameSize;
-    }
-
-    if (postEOS) {
-        mObserver->postAudioEOS(postEOSDelayUs);
-    }
-
-    if (postSeekComplete) {
-        mObserver->postAudioSeekComplete();
-    }
-
-    return size_done;
-}
-
-int64_t AudioPlayerBase::getRealTimeUs() {
-    Mutex::Autolock autoLock(mLock);
-    return getRealTimeUsLocked();
-}
-
-int64_t AudioPlayerBase::getRealTimeUsLocked() const {
-    return -mLatencyUs + (mNumFramesPlayed * 1000000) / mSampleRate;
-}
-
-int64_t AudioPlayerBase::getMediaTimeUs() {
-    Mutex::Autolock autoLock(mLock);
-
-    if (mPositionTimeMediaUs < 0 || mPositionTimeRealUs < 0) {
-        if (mSeeking) {
-            return mSeekTimeUs;
-        }
-
-        return 0;
-    }
-
-    int64_t realTimeOffset = getRealTimeUsLocked() - mPositionTimeRealUs;
-    if (realTimeOffset < 0) {
-        realTimeOffset = 0;
-    }
-
-    return mPositionTimeMediaUs + realTimeOffset;
-}
-
-bool AudioPlayerBase::getMediaTimeMapping(
-        int64_t *realtime_us, int64_t *mediatime_us) {
-    Mutex::Autolock autoLock(mLock);
-
-    *realtime_us = mPositionTimeRealUs;
-    *mediatime_us = mPositionTimeMediaUs;
-
-    return mPositionTimeRealUs != -1 && mPositionTimeMediaUs != -1;
-}
-
-status_t AudioPlayerBase::seekTo(int64_t time_us) {
-    Mutex::Autolock autoLock(mLock);
-
-    mSeeking = true;
-    mPositionTimeRealUs = mPositionTimeMediaUs = -1;
-    mReachedEOS = false;
-    mSeekTimeUs = time_us;
-
-    if (mAudioSink != NULL) {
-        mAudioSink->flush();
-    } else {
-        mAudioTrack->flush();
-    }
-
-    return OK;
-}
-
-}
diff --git a/libvideoeditor/lvpp/AudioPlayerBase.h b/libvideoeditor/lvpp/AudioPlayerBase.h
deleted file mode 100644
index 31b9fa2..0000000
--- a/libvideoeditor/lvpp/AudioPlayerBase.h
+++ /dev/null
@@ -1,119 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef AUDIO_PLAYER_BASE_H_
-
-#define AUDIO_PLAYER_BASE_H_
-
-#include <media/MediaPlayerInterface.h>
-#include <media/stagefright/MediaBuffer.h>
-#include <media/stagefright/TimeSource.h>
-#include <utils/threads.h>
-
-namespace android {
-
-class MediaSource;
-class AudioTrack;
-class PreviewPlayerBase;
-
-class AudioPlayerBase : public TimeSource {
-public:
-    enum {
-        REACHED_EOS,
-        SEEK_COMPLETE
-    };
-
-    AudioPlayerBase(const sp<MediaPlayerBase::AudioSink> &audioSink,
-                PreviewPlayerBase *audioObserver = NULL);
-
-    virtual ~AudioPlayerBase();
-
-    // Caller retains ownership of "source".
-    void setSource(const sp<MediaSource> &source);
-
-    // Return time in us.
-    virtual int64_t getRealTimeUs();
-
-    status_t start(bool sourceAlreadyStarted = false);
-
-    void pause(bool playPendingSamples = false);
-    void resume();
-
-    // Returns the timestamp of the last buffer played (in us).
-    int64_t getMediaTimeUs();
-
-    // Returns true iff a mapping is established, i.e. the AudioPlayerBase
-    // has played at least one frame of audio.
-    bool getMediaTimeMapping(int64_t *realtime_us, int64_t *mediatime_us);
-
-    status_t seekTo(int64_t time_us);
-
-    bool isSeeking();
-    bool reachedEOS(status_t *finalStatus);
-
-private:
-    friend class VideoEditorAudioPlayer;
-    sp<MediaSource> mSource;
-    AudioTrack *mAudioTrack;
-
-    MediaBuffer *mInputBuffer;
-
-    int mSampleRate;
-    int64_t mLatencyUs;
-    size_t mFrameSize;
-
-    Mutex mLock;
-    int64_t mNumFramesPlayed;
-
-    int64_t mPositionTimeMediaUs;
-    int64_t mPositionTimeRealUs;
-
-    bool mSeeking;
-    bool mReachedEOS;
-    status_t mFinalStatus;
-    int64_t mSeekTimeUs;
-
-    bool mStarted;
-
-    bool mIsFirstBuffer;
-    status_t mFirstBufferResult;
-    MediaBuffer *mFirstBuffer;
-
-    sp<MediaPlayerBase::AudioSink> mAudioSink;
-    PreviewPlayerBase *mObserver;
-
-    static void AudioCallback(int event, void *user, void *info);
-    void AudioCallback(int event, void *info);
-
-    static size_t AudioSinkCallback(
-            MediaPlayerBase::AudioSink *audioSink,
-            void *data, size_t size, void *me);
-
-    size_t fillBuffer(void *data, size_t size);
-
-    int64_t getRealTimeUsLocked() const;
-
-    void reset();
-
-    uint32_t getNumFramesPendingPlayout() const;
-
-    AudioPlayerBase(const AudioPlayerBase &);
-    AudioPlayerBase &operator=(const AudioPlayerBase &);
-};
-
-}  // namespace android
-
-#endif  // AUDIO_PLAYER_BASE_H_
diff --git a/libvideoeditor/lvpp/DummyAudioSource.cpp b/libvideoeditor/lvpp/DummyAudioSource.cpp
index db1b573..dbcab68 100755
--- a/libvideoeditor/lvpp/DummyAudioSource.cpp
+++ b/libvideoeditor/lvpp/DummyAudioSource.cpp
@@ -14,149 +14,112 @@
  * limitations under the License.
  */
 
-#define LOG_NDEBUG 1
+// #define LOG_NDEBUG 0
 #define LOG_TAG "DummyAudioSource"
-#include "utils/Log.h"
-
+#include <utils/Log.h>
+#include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/MediaErrors.h>
-#include <media/stagefright/MediaDebug.h>
 #include <media/stagefright/MediaDefs.h>
-#include <media/stagefright/MediaBuffer.h>
 #include <media/stagefright/MediaBufferGroup.h>
 #include <media/stagefright/MetaData.h>
-
-
 #include "DummyAudioSource.h"
 
 
-
-/* Android includes*/
-#include <utils/Log.h>
-#include <memory.h>
-
-
-/*---------------------*/
-/*  DEBUG LEVEL SETUP  */
-/*---------------------*/
-#define LOG1 LOGE    /*ERRORS Logging*/
-#define LOG2 LOGV    /*WARNING Logging*/
-#define LOG3 //LOGV    /*COMMENTS Logging*/
-
-/*---------------------*/
-/*  CONSTANTS          */
-/*---------------------*/
-
-
 namespace android {
 
-
-/*---------------------*/
-/*  SEEK SOURCE        */
-/*---------------------*/
-
 //static
-sp<DummyAudioSource> DummyAudioSource::Create(int32_t samplingRate,
-                                              int32_t channelCount,
-                                              int64_t frameDurationUs,
-                                              int64_t audioDurationUs) {
-    LOG2("DummyAudioSource::Create ");
-    sp<DummyAudioSource> aSource = new DummyAudioSource (samplingRate,
-                                                         channelCount,
-                                                         frameDurationUs,
-                                                         audioDurationUs);
-    return aSource;
+sp<DummyAudioSource> DummyAudioSource::Create(
+        int32_t samplingRate, int32_t channelCount,
+        int64_t frameDurationUs, int64_t audioDurationUs) {
+
+    ALOGV("Create ");
+    return new DummyAudioSource(samplingRate,
+                                channelCount,
+                                frameDurationUs,
+                                audioDurationUs);
+
 }
 
+DummyAudioSource::DummyAudioSource(
+        int32_t samplingRate, int32_t channelCount,
+        int64_t frameDurationUs, int64_t audioDurationUs)
+    : mSamplingRate(samplingRate),
+      mChannelCount(channelCount),
+      mFrameDurationUs(frameDurationUs),
+      mNumberOfSamplePerFrame(0),
+      mAudioDurationUs(audioDurationUs),
+      mTimeStampUs(0),
+      mBufferGroup(NULL) {
 
-DummyAudioSource::DummyAudioSource (int32_t samplingRate,
-                                    int32_t channelCount,
-                                    int64_t frameDurationUs,
-                                    int64_t audioDurationUs):
-    mSamplingRate(samplingRate),
-    mChannelCount(channelCount),
-    mFrameDurationUs(frameDurationUs),
-    mNumberOfSamplePerFrame(0),
-    mAudioDurationUs(audioDurationUs),
-    mTimeStampUs(0) ,
+    mNumberOfSamplePerFrame = (int32_t)
+            ((1L * mSamplingRate * mFrameDurationUs)/1000000);
+    mNumberOfSamplePerFrame = mNumberOfSamplePerFrame  * mChannelCount;
 
-    mBufferGroup(NULL){
-    LOG2("DummyAudioSource::DummyAudioSource constructor START");
+    ALOGV("Constructor: E");
+    ALOGV("samplingRate = %d", samplingRate);
+    ALOGV("channelCount = %d", channelCount);
+    ALOGV("frameDurationUs = %lld", frameDurationUs);
+    ALOGV("audioDurationUs = %lld", audioDurationUs);
+    ALOGV("mNumberOfSamplePerFrame = %d", mNumberOfSamplePerFrame);
+    ALOGV("Constructor: X");
+}
+
+DummyAudioSource::~DummyAudioSource() {
     /* Do nothing here? */
-    LOG2("DummyAudioSource::DummyAudioSource");
-    LOG2("DummyAudioSource:: mSamplingRate = %d",samplingRate);
-    LOG2("DummyAudioSource:: mChannelCount = %d",channelCount);
-    LOG2("DummyAudioSource:: frameDurationUs = %lld",frameDurationUs);
-    LOG2("DummyAudioSource:: mAudioDurationUs = %lld",mAudioDurationUs);
-
-    LOG2("DummyAudioSource::DummyAudioSource constructor END");
+    ALOGV("~DummyAudioSource");
 }
 
+void DummyAudioSource::setDuration(int64_t audioDurationUs) {
+    ALOGV("setDuration: %lld us added to %lld us",
+        audioDurationUs, mAudioDurationUs);
 
-DummyAudioSource::~DummyAudioSource () {
-    /* Do nothing here? */
-    LOG2("DummyAudioSource::~DummyAudioSource");
-}
-
-void DummyAudioSource::setDuration (int64_t audioDurationUs) {
     Mutex::Autolock autoLock(mLock);
-    LOG2("SetDuration %lld", mAudioDurationUs);
     mAudioDurationUs += audioDurationUs;
-    LOG2("SetDuration %lld", mAudioDurationUs);
 }
 
 status_t DummyAudioSource::start(MetaData *params) {
+    ALOGV("start: E");
     status_t err = OK;
-    LOG2("DummyAudioSource::start START");
 
     mTimeStampUs = 0;
-    mNumberOfSamplePerFrame = (int32_t) ((1L * mSamplingRate * mFrameDurationUs)/1000000);
-    mNumberOfSamplePerFrame = mNumberOfSamplePerFrame  * mChannelCount;
 
     mBufferGroup = new MediaBufferGroup;
     mBufferGroup->add_buffer(
             new MediaBuffer(mNumberOfSamplePerFrame * sizeof(int16_t)));
 
-    LOG2("DummyAudioSource:: mNumberOfSamplePerFrame = %d",mNumberOfSamplePerFrame);
-    LOG2("DummyAudioSource::start END");
+    ALOGV("start: X");
 
     return err;
 }
 
-
 status_t DummyAudioSource::stop() {
-    status_t err = OK;
-
-    LOG2("DummyAudioSource::stop START");
+    ALOGV("stop");
 
     delete mBufferGroup;
     mBufferGroup = NULL;
 
-
-    LOG2("DummyAudioSource::stop END");
-
-    return err;
+    return OK;
 }
 
 
 sp<MetaData> DummyAudioSource::getFormat() {
-    LOG2("DummyAudioSource::getFormat");
+    ALOGV("getFormat");
 
     sp<MetaData> meta = new MetaData;
-
     meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_AUDIO_RAW);
     meta->setInt32(kKeyChannelCount, mChannelCount);
     meta->setInt32(kKeySampleRate, mSamplingRate);
     meta->setInt64(kKeyDuration, mFrameDurationUs);
-
     meta->setCString(kKeyDecoderComponent, "DummyAudioSource");
 
     return meta;
 }
 
-status_t DummyAudioSource::read( MediaBuffer **out, const MediaSource::ReadOptions *options) {
-    status_t err            = OK;
-    //LOG2("DummyAudioSource::read START");
-    MediaBuffer *buffer;
+status_t DummyAudioSource::read(
+        MediaBuffer **out, const MediaSource::ReadOptions *options) {
+
+    ALOGV("read: E");
+
     int64_t seekTimeUs;
     ReadOptions::SeekMode mode;
 
@@ -164,32 +127,37 @@
         CHECK(seekTimeUs >= 0);
         mTimeStampUs = seekTimeUs;
     }
+
     {
         Mutex::Autolock autoLock(mLock);
         if (mTimeStampUs >= mAudioDurationUs) {
+            ALOGI("read: EOS reached %lld > %lld",
+                mTimeStampUs, mAudioDurationUs);
+
             *out = NULL;
-            LOGI("EOS reached");
             return ERROR_END_OF_STREAM;
         }
     }
 
-    err = mBufferGroup->acquire_buffer(&buffer);
+    MediaBuffer *buffer;
+    status_t err = mBufferGroup->acquire_buffer(&buffer);
     if (err != OK) {
+        ALOGE("Failed to acquire buffer from mBufferGroup: %d", err);
         return err;
     }
 
     memset((uint8_t *) buffer->data() + buffer->range_offset(),
             0, mNumberOfSamplePerFrame << 1);
-
     buffer->set_range(buffer->range_offset(), (mNumberOfSamplePerFrame << 1));
-
     buffer->meta_data()->setInt64(kKeyTime, mTimeStampUs);
-    LOG2("DummyAudioSource::read  Buffer_offset  = %d,"
-            "Buffer_Size = %d, mTimeStampUs = %lld",
-             buffer->range_offset(),buffer->size(),mTimeStampUs);
+
+    ALOGV("read: offset  = %d, size = %d, mTimeStampUs = %lld",
+             buffer->range_offset(), buffer->size(), mTimeStampUs);
+
     mTimeStampUs = mTimeStampUs + mFrameDurationUs;
     *out = buffer;
-    return err;
+
+    return OK;
 }
 
 }// namespace android
diff --git a/libvideoeditor/lvpp/DummyAudioSource.h b/libvideoeditor/lvpp/DummyAudioSource.h
index 8715b10..5f25a8c 100755
--- a/libvideoeditor/lvpp/DummyAudioSource.h
+++ b/libvideoeditor/lvpp/DummyAudioSource.h
@@ -15,44 +15,34 @@
  */
 
 #ifndef DUMMY_AUDIOSOURCE_H_
-
 #define DUMMY_AUDIOSOURCE_H_
 
-#include <utils/RefBase.h>
-#include <utils/threads.h>
-#include <media/stagefright/MediaBufferGroup.h>
 #include <media/stagefright/MediaSource.h>
-#include <media/stagefright/DataSource.h>
 
 
 namespace android {
 
-
-class MediaBuffer;
 class MetaData;
 struct MediaBufferGroup;
 
-
-
 struct DummyAudioSource : public MediaSource {
 
 public:
-    static sp<DummyAudioSource> Create(int32_t samplingRate,
-                                       int32_t channelCount,
-                                       int64_t frameDurationUs,
-                                       int64_t audioDurationUs);
+    static sp<DummyAudioSource> Create(
+                int32_t samplingRate, int32_t channelCount,
+                int64_t frameDurationUs, int64_t audioDurationUs);
+
     virtual status_t start(MetaData *params = NULL);
     virtual status_t stop();
     virtual sp<MetaData> getFormat();
-    virtual status_t read (MediaBuffer **buffer,
-                            const MediaSource::ReadOptions *options = NULL);
-    void setDuration (int64_t audioDurationUs);
+
+    virtual status_t read(
+                MediaBuffer **buffer,
+                const MediaSource::ReadOptions *options = NULL);
+
+    void setDuration(int64_t audioDurationUs);
 
 protected:
-    DummyAudioSource (int32_t samplingRate,
-                      int32_t channelCount,
-                      int64_t frameDurationUs,
-                      int64_t audioDurationUs);
     virtual ~DummyAudioSource();
 
 private:
@@ -66,6 +56,11 @@
 
     MediaBufferGroup *mBufferGroup;
 
+    DummyAudioSource(
+            int32_t samplingRate, int32_t channelCount,
+            int64_t frameDurationUs, int64_t audioDurationUs);
+
+    // Don't call me
     DummyAudioSource(const DummyAudioSource &);
     DummyAudioSource &operator=(const DummyAudioSource &);
 
diff --git a/libvideoeditor/lvpp/DummyVideoSource.cpp b/libvideoeditor/lvpp/DummyVideoSource.cpp
index 44be104..b06f937 100755
--- a/libvideoeditor/lvpp/DummyVideoSource.cpp
+++ b/libvideoeditor/lvpp/DummyVideoSource.cpp
@@ -14,101 +14,98 @@
  * limitations under the License.
  */
 
-#define LOG_NDEBUG 1
+//#define LOG_NDEBUG 0
 #define LOG_TAG "DummyVideoSource"
-#include "utils/Log.h"
-
+#include <stdlib.h>
+#include <utils/Log.h>
+#include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/MediaErrors.h>
-#include <media/stagefright/MediaDebug.h>
 #include <media/stagefright/MediaDefs.h>
 #include <media/stagefright/MediaBuffer.h>
 #include <media/stagefright/MediaBufferGroup.h>
 #include <media/stagefright/MetaData.h>
-
+#include "VideoEditorTools.h"
 #include "DummyVideoSource.h"
 
-/* Android includes*/
-#include <utils/Log.h>
-#include <memory.h>
-
-
-#define LOG1 LOGE    /*ERRORS Logging*/
-#define LOG2 LOGV    /*WARNING Logging*/
-#define LOG3 //LOGV    /*COMMENTS Logging*/
-
 
 namespace android {
 
+sp<DummyVideoSource> DummyVideoSource::Create(
+        uint32_t width, uint32_t height,
+        uint64_t clipDuration, const char *imageUri) {
 
-sp<DummyVideoSource> DummyVideoSource::Create (
-            uint32_t width, uint32_t height,
-            uint64_t clipDuration, const char *imageUri) {
-    LOG2("DummyVideoSource::Create ");
-    sp<DummyVideoSource> vSource = new DummyVideoSource (
-                         width, height, clipDuration, imageUri);
-    return vSource;
+    ALOGV("Create");
+    return new DummyVideoSource(
+                    width, height, clipDuration, imageUri);
+
 }
 
 
-DummyVideoSource::DummyVideoSource (
-            uint32_t width, uint32_t height,
-            uint64_t clipDuration, const char *imageUri) {
+DummyVideoSource::DummyVideoSource(
+        uint32_t width, uint32_t height,
+        uint64_t clipDuration, const char *imageUri) {
 
-    LOG2("DummyVideoSource::DummyVideoSource constructor START");
+    ALOGV("Constructor: E");
+
     mFrameWidth = width;
     mFrameHeight = height;
     mImageClipDuration = clipDuration;
     mUri = imageUri;
     mImageBuffer = NULL;
 
-    LOG2("DummyVideoSource::DummyVideoSource constructor END");
+    ALOGV("%s", mUri);
+    ALOGV("Constructor: X");
 }
 
 
-DummyVideoSource::~DummyVideoSource () {
+DummyVideoSource::~DummyVideoSource() {
     /* Do nothing here? */
-    LOG2("DummyVideoSource::~DummyVideoSource");
+    ALOGV("~DummyVideoSource");
 }
 
 
 
 status_t DummyVideoSource::start(MetaData *params) {
-    status_t err = OK;
-    LOG2("DummyVideoSource::start START, %s", mUri);
-    //get the frame buffer from the rgb file and store into a MediaBuffer
-    err = LvGetImageThumbNail((const char *)mUri,
-                          mFrameHeight , mFrameWidth ,
-                          (M4OSA_Void **)&mImageBuffer);
+    ALOGV("start: E");
+
+    // Get the frame buffer from the rgb file, mUri,
+    // and store its content into a MediaBuffer
+    status_t err = LvGetImageThumbNail(
+                    (const char *)mUri,
+                    mFrameHeight, mFrameWidth,
+                    (M4OSA_Void **) &mImageBuffer);
+    if (err != OK) {
+        ALOGE("LvGetImageThumbNail failed: %d", err);
+        return err;
+    }
 
     mIsFirstImageFrame = true;
     mImageSeekTime = 0;
     mImagePlayStartTime = 0;
     mFrameTimeUs = 0;
-    LOG2("DummyVideoSource::start END");
 
-    return err;
+    ALOGV("start: X");
+    return OK;
 }
 
 
 status_t DummyVideoSource::stop() {
+    ALOGV("stop");
     status_t err = OK;
 
-    LOG2("DummyVideoSource::stop START");
     if (mImageBuffer != NULL) {
         free(mImageBuffer);
         mImageBuffer = NULL;
     }
-    LOG2("DummyVideoSource::stop END");
 
     return err;
 }
 
 
 sp<MetaData> DummyVideoSource::getFormat() {
-    LOG2("DummyVideoSource::getFormat");
+    ALOGV("getFormat");
 
     sp<MetaData> meta = new MetaData;
-
     meta->setInt32(kKeyColorFormat, OMX_COLOR_FormatYUV420Planar);
     meta->setInt32(kKeyWidth, mFrameWidth);
     meta->setInt32(kKeyHeight, mFrameHeight);
@@ -119,45 +116,54 @@
 }
 
 status_t DummyVideoSource::read(
-                        MediaBuffer **out,
-                        const MediaSource::ReadOptions *options) {
-    status_t err = OK;
-    MediaBuffer *buffer;
-    LOG2("DummyVideoSource::read START");
+        MediaBuffer **out,
+        const MediaSource::ReadOptions *options) {
 
+    ALOGV("read: E");
+
+    const int32_t kTimeScale = 1000;  /* time scale in ms */
     bool seeking = false;
     int64_t seekTimeUs;
     ReadOptions::SeekMode seekMode;
-
     if (options && options->getSeekTo(&seekTimeUs, &seekMode)) {
         seeking = true;
         mImageSeekTime = seekTimeUs;
-        M4OSA_clockGetTime(&mImagePlayStartTime, 1000); //1000 time scale for time in ms
+        M4OSA_clockGetTime(&mImagePlayStartTime, kTimeScale);
     }
 
-    if ((mImageSeekTime == mImageClipDuration) || (mFrameTimeUs == (int64_t)mImageClipDuration)) {
-        LOG2("DummyVideoSource::read() End of stream reached; return NULL buffer");
+    if ((mImageSeekTime == mImageClipDuration) ||
+        (mFrameTimeUs == (int64_t)mImageClipDuration)) {
+        ALOGV("read: EOS reached");
         *out = NULL;
         return ERROR_END_OF_STREAM;
     }
 
-    buffer = new MediaBuffer(mImageBuffer, (mFrameWidth*mFrameHeight*1.5));
+    status_t err = OK;
+    MediaBuffer *buffer = new MediaBuffer(
+            mImageBuffer, (mFrameWidth * mFrameHeight * 1.5));
 
-    //set timestamp of buffer
+    // Set timestamp of buffer
     if (mIsFirstImageFrame) {
-        M4OSA_clockGetTime(&mImagePlayStartTime, 1000); //1000 time scale for time in ms
+        M4OSA_clockGetTime(&mImagePlayStartTime, kTimeScale);
         mFrameTimeUs =  (mImageSeekTime + 1);
-        LOG2("DummyVideoSource::read() jpg 1st frame timeUs = %lld, begin cut time = %ld", mFrameTimeUs, mImageSeekTime);
+        ALOGV("read: jpg 1st frame timeUs = %lld, begin cut time = %ld",
+            mFrameTimeUs, mImageSeekTime);
+
         mIsFirstImageFrame = false;
     } else {
         M4OSA_Time  currentTimeMs;
-        M4OSA_clockGetTime(&currentTimeMs, 1000);
+        M4OSA_clockGetTime(&currentTimeMs, kTimeScale);
 
-        mFrameTimeUs = mImageSeekTime + (currentTimeMs - mImagePlayStartTime)*1000;
-        LOG2("DummyVideoSource::read() jpg frame timeUs = %lld", mFrameTimeUs);
+        mFrameTimeUs = mImageSeekTime +
+            (currentTimeMs - mImagePlayStartTime) * 1000LL;
+
+        ALOGV("read: jpg frame timeUs = %lld", mFrameTimeUs);
     }
+
     buffer->meta_data()->setInt64(kKeyTime, mFrameTimeUs);
-    buffer->set_range(buffer->range_offset(), mFrameWidth*mFrameHeight*1.5);
+    buffer->set_range(buffer->range_offset(),
+                mFrameWidth * mFrameHeight * 1.5);
+
     *out = buffer;
     return err;
 }
diff --git a/libvideoeditor/lvpp/DummyVideoSource.h b/libvideoeditor/lvpp/DummyVideoSource.h
index 28c33c3..16514f2 100755
--- a/libvideoeditor/lvpp/DummyVideoSource.h
+++ b/libvideoeditor/lvpp/DummyVideoSource.h
@@ -15,43 +15,34 @@
  */
 
 #ifndef DUMMY_VIDEOSOURCE_H_
-
 #define DUMMY_VIDEOSOURCE_H_
 
-#include <utils/RefBase.h>
-#include <utils/threads.h>
 #include <media/stagefright/MediaSource.h>
-#include <media/stagefright/DataSource.h>
-#include "OMX_IVCommon.h"
-#include "VideoEditorTools.h"
 #include "M4OSA_Clock.h"
 #include "M4OSA_Time.h"
 #include "M4OSA_Types.h"
 
-
 namespace android {
 
-
 class  MediaBuffer;
 class  MetaData;
 
 struct DummyVideoSource : public MediaSource {
 
 public:
-    static sp<DummyVideoSource> Create(uint32_t width, 
-                                       uint32_t height, 
-                                       uint64_t clipDuration, 
-                                       const char *imageUri);
+    static sp<DummyVideoSource> Create(
+                uint32_t width, uint32_t height,
+                uint64_t clipDuration, const char *imageUri);
 
     virtual status_t start(MetaData *params = NULL);
     virtual status_t stop();
     virtual sp<MetaData> getFormat();
-    virtual status_t read(MediaBuffer **buffer, 
-                          const MediaSource::ReadOptions *options = NULL);
+
+    virtual status_t read(
+                MediaBuffer **buffer,
+                const MediaSource::ReadOptions *options = NULL);
 
 protected:
-    DummyVideoSource (uint32_t width, uint32_t height, 
-                      uint64_t clipDuration, const char *imageUri);
     virtual ~DummyVideoSource();
 
 private:
@@ -65,6 +56,11 @@
     M4OSA_Time mImagePlayStartTime;
     uint32_t mImageSeekTime;
 
+    DummyVideoSource(
+            uint32_t width, uint32_t height,
+            uint64_t clipDuration, const char *imageUri);
+
+    // Don't call me
     DummyVideoSource(const DummyVideoSource &);
     DummyVideoSource &operator=(const DummyVideoSource &);
 
diff --git a/libvideoeditor/lvpp/I420ColorConverter.cpp b/libvideoeditor/lvpp/I420ColorConverter.cpp
index 75f1e93..321d3fe 100755
--- a/libvideoeditor/lvpp/I420ColorConverter.cpp
+++ b/libvideoeditor/lvpp/I420ColorConverter.cpp
@@ -23,7 +23,7 @@
     mHandle = dlopen("libI420colorconvert.so", RTLD_NOW);
 
     if (mHandle == NULL) {
-        LOGW("I420ColorConverter: cannot load libI420colorconvert.so");
+        ALOGW("I420ColorConverter: cannot load libI420colorconvert.so");
         return;
     }
 
@@ -32,7 +32,7 @@
         (void (*)(I420ColorConverter*)) dlsym(mHandle, "getI420ColorConverter");
 
     if (getI420ColorConverter == NULL) {
-        LOGW("I420ColorConverter: cannot load getI420ColorConverter");
+        ALOGW("I420ColorConverter: cannot load getI420ColorConverter");
         dlclose(mHandle);
         mHandle = NULL;
         return;
@@ -41,7 +41,7 @@
     // Fill the function pointers.
     getI420ColorConverter(this);
 
-    LOGI("I420ColorConverter: libI420colorconvert.so loaded");
+    ALOGI("I420ColorConverter: libI420colorconvert.so loaded");
 }
 
 bool I420ColorConverter::isLoaded() {
diff --git a/libvideoeditor/lvpp/NativeWindowRenderer.cpp b/libvideoeditor/lvpp/NativeWindowRenderer.cpp
index 854062e..0f3ea3c 100755
--- a/libvideoeditor/lvpp/NativeWindowRenderer.cpp
+++ b/libvideoeditor/lvpp/NativeWindowRenderer.cpp
@@ -22,10 +22,9 @@
 #include <cutils/log.h>
 #include <gui/SurfaceTexture.h>
 #include <gui/SurfaceTextureClient.h>
-#include <stagefright/MediaBuffer.h>
-#include <stagefright/MediaDebug.h>
-#include <stagefright/MetaData.h>
-#include <surfaceflinger/Surface.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/MediaBuffer.h>
+#include <media/stagefright/MetaData.h>
 #include "VideoEditorTools.h"
 
 #define CHECK_EGL_ERROR CHECK(EGL_SUCCESS == eglGetError())
@@ -146,7 +145,7 @@
 // other threads wait until the request is finished by GL thread.
 
 int NativeWindowRenderer::threadStart(void* self) {
-    LOGD("create thread");
+    ALOGD("create thread");
     ((NativeWindowRenderer*)self)->glThread();
     return 0;
 }
@@ -181,7 +180,7 @@
         mThreadCmd = CMD_IDLE;
         mCond.broadcast();
     }
-    LOGD("quit");
+    ALOGD("quit");
 }
 
 void NativeWindowRenderer::initializeEGL() {
@@ -270,7 +269,7 @@
             char* buf = (char*) malloc(infoLen);
             if (buf) {
                 glGetProgramInfoLog(program, infoLen, NULL, buf);
-                LOGE("Program link log:\n%s\n", buf);
+                ALOGE("Program link log:\n%s\n", buf);
                 free(buf);
             }
         }
@@ -300,7 +299,7 @@
         char* buf = (char*) malloc(infoLen);
         if (buf) {
             glGetShaderInfoLog(shader, infoLen, NULL, buf);
-            LOGE("Shader compile log:\n%s\n", buf);
+            ALOGE("Shader compile log:\n%s\n", buf);
             free(buf);
         }
         glDeleteShader(shader);
@@ -385,7 +384,7 @@
     native_window_set_buffers_timestamp(anw, timeUs * 1000);
     status_t err = anw->queueBuffer(anw, buffer->graphicBuffer().get());
     if (err != 0) {
-        LOGE("queueBuffer failed with error %s (%d)", strerror(-err), -err);
+        ALOGE("queueBuffer failed with error %s (%d)", strerror(-err), -err);
         return;
     }
 
@@ -540,7 +539,7 @@
 }
 
 RenderInput* NativeWindowRenderer::createRenderInput() {
-    LOGD("new render input %d", mNextTextureId);
+    ALOGD("new render input %d", mNextTextureId);
     RenderInput* input = new RenderInput(this, mNextTextureId);
 
     startRequest(CMD_RESERVE_TEXTURE);
@@ -553,7 +552,7 @@
 }
 
 void NativeWindowRenderer::destroyRenderInput(RenderInput* input) {
-    LOGD("destroy render input %d", input->mTextureId);
+    ALOGD("destroy render input %d", input->mTextureId);
     GLuint textureId = input->mTextureId;
     delete input;
 
diff --git a/libvideoeditor/lvpp/NativeWindowRenderer.h b/libvideoeditor/lvpp/NativeWindowRenderer.h
index cafd6fb..8fbb4f9 100755
--- a/libvideoeditor/lvpp/NativeWindowRenderer.h
+++ b/libvideoeditor/lvpp/NativeWindowRenderer.h
@@ -19,8 +19,8 @@
 
 #include <EGL/egl.h>
 #include <GLES2/gl2.h>
-#include <stagefright/MediaBuffer.h>
-#include <stagefright/MetaData.h>
+#include <media/stagefright/MediaBuffer.h>
+#include <media/stagefright/MetaData.h>
 #include <utils/RefBase.h>
 #include <utils/threads.h>
 
diff --git a/libvideoeditor/lvpp/PreviewPlayer.cpp b/libvideoeditor/lvpp/PreviewPlayer.cpp
index 349e9c2..34731d7 100755
--- a/libvideoeditor/lvpp/PreviewPlayer.cpp
+++ b/libvideoeditor/lvpp/PreviewPlayer.cpp
@@ -15,37 +15,43 @@
  */
 
 
-#define LOG_NDEBUG 1
+// #define LOG_NDEBUG 0
 #define LOG_TAG "PreviewPlayer"
 #include <utils/Log.h>
 
-#include <dlfcn.h>
-
-#include "PreviewPlayer.h"
-#include "DummyAudioSource.h"
-#include "DummyVideoSource.h"
-#include "VideoEditorSRC.h"
-#include "include/NuCachedSource2.h"
-#include "include/ThrottledSource.h"
-
-
 #include <binder/IPCThreadState.h>
+#include <binder/IServiceManager.h>
+#include <media/IMediaPlayerService.h>
 #include <media/stagefright/DataSource.h>
-#include <media/stagefright/FileSource.h>
 #include <media/stagefright/MediaBuffer.h>
 #include <media/stagefright/MediaDefs.h>
 #include <media/stagefright/MediaExtractor.h>
-#include <media/stagefright/MediaDebug.h>
 #include <media/stagefright/MediaSource.h>
 #include <media/stagefright/MetaData.h>
 #include <media/stagefright/OMXCodec.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <gui/Surface.h>
+#include <gui/ISurfaceTexture.h>
+#include <gui/SurfaceTextureClient.h>
 
-#include <surfaceflinger/Surface.h>
-#include <media/stagefright/foundation/ALooper.h>
+#include "VideoEditorPreviewController.h"
+#include "DummyAudioSource.h"
+#include "DummyVideoSource.h"
+#include "VideoEditorSRC.h"
+#include "PreviewPlayer.h"
 
 namespace android {
 
 
+void addBatteryData(uint32_t params) {
+    sp<IBinder> binder =
+        defaultServiceManager()->getService(String16("media.player"));
+    sp<IMediaPlayerService> service = interface_cast<IMediaPlayerService>(binder);
+    CHECK(service.get() != NULL);
+
+    service->addBatteryData(params);
+}
+
 struct PreviewPlayerEvent : public TimedEventQueue::Event {
     PreviewPlayerEvent(
             PreviewPlayer *player,
@@ -70,15 +76,28 @@
 };
 
 PreviewPlayer::PreviewPlayer(NativeWindowRenderer* renderer)
-    : PreviewPlayerBase(),
+    : mQueueStarted(false),
+      mTimeSource(NULL),
+      mVideoRendererIsPreview(false),
+      mAudioPlayer(NULL),
+      mDisplayWidth(0),
+      mDisplayHeight(0),
+      mFlags(0),
+      mExtractorFlags(0),
+      mVideoBuffer(NULL),
+      mLastVideoTimeUs(-1),
       mNativeWindowRenderer(renderer),
       mCurrFramingEffectIndex(0),
       mFrameRGBBuffer(NULL),
       mFrameYUVBuffer(NULL) {
 
+    CHECK_EQ(mClient.connect(), (status_t)OK);
+    DataSource::RegisterDefaultSniffers();
+
+
     mVideoRenderer = NULL;
     mEffectsSettings = NULL;
-    mVeAudioPlayer = NULL;
+    mAudioPlayer = NULL;
     mAudioMixStoryBoardTS = 0;
     mCurrentMediaBeginCutTime = 0;
     mCurrentMediaVolumeValue = 0;
@@ -93,16 +112,14 @@
 
     mVideoEvent = new PreviewPlayerEvent(this, &PreviewPlayer::onVideoEvent);
     mVideoEventPending = false;
-    mStreamDoneEvent = new PreviewPlayerEvent(this,
-         &PreviewPlayer::onStreamDone);
-
-    mStreamDoneEventPending = false;
-
+    mVideoLagEvent = new PreviewPlayerEvent(this, &PreviewPlayer::onVideoLagUpdate);
+    mVideoEventPending = false;
     mCheckAudioStatusEvent = new PreviewPlayerEvent(
-        this, &PreviewPlayerBase::onCheckAudioStatus);
-
+            this, &PreviewPlayer::onCheckAudioStatus);
     mAudioStatusEventPending = false;
-
+    mStreamDoneEvent = new PreviewPlayerEvent(
+            this, &PreviewPlayer::onStreamDone);
+    mStreamDoneEventPending = false;
     mProgressCbEvent = new PreviewPlayerEvent(this,
          &PreviewPlayer::onProgressCbEvent);
 
@@ -127,35 +144,36 @@
     if (mVideoRenderer) {
         mNativeWindowRenderer->destroyRenderInput(mVideoRenderer);
     }
+
+    Mutex::Autolock lock(mLock);
+    clear_l();
+    mClient.disconnect();
 }
 
-void PreviewPlayer::cancelPlayerEvents(bool keepBufferingGoing) {
+void PreviewPlayer::cancelPlayerEvents_l(bool updateProgressCb) {
     mQueue.cancelEvent(mVideoEvent->eventID());
     mVideoEventPending = false;
     mQueue.cancelEvent(mStreamDoneEvent->eventID());
     mStreamDoneEventPending = false;
     mQueue.cancelEvent(mCheckAudioStatusEvent->eventID());
     mAudioStatusEventPending = false;
-
-    mQueue.cancelEvent(mProgressCbEvent->eventID());
-    mProgressCbEventPending = false;
+    mQueue.cancelEvent(mVideoLagEvent->eventID());
+    mVideoLagEventPending = false;
+    if (updateProgressCb) {
+        mQueue.cancelEvent(mProgressCbEvent->eventID());
+        mProgressCbEventPending = false;
+    }
 }
 
-status_t PreviewPlayer::setDataSource(
-        const char *uri, const KeyedVector<String8, String8> *headers) {
+status_t PreviewPlayer::setDataSource(const char *path) {
     Mutex::Autolock autoLock(mLock);
-    return setDataSource_l(uri, headers);
+    return setDataSource_l(path);
 }
 
-status_t PreviewPlayer::setDataSource_l(
-        const char *uri, const KeyedVector<String8, String8> *headers) {
+status_t PreviewPlayer::setDataSource_l(const char *path) {
     reset_l();
 
-    mUri = uri;
-
-    if (headers) {
-        mUriHeaders = *headers;
-    }
+    mUri = path;
 
     // The actual work will be done during preparation in the call to
     // ::finishSetDataSource_l to avoid blocking the calling thread in
@@ -201,11 +219,8 @@
 
     /* Add the support for Dummy audio*/
     if( !haveAudio ){
-        LOGV("PreviewPlayer: setDataSource_l Dummyaudiocreation started");
-
         mAudioTrack = DummyAudioSource::Create(32000, 2, 20000,
                                               ((mPlayEndTimeMsec)*1000LL));
-        LOGV("PreviewPlayer: setDataSource_l Dummyauiosource created");
         if(mAudioTrack != NULL) {
             haveAudio = true;
         }
@@ -220,18 +235,18 @@
 }
 
 status_t PreviewPlayer::setDataSource_l_jpg() {
+    ALOGV("setDataSource_l_jpg");
+
     M4OSA_ERR err = M4NO_ERROR;
-    LOGV("PreviewPlayer: setDataSource_l_jpg started");
 
     mAudioSource = DummyAudioSource::Create(32000, 2, 20000,
                                           ((mPlayEndTimeMsec)*1000LL));
-    LOGV("PreviewPlayer: setDataSource_l_jpg Dummyaudiosource created");
     if(mAudioSource != NULL) {
         setAudioSource(mAudioSource);
     }
     status_t error = mAudioSource->start();
     if (error != OK) {
-        LOGV("Error starting dummy audio source");
+        ALOGE("Error starting dummy audio source");
         mAudioSource.clear();
         return err;
     }
@@ -253,11 +268,6 @@
     return OK;
 }
 
-void PreviewPlayer::reset() {
-    Mutex::Autolock autoLock(mLock);
-    reset_l();
-}
-
 void PreviewPlayer::reset_l() {
 
     if (mFlags & PREPARING) {
@@ -268,7 +278,7 @@
         mPreparedCondition.wait(mLock);
     }
 
-    cancelPlayerEvents();
+    cancelPlayerEvents_l();
     mAudioTrack.clear();
     mVideoTrack.clear();
 
@@ -324,9 +334,6 @@
     mSeekTimeUs = 0;
 
     mUri.setTo("");
-    mUriHeaders.clear();
-
-    mFileSource.clear();
 
     mCurrentVideoEffect = VIDEO_EFFECT_NONE;
     mIsVideoSourceJpg = false;
@@ -338,6 +345,7 @@
 }
 
 status_t PreviewPlayer::play() {
+    ALOGV("play");
     Mutex::Autolock autoLock(mLock);
 
     mFlags &= ~CACHE_UNDERRUN;
@@ -346,6 +354,7 @@
 }
 
 status_t PreviewPlayer::startAudioPlayer_l() {
+    ALOGV("startAudioPlayer_l");
     CHECK(!(mFlags & AUDIO_RUNNING));
 
     if (mAudioSource == NULL || mAudioPlayer == NULL) {
@@ -357,7 +366,7 @@
 
         // We've already started the MediaSource in order to enable
         // the prefetcher to read its data.
-        status_t err = mVeAudioPlayer->start(
+        status_t err = mAudioPlayer->start(
                 true /* sourceAlreadyStarted */);
 
         if (err != OK) {
@@ -365,7 +374,7 @@
             return err;
         }
     } else {
-        mVeAudioPlayer->resume();
+        mAudioPlayer->resume();
     }
 
     mFlags |= AUDIO_RUNNING;
@@ -375,21 +384,20 @@
     return OK;
 }
 
-status_t PreviewPlayer::setAudioPlayer(AudioPlayerBase *audioPlayer) {
+status_t PreviewPlayer::setAudioPlayer(VideoEditorAudioPlayer *audioPlayer) {
+    ALOGV("setAudioPlayer");
     Mutex::Autolock autoLock(mLock);
     CHECK(!(mFlags & PLAYING));
     mAudioPlayer = audioPlayer;
 
-    LOGV("SetAudioPlayer");
+    ALOGV("SetAudioPlayer");
     mIsChangeSourceRequired = true;
-    mVeAudioPlayer =
-            (VideoEditorAudioPlayer*)mAudioPlayer;
 
     // check if the new and old source are dummy
-    sp<MediaSource> anAudioSource = mVeAudioPlayer->getSource();
+    sp<MediaSource> anAudioSource = mAudioPlayer->getSource();
     if (anAudioSource == NULL) {
         // Audio player does not have any source set.
-        LOGV("setAudioPlayer: Audio player does not have any source set");
+        ALOGV("setAudioPlayer: Audio player does not have any source set");
         return OK;
     }
 
@@ -402,7 +410,7 @@
         const char *pVidSrcType;
         if (meta->findCString(kKeyDecoderComponent, &pVidSrcType)) {
             if (strcmp(pVidSrcType, "DummyVideoSource") != 0) {
-                LOGV(" Video clip with silent audio; need to change source");
+                ALOGV(" Video clip with silent audio; need to change source");
                 return OK;
             }
         }
@@ -427,7 +435,7 @@
 
                     // Stop the new audio source
                     // since we continue using old source
-                    LOGV("setAudioPlayer: stop new audio source");
+                    ALOGV("setAudioPlayer: stop new audio source");
                     mAudioSource->stop();
                 }
             }
@@ -438,6 +446,7 @@
 }
 
 void PreviewPlayer::onStreamDone() {
+    ALOGV("onStreamDone");
     // Posted whenever any stream finishes playing.
 
     Mutex::Autolock autoLock(mLock);
@@ -447,7 +456,7 @@
     mStreamDoneEventPending = false;
 
     if (mStreamDoneStatus != ERROR_END_OF_STREAM) {
-        LOGV("MEDIA_ERROR %d", mStreamDoneStatus);
+        ALOGV("MEDIA_ERROR %d", mStreamDoneStatus);
 
         notifyListener_l(
                 MEDIA_ERROR, MEDIA_ERROR_UNKNOWN, mStreamDoneStatus);
@@ -473,7 +482,7 @@
             postVideoEvent_l();
         }
     } else {
-        LOGV("MEDIA_PLAYBACK_COMPLETE");
+        ALOGV("MEDIA_PLAYBACK_COMPLETE");
         //pause before sending event
         pause_l(true /* at eos */);
 
@@ -499,13 +508,14 @@
             mFlags |= INFORMED_AV_EOS;
         }
         mFlags |= AT_EOS;
-        LOGV("onStreamDone end");
+        ALOGV("onStreamDone end");
         return;
     }
 }
 
 
 status_t PreviewPlayer::play_l() {
+    ALOGV("play_l");
 
     mFlags &= ~SEEK_PREVIEW;
 
@@ -532,25 +542,22 @@
             if (mAudioSink != NULL) {
 
                 mAudioPlayer = new VideoEditorAudioPlayer(mAudioSink, this);
-                mVeAudioPlayer =
-                          (VideoEditorAudioPlayer*)mAudioPlayer;
-
                 mAudioPlayer->setSource(mAudioSource);
 
-                mVeAudioPlayer->setAudioMixSettings(
+                mAudioPlayer->setAudioMixSettings(
                  mPreviewPlayerAudioMixSettings);
 
-                mVeAudioPlayer->setAudioMixPCMFileHandle(
+                mAudioPlayer->setAudioMixPCMFileHandle(
                  mAudioMixPCMFileHandle);
 
-                mVeAudioPlayer->setAudioMixStoryBoardSkimTimeStamp(
+                mAudioPlayer->setAudioMixStoryBoardSkimTimeStamp(
                  mAudioMixStoryBoardTS, mCurrentMediaBeginCutTime,
                  mCurrentMediaVolumeValue);
 
                  mFlags |= AUDIOPLAYER_STARTED;
                 // We've already started the MediaSource in order to enable
                 // the prefetcher to read its data.
-                status_t err = mVeAudioPlayer->start(
+                status_t err = mAudioPlayer->start(
                         true /* sourceAlreadyStarted */);
 
                 if (err != OK) {
@@ -561,58 +568,57 @@
                     return err;
                 }
 
-                mTimeSource = mVeAudioPlayer;
+                mTimeSource = mAudioPlayer;
                 mFlags |= AUDIO_RUNNING;
                 deferredAudioSeek = true;
                 mWatchForAudioSeekComplete = false;
                 mWatchForAudioEOS = true;
             }
         } else {
-            mVeAudioPlayer = (VideoEditorAudioPlayer*)mAudioPlayer;
-            bool isAudioPlayerStarted = mVeAudioPlayer->isStarted();
+            bool isAudioPlayerStarted = mAudioPlayer->isStarted();
 
             if (mIsChangeSourceRequired == true) {
-                LOGV("play_l: Change audio source required");
+                ALOGV("play_l: Change audio source required");
 
                 if (isAudioPlayerStarted == true) {
-                    mVeAudioPlayer->pause();
+                    mAudioPlayer->pause();
                 }
 
-                mVeAudioPlayer->setSource(mAudioSource);
-                mVeAudioPlayer->setObserver(this);
+                mAudioPlayer->setSource(mAudioSource);
+                mAudioPlayer->setObserver(this);
 
-                mVeAudioPlayer->setAudioMixSettings(
+                mAudioPlayer->setAudioMixSettings(
                  mPreviewPlayerAudioMixSettings);
 
-                mVeAudioPlayer->setAudioMixStoryBoardSkimTimeStamp(
+                mAudioPlayer->setAudioMixStoryBoardSkimTimeStamp(
                     mAudioMixStoryBoardTS, mCurrentMediaBeginCutTime,
                     mCurrentMediaVolumeValue);
 
                 if (isAudioPlayerStarted == true) {
-                    mVeAudioPlayer->resume();
+                    mAudioPlayer->resume();
                 } else {
                     status_t err = OK;
-                    err = mVeAudioPlayer->start(true);
+                    err = mAudioPlayer->start(true);
                     if (err != OK) {
                         mAudioPlayer = NULL;
-                        mVeAudioPlayer = NULL;
 
                         mFlags &= ~(PLAYING | FIRST_FRAME);
                         return err;
                     }
                 }
             } else {
-                LOGV("play_l: No Source change required");
-                mVeAudioPlayer->setAudioMixStoryBoardSkimTimeStamp(
+                ALOGV("play_l: No Source change required");
+                mAudioPlayer->setAudioMixStoryBoardSkimTimeStamp(
                     mAudioMixStoryBoardTS, mCurrentMediaBeginCutTime,
                     mCurrentMediaVolumeValue);
 
-                mVeAudioPlayer->resume();
+                mAudioPlayer->resume();
             }
 
             mFlags |= AUDIOPLAYER_STARTED;
             mFlags |= AUDIO_RUNNING;
-            mTimeSource = mVeAudioPlayer;
+            mTimeSource = mAudioPlayer;
             deferredAudioSeek = true;
             mWatchForAudioSeekComplete = false;
             mWatchForAudioEOS = true;
@@ -668,9 +674,8 @@
 
 
 status_t PreviewPlayer::seekTo(int64_t timeUs) {
-
+    Mutex::Autolock autoLock(mLock);
     if ((mExtractorFlags & MediaExtractor::CAN_SEEK) || (mIsVideoSourceJpg)) {
-        Mutex::Autolock autoLock(mLock);
         return seekTo_l(timeUs);
     }
 
@@ -693,7 +698,7 @@
 }
 
 
-status_t PreviewPlayer::initAudioDecoder() {
+status_t PreviewPlayer::initAudioDecoder_l() {
     sp<MetaData> meta = mAudioTrack->getFormat();
     const char *mime;
     CHECK(meta->findCString(kKeyMIMEType, &mime));
@@ -708,7 +713,6 @@
                 mAudioTrack);
 
         if(aRawSource != NULL) {
-            LOGV("initAudioDecoder: new VideoEditorSRC");
             mAudioSource = new VideoEditorSRC(aRawSource);
         }
     }
@@ -716,10 +720,7 @@
     if (mAudioSource != NULL) {
         int64_t durationUs;
         if (mAudioTrack->getFormat()->findInt64(kKeyDuration, &durationUs)) {
-            Mutex::Autolock autoLock(mMiscStateLock);
-            if (mDurationUs < 0 || durationUs > mDurationUs) {
-                mDurationUs = durationUs;
-            }
+            setDuration_l(durationUs);
         }
         status_t err = mAudioSource->start();
 
@@ -737,13 +738,11 @@
     return mAudioSource != NULL ? OK : UNKNOWN_ERROR;
 }
 
-
-status_t PreviewPlayer::initVideoDecoder(uint32_t flags) {
-
+status_t PreviewPlayer::initVideoDecoder_l(uint32_t flags) {
     initRenderer_l();
 
     if (mVideoRenderer == NULL) {
-        LOGE("Cannot create renderer");
+        ALOGE("Cannot create renderer");
         return UNKNOWN_ERROR;
     }
 
@@ -756,10 +755,7 @@
     if (mVideoSource != NULL) {
         int64_t durationUs;
         if (mVideoTrack->getFormat()->findInt64(kKeyDuration, &durationUs)) {
-            Mutex::Autolock autoLock(mMiscStateLock);
-            if (mDurationUs < 0 || durationUs > mDurationUs) {
-                mDurationUs = durationUs;
-            }
+            setDuration_l(durationUs);
         }
 
         updateSizeToRender(mVideoTrack->getFormat());
@@ -820,7 +816,7 @@
     if (!mVideoBuffer) {
         MediaSource::ReadOptions options;
         if (mSeeking != NO_SEEK) {
-            LOGV("LV PLAYER seeking to %lld us (%.2f secs)", mSeekTimeUs,
+            ALOGV("LV PLAYER seeking to %lld us (%.2f secs)", mSeekTimeUs,
                                                       mSeekTimeUs / 1E6);
 
             options.setSeekTo(
@@ -831,10 +827,10 @@
             options.clearSeekTo();
 
             if (err != OK) {
-                CHECK_EQ(mVideoBuffer, NULL);
+                CHECK(!mVideoBuffer);
 
                 if (err == INFO_FORMAT_CHANGED) {
-                    LOGV("LV PLAYER VideoSource signalled format change");
+                    ALOGV("LV PLAYER VideoSource signalled format change");
                     notifyVideoSize_l();
 
                     if (mVideoRenderer != NULL) {
@@ -852,10 +848,10 @@
                 // So video playback is complete, but we may still have
                 // a seek request pending that needs to be applied to the audio track
                 if (mSeeking != NO_SEEK) {
-                    LOGV("video stream ended while seeking!");
+                    ALOGV("video stream ended while seeking!");
                 }
                 finishSeekIfNecessary(-1);
-                LOGV("PreviewPlayer: onVideoEvent EOS reached.");
+                ALOGV("PreviewPlayer: onVideoEvent EOS reached.");
                 mFlags |= VIDEO_AT_EOS;
                 mFlags |= AUDIO_AT_EOS;
                 mOverlayUpdateEventPosted = false;
@@ -902,16 +898,11 @@
 
     int64_t timeUs;
     CHECK(mVideoBuffer->meta_data()->findInt64(kKeyTime, &timeUs));
+    setPosition_l(timeUs);
 
-    {
-        Mutex::Autolock autoLock(mMiscStateLock);
-        mVideoTimeUs = timeUs;
-    }
-
-
-    if(!mStartNextPlayer) {
-        int64_t playbackTimeRemaining = (mPlayEndTimeMsec*1000LL) - timeUs;
-        if(playbackTimeRemaining <= 1500000) {
+    if (!mStartNextPlayer) {
+        int64_t playbackTimeRemaining = (mPlayEndTimeMsec * 1000LL) - timeUs;
+        if (playbackTimeRemaining <= 1500000) {
             //When less than 1.5 sec of playback left
             // send notification to start next player
 
@@ -925,7 +916,7 @@
     if (mAudioPlayer != NULL && !(mFlags & (AUDIO_RUNNING))) {
         status_t err = startAudioPlayer_l();
         if (err != OK) {
-            LOGE("Starting the audio player failed w/ err %d", err);
+            ALOGE("Starting the audio player failed w/ err %d", err);
             return;
         }
     }
@@ -959,12 +950,12 @@
             // Let's display the first frame after seeking right away.
             latenessUs = 0;
         }
-        LOGV("Audio time stamp = %lld and video time stamp = %lld",
+        ALOGV("Audio time stamp = %lld and video time stamp = %lld",
                                             ts->getRealTimeUs(),timeUs);
         if (latenessUs > 40000) {
             // We're more than 40ms late.
 
-            LOGV("LV PLAYER we're late by %lld us (%.2f secs)",
+            ALOGV("LV PLAYER we're late by %lld us (%.2f secs)",
                                            latenessUs, latenessUs / 1E6);
 
             mVideoBuffer->release();
@@ -975,7 +966,7 @@
 
         if (latenessUs < -25000) {
             // We're more than 25ms early.
-            LOGV("We're more than 25ms early, lateness %lld", latenessUs);
+            ALOGV("We're more than 25ms early, lateness %lld", latenessUs);
 
             postVideoEvent_l(25000);
             return;
@@ -997,7 +988,7 @@
         mVideoBuffer = NULL;
         mFlags |= VIDEO_AT_EOS;
         mFlags |= AUDIO_AT_EOS;
-        LOGV("PreviewPlayer: onVideoEvent timeUs > mPlayEndTime; send EOS..");
+        ALOGV("PreviewPlayer: onVideoEvent timeUs > mPlayEndTime; send EOS..");
         mOverlayUpdateEventPosted = false;
         // Set the last decoded timestamp to duration
         mDecodedVideoTs = (mPlayEndTimeMsec*1000LL);
@@ -1057,15 +1048,15 @@
                 mCurrFramingEffectIndex = index;
                 mOverlayUpdateEventPosted = true;
                 postOverlayUpdateEvent_l();
-                LOGV("Framing index = %d", mCurrFramingEffectIndex);
+                ALOGV("Framing index = %ld", mCurrFramingEffectIndex);
             } else {
-                LOGV("No framing effects found");
+                ALOGV("No framing effects found");
             }
         }
 
     } else if (mOverlayUpdateEventPosted) {
         //Post the event when the overlay is no more valid
-        LOGV("Overlay is Done");
+        ALOGV("Overlay is Done");
         mOverlayUpdateEventPosted = false;
         postOverlayUpdateEvent_l();
     }
@@ -1086,7 +1077,7 @@
 
     // if reached EndCutTime of clip, post EOS event
     if((timeUs/1000) >= mPlayEndTimeMsec) {
-        LOGV("PreviewPlayer: onVideoEvent EOS.");
+        ALOGV("PreviewPlayer: onVideoEvent EOS.");
         mFlags |= VIDEO_AT_EOS;
         mFlags |= AUDIO_AT_EOS;
         mOverlayUpdateEventPosted = false;
@@ -1110,11 +1101,13 @@
 }
 
 status_t PreviewPlayer::prepare() {
+    ALOGV("prepare");
     Mutex::Autolock autoLock(mLock);
     return prepare_l();
 }
 
 status_t PreviewPlayer::prepare_l() {
+    ALOGV("prepare_l");
     if (mFlags & PREPARED) {
         return OK;
     }
@@ -1137,7 +1130,14 @@
     return mPrepareResult;
 }
 
+status_t PreviewPlayer::prepareAsync() {
+    ALOGV("prepareAsync");
+    Mutex::Autolock autoLock(mLock);
+    return prepareAsync_l();
+}
+
 status_t PreviewPlayer::prepareAsync_l() {
+    ALOGV("prepareAsync_l");
     if (mFlags & PREPARING) {
         return UNKNOWN_ERROR;  // async prepare already pending
     }
@@ -1160,7 +1160,7 @@
     sp<DataSource> dataSource;
     sp<MediaExtractor> extractor;
 
-    dataSource = DataSource::CreateFromURI(mUri.string(), &mUriHeaders);
+    dataSource = DataSource::CreateFromURI(mUri.string(), NULL);
 
     if (dataSource == NULL) {
         return UNKNOWN_ERROR;
@@ -1178,27 +1178,19 @@
     }
 
     if (extractor == NULL) {
-        LOGV("PreviewPlayer::finishSetDataSource_l  extractor == NULL");
+        ALOGV("finishSetDataSource_l: failed to create extractor");
         return setDataSource_l_jpg();
     }
 
     return setDataSource_l(extractor);
 }
 
-
-// static
-bool PreviewPlayer::ContinuePreparation(void *cookie) {
-    PreviewPlayer *me = static_cast<PreviewPlayer *>(cookie);
-
-    return (me->mFlags & PREPARE_CANCELLED) == 0;
-}
-
 void PreviewPlayer::onPrepareAsyncEvent() {
     Mutex::Autolock autoLock(mLock);
-    LOGV("onPrepareAsyncEvent");
+    ALOGV("onPrepareAsyncEvent");
 
     if (mFlags & PREPARE_CANCELLED) {
-        LOGV("LV PLAYER prepare was cancelled before doing anything");
+        ALOGV("prepare was cancelled before doing anything");
         abortPrepare(UNKNOWN_ERROR);
         return;
     }
@@ -1213,7 +1205,7 @@
     }
 
     if (mVideoTrack != NULL && mVideoSource == NULL) {
-        status_t err = initVideoDecoder(OMXCodec::kHardwareCodecsOnly);
+        status_t err = initVideoDecoder_l(OMXCodec::kHardwareCodecsOnly);
 
         if (err != OK) {
             abortPrepare(err);
@@ -1222,7 +1214,7 @@
     }
 
     if (mAudioTrack != NULL && mAudioSource == NULL) {
-        status_t err = initAudioDecoder();
+        status_t err = initAudioDecoder_l();
 
         if (err != OK) {
             abortPrepare(err);
@@ -1234,15 +1226,13 @@
 }
 
 void PreviewPlayer::finishAsyncPrepare_l() {
+    ALOGV("finishAsyncPrepare_l");
     if (mIsAsyncPrepare) {
         if (mVideoSource == NULL) {
-            LOGV("finishAsyncPrepare_l: MEDIA_SET_VIDEO_SIZE 0 0 ");
             notifyListener_l(MEDIA_SET_VIDEO_SIZE, 0, 0);
         } else {
-            LOGV("finishAsyncPrepare_l: MEDIA_SET_VIDEO_SIZE");
             notifyVideoSize_l();
         }
-        LOGV("finishAsyncPrepare_l: MEDIA_PREPARED");
         notifyListener_l(MEDIA_PREPARED);
     }
 
@@ -1254,46 +1244,46 @@
 }
 
 void PreviewPlayer::acquireLock() {
-    LOGV("acquireLock");
+    ALOGV("acquireLock");
     mLockControl.lock();
 }
 
 void PreviewPlayer::releaseLock() {
-    LOGV("releaseLock");
+    ALOGV("releaseLock");
     mLockControl.unlock();
 }
 
 status_t PreviewPlayer::loadEffectsSettings(
-                    M4VSS3GPP_EffectSettings* pEffectSettings, int nEffects) {
-    M4OSA_UInt32 i = 0, rgbSize = 0;
-    M4VIFI_UInt8 *tmp = M4OSA_NULL;
+        M4VSS3GPP_EffectSettings* pEffectSettings, int nEffects) {
 
+    ALOGV("loadEffectsSettings");
     mNumberEffects = nEffects;
     mEffectsSettings = pEffectSettings;
     return OK;
 }
 
 status_t PreviewPlayer::loadAudioMixSettings(
-                    M4xVSS_AudioMixingSettings* pAudioMixSettings) {
+        M4xVSS_AudioMixingSettings* pAudioMixSettings) {
 
-    LOGV("PreviewPlayer: loadAudioMixSettings: ");
+    ALOGV("loadAudioMixSettings");
     mPreviewPlayerAudioMixSettings = pAudioMixSettings;
     return OK;
 }
 
 status_t PreviewPlayer::setAudioMixPCMFileHandle(
-                    M4OSA_Context pAudioMixPCMFileHandle) {
+        M4OSA_Context pAudioMixPCMFileHandle) {
 
-    LOGV("PreviewPlayer: setAudioMixPCMFileHandle: ");
+    ALOGV("setAudioMixPCMFileHandle");
     mAudioMixPCMFileHandle = pAudioMixPCMFileHandle;
     return OK;
 }
 
 status_t PreviewPlayer::setAudioMixStoryBoardParam(
-                    M4OSA_UInt32 audioMixStoryBoardTS,
-                    M4OSA_UInt32 currentMediaBeginCutTime,
-                    M4OSA_UInt32 primaryTrackVolValue ) {
+        M4OSA_UInt32 audioMixStoryBoardTS,
+        M4OSA_UInt32 currentMediaBeginCutTime,
+        M4OSA_UInt32 primaryTrackVolValue) {
 
+    ALOGV("setAudioMixStoryBoardParam");
     mAudioMixStoryBoardTS = audioMixStoryBoardTS;
     mCurrentMediaBeginCutTime = currentMediaBeginCutTime;
     mCurrentMediaVolumeValue = primaryTrackVolValue;
@@ -1315,7 +1305,7 @@
 status_t PreviewPlayer::setStoryboardStartTime(uint32_t msec) {
 
     mStoryboardStartTimeMsec = msec;
-    mDecVideoTsStoryBoard = mStoryboardStartTimeMsec*1000LL;
+    mDecVideoTsStoryBoard = mStoryboardStartTimeMsec * 1000LL;
     return OK;
 }
 
@@ -1332,17 +1322,16 @@
 
     mRenderingMode = mode;
 
-    status_t err = OK;
     /* get the video width and height by resolution */
-    err = getVideoSizeByResolution(outputVideoSize,
-              &mOutputVideoWidth, &mOutputVideoHeight);
+    return getVideoSizeByResolution(outputVideoSize,
+                &mOutputVideoWidth, &mOutputVideoHeight);
 
-    return err;
 }
 
 status_t PreviewPlayer::resetJniCallbackTimeStamp() {
 
-    mDecVideoTsStoryBoard = mStoryboardStartTimeMsec*1000LL;
+    mDecVideoTsStoryBoard = mStoryboardStartTimeMsec * 1000LL;
     return OK;
 }
 
@@ -1364,10 +1353,9 @@
     mProgressCbEventPending = false;
     // If playback starts from previous I-frame,
     // then send frame storyboard duration
-    if((mDecodedVideoTs/1000) < mPlayBeginTimeMsec) {
+    if ((mDecodedVideoTs/1000) < mPlayBeginTimeMsec) {
         notifyListener_l(MEDIA_INFO, 0, mDecVideoTsStoryBoard/1000);
-    }
-    else {
+    } else {
         notifyListener_l(MEDIA_INFO, 0,
         (((mDecodedVideoTs+mDecVideoTsStoryBoard)/1000)-mPlayBeginTimeMsec));
     }
@@ -1389,18 +1377,13 @@
     }
     mOverlayUpdateEventPending = false;
 
-    int updateState;
-    if (mOverlayUpdateEventPosted) {
-        updateState = 1;
-    } else {
-        updateState = 0;
-    }
+    int updateState = mOverlayUpdateEventPosted ? 1 : 0;
     notifyListener_l(0xBBBBBBBB, updateState, mCurrFramingEffectIndex);
 }
 
 
 void PreviewPlayer::setVideoPostProcessingNode(
-                    M4VSS3GPP_VideoEffectType type, M4OSA_Bool enable) {
+        M4VSS3GPP_VideoEffectType type, M4OSA_Bool enable) {
 
     uint32_t effect = VIDEO_EFFECT_NONE;
 
@@ -1455,18 +1438,17 @@
             break;
     }
 
-    if(enable == M4OSA_TRUE) {
+    if (enable == M4OSA_TRUE) {
         //If already set, then no need to set again
-        if(!(mCurrentVideoEffect & effect)) {
+        if (!(mCurrentVideoEffect & effect)) {
             mCurrentVideoEffect |= effect;
-            if(effect == VIDEO_EFFECT_FIFTIES) {
+            if (effect == VIDEO_EFFECT_FIFTIES) {
                 mIsFiftiesEffectStarted = true;
             }
         }
-    }
-    else  {
+    } else {
         //Reset only if already set
-        if(mCurrentVideoEffect & effect) {
+        if (mCurrentVideoEffect & effect) {
             mCurrentVideoEffect &= ~effect;
         }
     }
@@ -1479,12 +1461,12 @@
 }
 
 status_t PreviewPlayer::readFirstVideoFrame() {
-    LOGV("PreviewPlayer::readFirstVideoFrame");
+    ALOGV("readFirstVideoFrame");
 
     if (!mVideoBuffer) {
         MediaSource::ReadOptions options;
         if (mSeeking != NO_SEEK) {
-            LOGV("LV PLAYER seeking to %lld us (%.2f secs)", mSeekTimeUs,
+            ALOGV("seeking to %lld us (%.2f secs)", mSeekTimeUs,
                     mSeekTimeUs / 1E6);
 
             options.setSeekTo(
@@ -1495,10 +1477,10 @@
             options.clearSeekTo();
 
             if (err != OK) {
-                CHECK_EQ(mVideoBuffer, NULL);
+                CHECK(!mVideoBuffer);
 
                 if (err == INFO_FORMAT_CHANGED) {
-                    LOGV("LV PLAYER VideoSource signalled format change");
+                    ALOGV("VideoSource signalled format change");
                     notifyVideoSize_l();
 
                     if (mVideoRenderer != NULL) {
@@ -1512,7 +1494,7 @@
                     updateSizeToRender(mVideoSource->getFormat());
                     continue;
                 }
-                LOGV("PreviewPlayer: onVideoEvent EOS reached.");
+                ALOGV("EOS reached.");
                 mFlags |= VIDEO_AT_EOS;
                 mFlags |= AUDIO_AT_EOS;
                 postStreamDoneEvent_l(err);
@@ -1539,7 +1521,7 @@
                     continue;
                 }
             } else {
-                if((videoTimeUs/1000) < mPlayBeginTimeMsec) {
+                if ((videoTimeUs/1000) < mPlayBeginTimeMsec) {
                     // buffers are before begin cut time
                     // ignore them
                     mVideoBuffer->release();
@@ -1553,11 +1535,7 @@
 
     int64_t timeUs;
     CHECK(mVideoBuffer->meta_data()->findInt64(kKeyTime, &timeUs));
-
-    {
-        Mutex::Autolock autoLock(mMiscStateLock);
-        mVideoTimeUs = timeUs;
-    }
+    setPosition_l(timeUs);
 
     mDecodedVideoTs = timeUs;
 
@@ -1576,4 +1554,529 @@
     }
 }
 
+void PreviewPlayer::setListener(const wp<MediaPlayerBase> &listener) {
+    Mutex::Autolock autoLock(mLock);
+    mListener = listener;
+}
+
+status_t PreviewPlayer::setDataSource(const sp<IStreamSource> &source) {
+    return INVALID_OPERATION;
+}
+
+void PreviewPlayer::reset() {
+    Mutex::Autolock autoLock(mLock);
+    reset_l();
+}
+
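+// Releases the media sources, decoders and the audio player and resets
+// playback state to its defaults.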
+void PreviewPlayer::clear_l() {
+    mDisplayWidth = 0;
+    mDisplayHeight = 0;
+
+    if (mFlags & PLAYING) {
+        updateBatteryUsage_l();
+    }
+
+    if (mFlags & PREPARING) {
+        mFlags |= PREPARE_CANCELLED;
+
+        if (mFlags & PREPARING_CONNECTED) {
+            // We are basically done preparing; we're just buffering
+            // enough data to start playback, so we can safely interrupt that.
+            finishAsyncPrepare_l();
+        }
+    }
+
+    while (mFlags & PREPARING) {
+        mPreparedCondition.wait(mLock);
+    }
+
+    cancelPlayerEvents_l(true);
+
+    mAudioTrack.clear();
+    mVideoTrack.clear();
+
+    // Shut down audio first, so that the response to the reset request
+    // appears to happen instantaneously as far as the user is concerned.
+    // If we did this later, audio would continue playing while we shut
+    // down the video-related resources, and the player would not appear
+    // as responsive to a reset request.
+    if (mAudioPlayer == NULL && mAudioSource != NULL) {
+        // If we had an audio player, it would have effectively
+        // taken possession of the audio source and stopped it when
+        // _it_ is stopped. Otherwise this is still our responsibility.
+        mAudioSource->stop();
+    }
+    mAudioSource.clear();
+
+    mTimeSource = NULL;
+
+    delete mAudioPlayer;
+    mAudioPlayer = NULL;
+
+    if (mVideoSource != NULL) {
+        shutdownVideoDecoder_l();
+    }
+
+    mDurationUs = -1;
+    mFlags = 0;
+    mExtractorFlags = 0;
+    mTimeSourceDeltaUs = 0;
+    mVideoTimeUs = 0;
+
+    mSeeking = NO_SEEK;
+    mSeekNotificationSent = false;
+    mSeekTimeUs = 0;
+
+    mUri.setTo("");
+
+    mBitrate = -1;
+    mLastVideoTimeUs = -1;
+}
+
+void PreviewPlayer::notifyListener_l(int msg, int ext1, int ext2) {
+    if (mListener != NULL) {
+        sp<MediaPlayerBase> listener = mListener.promote();
+
+        if (listener != NULL) {
+            listener->sendEvent(msg, ext1, ext2);
+        }
+    }
+}
+
+void PreviewPlayer::onVideoLagUpdate() {
+    Mutex::Autolock autoLock(mLock);
+    if (!mVideoLagEventPending) {
+        return;
+    }
+    mVideoLagEventPending = false;
+
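+    // Compare the audio clock against the timestamp of the most recently
+    // decoded video frame.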
+    int64_t audioTimeUs = mAudioPlayer->getMediaTimeUs();
+    int64_t videoLateByUs = audioTimeUs - mVideoTimeUs;
+
+    if (!(mFlags & VIDEO_AT_EOS) && videoLateByUs > 300000ll) {
+        ALOGV("video late by %lld ms.", videoLateByUs / 1000ll);
+
+        notifyListener_l(
+                MEDIA_INFO,
+                MEDIA_INFO_VIDEO_TRACK_LAGGING,
+                videoLateByUs / 1000ll);
+    }
+
+    postVideoLagEvent_l();
+}
+
+void PreviewPlayer::notifyVideoSize_l() {
+    sp<MetaData> meta = mVideoSource->getFormat();
+
+    int32_t vWidth, vHeight;
+    int32_t cropLeft, cropTop, cropRight, cropBottom;
+
+    CHECK(meta->findInt32(kKeyWidth, &vWidth));
+    CHECK(meta->findInt32(kKeyHeight, &vHeight));
+
+    mGivenWidth = vWidth;
+    mGivenHeight = vHeight;
+
+    if (!meta->findRect(
+                kKeyCropRect, &cropLeft, &cropTop, &cropRight, &cropBottom)) {
+
+        cropLeft = cropTop = 0;
+        cropRight = vWidth - 1;
+        cropBottom = vHeight - 1;
+
+        ALOGD("got dimensions only %d x %d", vWidth, vHeight);
+    } else {
+        ALOGD("got crop rect %d, %d, %d, %d",
+             cropLeft, cropTop, cropRight, cropBottom);
+    }
+
+    mCropRect.left = cropLeft;
+    mCropRect.right = cropRight;
+    mCropRect.top = cropTop;
+    mCropRect.bottom = cropBottom;
+
+    int32_t displayWidth;
+    if (meta->findInt32(kKeyDisplayWidth, &displayWidth)) {
+        ALOGV("Display width changed (%d=>%d)", mDisplayWidth, displayWidth);
+        mDisplayWidth = displayWidth;
+    }
+    int32_t displayHeight;
+    if (meta->findInt32(kKeyDisplayHeight, &displayHeight)) {
+        ALOGV("Display height changed (%d=>%d)", mDisplayHeight, displayHeight);
+        mDisplayHeight = displayHeight;
+    }
+
+    int32_t usableWidth = cropRight - cropLeft + 1;
+    int32_t usableHeight = cropBottom - cropTop + 1;
+    if (mDisplayWidth != 0) {
+        usableWidth = mDisplayWidth;
+    }
+    if (mDisplayHeight != 0) {
+        usableHeight = mDisplayHeight;
+    }
+
+    int32_t rotationDegrees;
+    if (!mVideoTrack->getFormat()->findInt32(
+                kKeyRotation, &rotationDegrees)) {
+        rotationDegrees = 0;
+    }
+
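+    // Swap width and height in the notification when the clip is rotated
+    // by 90 or 270 degrees.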
+    if (rotationDegrees == 90 || rotationDegrees == 270) {
+        notifyListener_l(
+                MEDIA_SET_VIDEO_SIZE, usableHeight, usableWidth);
+    } else {
+        notifyListener_l(
+                MEDIA_SET_VIDEO_SIZE, usableWidth, usableHeight);
+    }
+}
+
+status_t PreviewPlayer::pause() {
+    Mutex::Autolock autoLock(mLock);
+
+    mFlags &= ~CACHE_UNDERRUN;
+
+    return pause_l();
+}
+
+status_t PreviewPlayer::pause_l(bool at_eos) {
+    if (!(mFlags & PLAYING)) {
+        return OK;
+    }
+
+    cancelPlayerEvents_l();
+
+    if (mAudioPlayer != NULL && (mFlags & AUDIO_RUNNING)) {
+        if (at_eos) {
+            // If we played the audio stream to completion we
+            // want to make sure that all samples remaining in the audio
+            // track's queue are played out.
+            mAudioPlayer->pause(true /* playPendingSamples */);
+        } else {
+            mAudioPlayer->pause();
+        }
+
+        mFlags &= ~AUDIO_RUNNING;
+    }
+
+    mFlags &= ~PLAYING;
+    updateBatteryUsage_l();
+
+    return OK;
+}
+
+bool PreviewPlayer::isPlaying() const {
+    return (mFlags & PLAYING) || (mFlags & CACHE_UNDERRUN);
+}
+
+void PreviewPlayer::setSurface(const sp<Surface> &surface) {
+    Mutex::Autolock autoLock(mLock);
+
+    mSurface = surface;
+    setNativeWindow_l(surface);
+}
+
+void PreviewPlayer::setSurfaceTexture(const sp<ISurfaceTexture> &surfaceTexture) {
+    Mutex::Autolock autoLock(mLock);
+
+    mSurface.clear();
+    if (surfaceTexture != NULL) {
+        setNativeWindow_l(new SurfaceTextureClient(surfaceTexture));
+    }
+}
+
+void PreviewPlayer::shutdownVideoDecoder_l() {
+    if (mVideoBuffer) {
+        mVideoBuffer->release();
+        mVideoBuffer = NULL;
+    }
+
+    mVideoSource->stop();
+
+    // The following hack is necessary to ensure that the OMX
+    // component is completely released by the time we may try
+    // to instantiate it again.
+    wp<MediaSource> tmp = mVideoSource;
+    mVideoSource.clear();
+    while (tmp.promote() != NULL) {
+        usleep(1000);
+    }
+    IPCThreadState::self()->flushCommands();
+}
+
+void PreviewPlayer::setNativeWindow_l(const sp<ANativeWindow> &native) {
+    mNativeWindow = native;
+
+    if (mVideoSource == NULL) {
+        return;
+    }
+
+    ALOGI("attempting to reconfigure to use new surface");
+
+    bool wasPlaying = (mFlags & PLAYING) != 0;
+
+    pause_l();
+
+    shutdownVideoDecoder_l();
+
+    CHECK_EQ(initVideoDecoder_l(), (status_t)OK);
+
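+    // Seek back to the last rendered position so playback resumes where it
+    // left off on the new surface.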
+    if (mLastVideoTimeUs >= 0) {
+        mSeeking = SEEK;
+        mSeekNotificationSent = true;
+        mSeekTimeUs = mLastVideoTimeUs;
+        mFlags &= ~(AT_EOS | AUDIO_AT_EOS | VIDEO_AT_EOS);
+    }
+
+    if (wasPlaying) {
+        play_l();
+    }
+}
+
+void PreviewPlayer::setAudioSink(
+        const sp<MediaPlayerBase::AudioSink> &audioSink) {
+    Mutex::Autolock autoLock(mLock);
+
+    mAudioSink = audioSink;
+}
+
+status_t PreviewPlayer::setLooping(bool shouldLoop) {
+    Mutex::Autolock autoLock(mLock);
+
+    mFlags = mFlags & ~LOOPING;
+
+    if (shouldLoop) {
+        mFlags |= LOOPING;
+    }
+
+    return OK;
+}
+
+void PreviewPlayer::setDuration_l(int64_t durationUs) {
+    if (mDurationUs < 0 || durationUs > mDurationUs) {
+        mDurationUs = durationUs;
+    }
+}
+
+status_t PreviewPlayer::getDuration(int64_t *durationUs) {
+    Mutex::Autolock autoLock(mLock);
+    if (mDurationUs < 0) {
+        return UNKNOWN_ERROR;
+    }
+
+    *durationUs = mDurationUs;
+    return OK;
+}
+
+status_t PreviewPlayer::getPosition(int64_t *positionUs) {
+    Mutex::Autolock autoLock(mLock);
+
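+    // Prefer a pending seek target, then the video clock, then the audio clock.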
+    if (mSeeking != NO_SEEK) {
+        *positionUs = mSeekTimeUs;
+    } else if (mVideoSource != NULL
+            && (mAudioPlayer == NULL || !(mFlags & VIDEO_AT_EOS))) {
+        *positionUs = mVideoTimeUs;
+    } else if (mAudioPlayer != NULL) {
+        *positionUs = mAudioPlayer->getMediaTimeUs();
+    } else {
+        *positionUs = 0;
+    }
+
+    return OK;
+}
+
+void PreviewPlayer::setPosition_l(int64_t timeUs) {
+    mVideoTimeUs = timeUs;
+}
+
+status_t PreviewPlayer::seekTo_l(int64_t timeUs) {
+    ALOGV("seekTo_l");
+    if (mFlags & CACHE_UNDERRUN) {
+        mFlags &= ~CACHE_UNDERRUN;
+        play_l();
+    }
+
+    if ((mFlags & PLAYING) && mVideoSource != NULL && (mFlags & VIDEO_AT_EOS)) {
+        // Video playback completed before, there's no pending
+        // video event right now. In order for this new seek
+        // to be honored, we need to post one.
+
+        postVideoEvent_l();
+    }
+
+    mSeeking = SEEK;
+    mSeekNotificationSent = false;
+    mSeekTimeUs = timeUs;
+    mFlags &= ~(AT_EOS | AUDIO_AT_EOS | VIDEO_AT_EOS);
+
+    seekAudioIfNecessary_l();
+
+    if (!(mFlags & PLAYING)) {
+        ALOGV("seeking while paused, sending SEEK_COMPLETE notification"
+             " immediately.");
+
+        notifyListener_l(MEDIA_SEEK_COMPLETE);
+        mSeekNotificationSent = true;
+
+        if ((mFlags & PREPARED) && mVideoSource != NULL) {
+            mFlags |= SEEK_PREVIEW;
+            postVideoEvent_l();
+        }
+    }
+
+    return OK;
+}
+
+void PreviewPlayer::seekAudioIfNecessary_l() {
+    if (mSeeking != NO_SEEK && mVideoSource == NULL && mAudioPlayer != NULL) {
+        mAudioPlayer->seekTo(mSeekTimeUs);
+
+        mWatchForAudioSeekComplete = true;
+        mWatchForAudioEOS = true;
+    }
+}
+
+void PreviewPlayer::setAudioSource(const sp<MediaSource>& source) {
+    CHECK(source != NULL);
+    mAudioTrack = source;
+}
+
+void PreviewPlayer::setVideoSource(const sp<MediaSource>& source) {
+    CHECK(source != NULL);
+    mVideoTrack = source;
+}
+
+void PreviewPlayer::finishSeekIfNecessary(int64_t videoTimeUs) {
+    if (mSeeking == SEEK_VIDEO_ONLY) {
+        mSeeking = NO_SEEK;
+        return;
+    }
+
+    if (mSeeking == NO_SEEK || (mFlags & SEEK_PREVIEW)) {
+        return;
+    }
+
+    if (mAudioPlayer != NULL) {
+        ALOGV("seeking audio to %lld us (%.2f secs).", videoTimeUs, videoTimeUs / 1E6);
+
+        // If we don't have a video time, seek audio to the originally
+        // requested seek time instead.
+
+        mAudioPlayer->seekTo(videoTimeUs < 0 ? mSeekTimeUs : videoTimeUs);
+        mWatchForAudioSeekComplete = true;
+        mWatchForAudioEOS = true;
+    } else if (!mSeekNotificationSent) {
+        // If we're playing video only, report seek complete now,
+        // otherwise audio player will notify us later.
+        notifyListener_l(MEDIA_SEEK_COMPLETE);
+        mSeekNotificationSent = true;
+    }
+
+    mFlags |= FIRST_FRAME;
+    mSeeking = NO_SEEK;
+}
+
+void PreviewPlayer::onCheckAudioStatus() {
+    Mutex::Autolock autoLock(mLock);
+    if (!mAudioStatusEventPending) {
+        // The event was dispatched, but it was cancelled while we were
+        // blocked on the mutex.
+        return;
+    }
+
+    mAudioStatusEventPending = false;
+
+    if (mWatchForAudioSeekComplete && !mAudioPlayer->isSeeking()) {
+        mWatchForAudioSeekComplete = false;
+
+        if (!mSeekNotificationSent) {
+            notifyListener_l(MEDIA_SEEK_COMPLETE);
+            mSeekNotificationSent = true;
+        }
+
+        mSeeking = NO_SEEK;
+    }
+
+    status_t finalStatus;
+    if (mWatchForAudioEOS && mAudioPlayer->reachedEOS(&finalStatus)) {
+        mWatchForAudioEOS = false;
+        mFlags |= AUDIO_AT_EOS;
+        mFlags |= FIRST_FRAME;
+        postStreamDoneEvent_l(finalStatus);
+    }
+}
+
+void PreviewPlayer::postVideoEvent_l(int64_t delayUs) {
+    if (mVideoEventPending) {
+        return;
+    }
+
+    mVideoEventPending = true;
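+    // A negative delay selects the default 10 ms polling interval.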
+    mQueue.postEventWithDelay(mVideoEvent, delayUs < 0 ? 10000 : delayUs);
+}
+
+void PreviewPlayer::postStreamDoneEvent_l(status_t status) {
+    if (mStreamDoneEventPending) {
+        return;
+    }
+    mStreamDoneEventPending = true;
+
+    mStreamDoneStatus = status;
+    mQueue.postEvent(mStreamDoneEvent);
+}
+
+void PreviewPlayer::postVideoLagEvent_l() {
+    if (mVideoLagEventPending) {
+        return;
+    }
+    mVideoLagEventPending = true;
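+    // Re-check A/V drift roughly once per second.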
+    mQueue.postEventWithDelay(mVideoLagEvent, 1000000ll);
+}
+
+void PreviewPlayer::postCheckAudioStatusEvent_l(int64_t delayUs) {
+    if (mAudioStatusEventPending) {
+        return;
+    }
+    mAudioStatusEventPending = true;
+    mQueue.postEventWithDelay(mCheckAudioStatusEvent, delayUs);
+}
+
+void PreviewPlayer::abortPrepare(status_t err) {
+    CHECK(err != OK);
+
+    if (mIsAsyncPrepare) {
+        notifyListener_l(MEDIA_ERROR, MEDIA_ERROR_UNKNOWN, err);
+    }
+
+    mPrepareResult = err;
+    mFlags &= ~(PREPARING|PREPARE_CANCELLED|PREPARING_CONNECTED);
+    mAsyncPrepareEvent = NULL;
+    mPreparedCondition.broadcast();
+}
+
+uint32_t PreviewPlayer::getSourceSeekFlags() const {
+    Mutex::Autolock lock(mLock);
+    return mExtractorFlags;
+}
+
+void PreviewPlayer::postAudioEOS(int64_t delayUs) {
+    Mutex::Autolock autoLock(mLock);
+    postCheckAudioStatusEvent_l(delayUs);
+}
+
+void PreviewPlayer::postAudioSeekComplete() {
+    Mutex::Autolock autoLock(mLock);
+    postCheckAudioStatusEvent_l(0 /* delayUs */);
+}
+
+void PreviewPlayer::updateBatteryUsage_l() {
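+    // Report which decoders are in use so battery consumption can be
+    // attributed to audio/video playback.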
+    uint32_t params = IMediaPlayerService::kBatteryDataTrackDecoder;
+    if ((mAudioSource != NULL) && (mAudioSource != mAudioTrack)) {
+        params |= IMediaPlayerService::kBatteryDataTrackAudio;
+    }
+    if (mVideoSource != NULL) {
+        params |= IMediaPlayerService::kBatteryDataTrackVideo;
+    }
+    addBatteryData(params);
+}
+
 }  // namespace android
diff --git a/libvideoeditor/lvpp/PreviewPlayer.h b/libvideoeditor/lvpp/PreviewPlayer.h
index 16dcca2..177853f 100755
--- a/libvideoeditor/lvpp/PreviewPlayer.h
+++ b/libvideoeditor/lvpp/PreviewPlayer.h
@@ -22,43 +22,53 @@
 #include "VideoEditorAudioPlayer.h"
 
 #include <media/MediaPlayerInterface.h>
-#include <media/stagefright/DataSource.h>
 #include <media/stagefright/OMXClient.h>
 #include <media/stagefright/TimeSource.h>
 #include <utils/threads.h>
-#include "PreviewPlayerBase.h"
-#include "VideoEditorPreviewController.h"
 #include "NativeWindowRenderer.h"
 
 namespace android {
 
-struct AudioPlayerBase;
-struct DataSource;
-struct MediaBuffer;
+struct VideoEditorAudioPlayer;
 struct MediaExtractor;
-struct MediaSource;
 
-struct PreviewPlayer : public PreviewPlayerBase {
+struct PreviewPlayer {
     PreviewPlayer(NativeWindowRenderer* renderer);
     ~PreviewPlayer();
 
-    //Override baseclass methods
+    void setListener(const wp<MediaPlayerBase> &listener);
     void reset();
 
     status_t play();
+    status_t pause();
 
+    bool isPlaying() const;
+    void setSurface(const sp<Surface> &surface);
+    void setSurfaceTexture(const sp<ISurfaceTexture> &surfaceTexture);
     status_t seekTo(int64_t timeUs);
 
     status_t getVideoDimensions(int32_t *width, int32_t *height) const;
 
+
+    // FIXME: Sync between ...
     void acquireLock();
     void releaseLock();
 
     status_t prepare();
-    status_t setDataSource(
-        const char *uri, const KeyedVector<String8, String8> *headers);
+    status_t prepareAsync();
+    status_t setDataSource(const char *path);
+    status_t setDataSource(const sp<IStreamSource> &source);
 
-    //Added methods
+    void setAudioSink(const sp<MediaPlayerBase::AudioSink> &audioSink);
+    status_t setLooping(bool shouldLoop);
+    status_t getDuration(int64_t *durationUs);
+    status_t getPosition(int64_t *positionUs);
+
+    uint32_t getSourceSeekFlags() const;
+
+    void postAudioEOS(int64_t delayUs = 0ll);
+    void postAudioSeekComplete();
+
     status_t loadEffectsSettings(M4VSS3GPP_EffectSettings* pEffectSettings,
                                  int nEffects);
     status_t loadAudioMixSettings(M4xVSS_AudioMixingSettings* pAudioMixSettings);
@@ -78,11 +88,9 @@
     status_t setImageClipProperties(uint32_t width, uint32_t height);
     status_t readFirstVideoFrame();
     status_t getLastRenderedTimeMs(uint32_t *lastRenderedTimeMs);
-    status_t setAudioPlayer(AudioPlayerBase *audioPlayer);
+    status_t setAudioPlayer(VideoEditorAudioPlayer *audioPlayer);
 
 private:
-    friend struct PreviewPlayerEvent;
-
     enum {
         PLAYING             = 1,
         LOOPING             = 2,
@@ -96,24 +104,92 @@
         VIDEO_AT_EOS        = 512,
         AUTO_LOOPING        = 1024,
         INFORMED_AV_EOS     = 2048,
+
+        // We are basically done preparing but are currently buffering
+        // sufficient data to begin playback and finish the preparation phase
+        // for good.
+        PREPARING_CONNECTED = 4096,
+
+        // We're triggering a single video event to display the first frame
+        // after the seekpoint.
+        SEEK_PREVIEW        = 8192,
+
+        AUDIO_RUNNING       = 16384,
+        AUDIOPLAYER_STARTED = 32768,
+
+        INCOGNITO           = 65536,
     };
 
-    void cancelPlayerEvents(bool keepBufferingGoing = false);
-    status_t setDataSource_l(const sp<MediaExtractor> &extractor);
-    status_t setDataSource_l(
-        const char *uri, const KeyedVector<String8, String8> *headers);
-    void reset_l();
-    status_t play_l();
-    status_t initRenderer_l();
-    status_t initAudioDecoder();
-    status_t initVideoDecoder(uint32_t flags = 0);
-    void onVideoEvent();
-    void onStreamDone();
-    status_t finishSetDataSource_l();
-    static bool ContinuePreparation(void *cookie);
-    void onPrepareAsyncEvent();
-    void finishAsyncPrepare_l();
-    status_t startAudioPlayer_l();
+    mutable Mutex mLock;
+
+    OMXClient mClient;
+    TimedEventQueue mQueue;
+    bool mQueueStarted;
+    wp<MediaPlayerBase> mListener;
+
+    sp<Surface> mSurface;
+    sp<ANativeWindow> mNativeWindow;
+    sp<MediaPlayerBase::AudioSink> mAudioSink;
+
+    SystemTimeSource mSystemTimeSource;
+    TimeSource *mTimeSource;
+
+    String8 mUri;
+
+    sp<MediaSource> mVideoTrack;
+    sp<MediaSource> mVideoSource;
+    bool mVideoRendererIsPreview;
+
+    sp<MediaSource> mAudioTrack;
+    sp<MediaSource> mAudioSource;
+    VideoEditorAudioPlayer *mAudioPlayer;
+    int64_t mDurationUs;
+
+    int32_t mDisplayWidth;
+    int32_t mDisplayHeight;
+
+    uint32_t mFlags;
+    uint32_t mExtractorFlags;
+
+    int64_t mTimeSourceDeltaUs;
+    int64_t mVideoTimeUs;
+
+    enum SeekType {
+        NO_SEEK,
+        SEEK,
+        SEEK_VIDEO_ONLY
+    };
+    SeekType mSeeking;
+
+    bool mSeekNotificationSent;
+    int64_t mSeekTimeUs;
+
+    int64_t mBitrate;  // total bitrate of the file (in bps) or -1 if unknown.
+
+    bool mWatchForAudioSeekComplete;
+    bool mWatchForAudioEOS;
+
+    sp<TimedEventQueue::Event> mVideoEvent;
+    bool mVideoEventPending;
+    sp<TimedEventQueue::Event> mStreamDoneEvent;
+    bool mStreamDoneEventPending;
+    sp<TimedEventQueue::Event> mCheckAudioStatusEvent;
+    bool mAudioStatusEventPending;
+    sp<TimedEventQueue::Event> mVideoLagEvent;
+    bool mVideoLagEventPending;
+
+    sp<TimedEventQueue::Event> mAsyncPrepareEvent;
+    Condition mPreparedCondition;
+    bool mIsAsyncPrepare;
+    status_t mPrepareResult;
+    status_t mStreamDoneStatus;
+
+    MediaBuffer *mVideoBuffer;
+    int64_t mLastVideoTimeUs;
+    ARect mCropRect;
+    int32_t mGivenWidth, mGivenHeight;
+
+
     bool mIsChangeSourceRequired;
 
     NativeWindowRenderer *mNativeWindowRenderer;
@@ -159,22 +235,58 @@
     M4VIFI_UInt8*  mFrameRGBBuffer;
     M4VIFI_UInt8*  mFrameYUVBuffer;
 
+    void cancelPlayerEvents_l(bool updateProgressCb = false);
+    status_t setDataSource_l(const sp<MediaExtractor> &extractor);
+    status_t setDataSource_l(const char *path);
+    void setNativeWindow_l(const sp<ANativeWindow> &native);
+    void reset_l();
+    void clear_l();
+    status_t play_l();
+    status_t pause_l(bool at_eos = false);
+    status_t initRenderer_l();
+    status_t initAudioDecoder_l();
+    status_t initVideoDecoder_l(uint32_t flags = 0);
+    void notifyVideoSize_l();
+    void notifyListener_l(int msg, int ext1 = 0, int ext2 = 0);
+    void onVideoEvent();
+    void onVideoLagUpdate();
+    void onStreamDone();
+    void onCheckAudioStatus();
+    void onPrepareAsyncEvent();
+
+    void finishAsyncPrepare_l();
+    void abortPrepare(status_t err);
+
+    status_t startAudioPlayer_l();
+    void setVideoSource(const sp<MediaSource>& source);
+    status_t finishSetDataSource_l();
+    void setAudioSource(const sp<MediaSource>& source);
+
+    status_t seekTo_l(int64_t timeUs);
+    void seekAudioIfNecessary_l();
+    void finishSeekIfNecessary(int64_t videoTimeUs);
+
+    void postCheckAudioStatusEvent_l(int64_t delayUs);
+    void postVideoLagEvent_l();
+    void postStreamDoneEvent_l(status_t status);
+    void postVideoEvent_l(int64_t delayUs = -1);
     void setVideoPostProcessingNode(
                     M4VSS3GPP_VideoEffectType type, M4OSA_Bool enable);
     void postProgressCallbackEvent_l();
+    void shutdownVideoDecoder_l();
     void onProgressCbEvent();
 
     void postOverlayUpdateEvent_l();
     void onUpdateOverlayEvent();
 
     status_t setDataSource_l_jpg();
-
     status_t prepare_l();
     status_t prepareAsync_l();
-
+    void updateBatteryUsage_l();
     void updateSizeToRender(sp<MetaData> meta);
 
-    VideoEditorAudioPlayer  *mVeAudioPlayer;
+    void setDuration_l(int64_t durationUs);
+    void setPosition_l(int64_t timeUs);
 
     PreviewPlayer(const PreviewPlayer &);
     PreviewPlayer &operator=(const PreviewPlayer &);
diff --git a/libvideoeditor/lvpp/PreviewPlayerBase.cpp b/libvideoeditor/lvpp/PreviewPlayerBase.cpp
deleted file mode 100644
index aed26ef..0000000
--- a/libvideoeditor/lvpp/PreviewPlayerBase.cpp
+++ /dev/null
@@ -1,1861 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#undef DEBUG_HDCP
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "PreviewPlayerBase"
-#include <utils/Log.h>
-
-#include <dlfcn.h>
-
-#include "PreviewPlayerBase.h"
-#include "AudioPlayerBase.h"
-#include "include/SoftwareRenderer.h"
-#include "include/NuCachedSource2.h"
-#include "include/ThrottledSource.h"
-#include "include/MPEG2TSExtractor.h"
-
-#include <binder/IPCThreadState.h>
-#include <binder/IServiceManager.h>
-#include <media/IMediaPlayerService.h>
-#include <media/stagefright/foundation/hexdump.h>
-#include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/DataSource.h>
-#include <media/stagefright/FileSource.h>
-#include <media/stagefright/MediaBuffer.h>
-#include <media/stagefright/MediaDefs.h>
-#include <media/stagefright/MediaExtractor.h>
-#include <media/stagefright/MediaSource.h>
-#include <media/stagefright/MetaData.h>
-#include <media/stagefright/OMXCodec.h>
-
-#include <surfaceflinger/Surface.h>
-#include <gui/ISurfaceTexture.h>
-#include <gui/SurfaceTextureClient.h>
-#include <surfaceflinger/ISurfaceComposer.h>
-
-#include <cutils/properties.h>
-
-#define USE_SURFACE_ALLOC 1
-
-namespace android {
-
-static int64_t kLowWaterMarkUs = 2000000ll;  // 2secs
-static int64_t kHighWaterMarkUs = 10000000ll;  // 10secs
-static const size_t kLowWaterMarkBytes = 40000;
-static const size_t kHighWaterMarkBytes = 200000;
-
-struct AwesomeEvent : public TimedEventQueue::Event {
-    AwesomeEvent(
-            PreviewPlayerBase *player,
-            void (PreviewPlayerBase::*method)())
-        : mPlayer(player),
-          mMethod(method) {
-    }
-
-protected:
-    virtual ~AwesomeEvent() {}
-
-    virtual void fire(TimedEventQueue *queue, int64_t /* now_us */) {
-        (mPlayer->*mMethod)();
-    }
-
-private:
-    PreviewPlayerBase *mPlayer;
-    void (PreviewPlayerBase::*mMethod)();
-
-    AwesomeEvent(const AwesomeEvent &);
-    AwesomeEvent &operator=(const AwesomeEvent &);
-};
-
-struct AwesomeLocalRenderer : public AwesomeRenderer {
-    AwesomeLocalRenderer(
-            const sp<ANativeWindow> &nativeWindow, const sp<MetaData> &meta)
-        : mTarget(new SoftwareRenderer(nativeWindow, meta)) {
-    }
-
-    virtual void render(MediaBuffer *buffer) {
-        render((const uint8_t *)buffer->data() + buffer->range_offset(),
-               buffer->range_length());
-    }
-
-    void render(const void *data, size_t size) {
-        mTarget->render(data, size, NULL);
-    }
-
-protected:
-    virtual ~AwesomeLocalRenderer() {
-        delete mTarget;
-        mTarget = NULL;
-    }
-
-private:
-    SoftwareRenderer *mTarget;
-
-    AwesomeLocalRenderer(const AwesomeLocalRenderer &);
-    AwesomeLocalRenderer &operator=(const AwesomeLocalRenderer &);;
-};
-
-struct AwesomeNativeWindowRenderer : public AwesomeRenderer {
-    AwesomeNativeWindowRenderer(
-            const sp<ANativeWindow> &nativeWindow,
-            int32_t rotationDegrees)
-        : mNativeWindow(nativeWindow) {
-        applyRotation(rotationDegrees);
-    }
-
-    virtual void render(MediaBuffer *buffer) {
-        status_t err = mNativeWindow->queueBuffer(
-                mNativeWindow.get(), buffer->graphicBuffer().get());
-        if (err != 0) {
-            LOGE("queueBuffer failed with error %s (%d)", strerror(-err),
-                    -err);
-            return;
-        }
-
-        sp<MetaData> metaData = buffer->meta_data();
-        metaData->setInt32(kKeyRendered, 1);
-    }
-
-protected:
-    virtual ~AwesomeNativeWindowRenderer() {}
-
-private:
-    sp<ANativeWindow> mNativeWindow;
-
-    void applyRotation(int32_t rotationDegrees) {
-        uint32_t transform;
-        switch (rotationDegrees) {
-            case 0: transform = 0; break;
-            case 90: transform = HAL_TRANSFORM_ROT_90; break;
-            case 180: transform = HAL_TRANSFORM_ROT_180; break;
-            case 270: transform = HAL_TRANSFORM_ROT_270; break;
-            default: transform = 0; break;
-        }
-
-        if (transform) {
-            CHECK_EQ(0, native_window_set_buffers_transform(
-                        mNativeWindow.get(), transform));
-        }
-    }
-
-    AwesomeNativeWindowRenderer(const AwesomeNativeWindowRenderer &);
-    AwesomeNativeWindowRenderer &operator=(
-            const AwesomeNativeWindowRenderer &);
-};
-
-// To collect the decoder usage
-void addBatteryData(uint32_t params) {
-    sp<IBinder> binder =
-        defaultServiceManager()->getService(String16("media.player"));
-    sp<IMediaPlayerService> service = interface_cast<IMediaPlayerService>(binder);
-    CHECK(service.get() != NULL);
-
-    service->addBatteryData(params);
-}
-
-////////////////////////////////////////////////////////////////////////////////
-PreviewPlayerBase::PreviewPlayerBase()
-    : mQueueStarted(false),
-      mTimeSource(NULL),
-      mVideoRendererIsPreview(false),
-      mAudioPlayer(NULL),
-      mDisplayWidth(0),
-      mDisplayHeight(0),
-      mFlags(0),
-      mExtractorFlags(0),
-      mVideoBuffer(NULL),
-      mDecryptHandle(NULL),
-      mLastVideoTimeUs(-1) {
-    CHECK_EQ(mClient.connect(), (status_t)OK);
-
-    DataSource::RegisterDefaultSniffers();
-
-    mVideoEvent = new AwesomeEvent(this, &PreviewPlayerBase::onVideoEvent);
-    mVideoEventPending = false;
-    mStreamDoneEvent = new AwesomeEvent(this, &PreviewPlayerBase::onStreamDone);
-    mStreamDoneEventPending = false;
-    mBufferingEvent = new AwesomeEvent(this, &PreviewPlayerBase::onBufferingUpdate);
-    mBufferingEventPending = false;
-    mVideoLagEvent = new AwesomeEvent(this, &PreviewPlayerBase::onVideoLagUpdate);
-    mVideoEventPending = false;
-
-    mCheckAudioStatusEvent = new AwesomeEvent(
-            this, &PreviewPlayerBase::onCheckAudioStatus);
-
-    mAudioStatusEventPending = false;
-
-    reset();
-}
-
-PreviewPlayerBase::~PreviewPlayerBase() {
-    if (mQueueStarted) {
-        mQueue.stop();
-    }
-
-    reset();
-
-    mClient.disconnect();
-}
-
-void PreviewPlayerBase::cancelPlayerEvents(bool keepBufferingGoing) {
-    mQueue.cancelEvent(mVideoEvent->eventID());
-    mVideoEventPending = false;
-    mQueue.cancelEvent(mStreamDoneEvent->eventID());
-    mStreamDoneEventPending = false;
-    mQueue.cancelEvent(mCheckAudioStatusEvent->eventID());
-    mAudioStatusEventPending = false;
-    mQueue.cancelEvent(mVideoLagEvent->eventID());
-    mVideoLagEventPending = false;
-
-    if (!keepBufferingGoing) {
-        mQueue.cancelEvent(mBufferingEvent->eventID());
-        mBufferingEventPending = false;
-    }
-}
-
-void PreviewPlayerBase::setListener(const wp<MediaPlayerBase> &listener) {
-    Mutex::Autolock autoLock(mLock);
-    mListener = listener;
-}
-
-status_t PreviewPlayerBase::setDataSource(
-        const char *uri, const KeyedVector<String8, String8> *headers) {
-    Mutex::Autolock autoLock(mLock);
-    return setDataSource_l(uri, headers);
-}
-
-status_t PreviewPlayerBase::setDataSource_l(
-        const char *uri, const KeyedVector<String8, String8> *headers) {
-    reset_l();
-
-    mUri = uri;
-
-    if (headers) {
-        mUriHeaders = *headers;
-
-        ssize_t index = mUriHeaders.indexOfKey(String8("x-hide-urls-from-log"));
-        if (index >= 0) {
-            // Browser is in "incognito" mode, suppress logging URLs.
-
-            // This isn't something that should be passed to the server.
-            mUriHeaders.removeItemsAt(index);
-
-            mFlags |= INCOGNITO;
-        }
-    }
-
-    if (!(mFlags & INCOGNITO)) {
-        LOGI("setDataSource_l('%s')", mUri.string());
-    } else {
-        LOGI("setDataSource_l(URL suppressed)");
-    }
-
-    // The actual work will be done during preparation in the call to
-    // ::finishSetDataSource_l to avoid blocking the calling thread in
-    // setDataSource for any significant time.
-
-    return OK;
-}
-
-status_t PreviewPlayerBase::setDataSource(
-        int fd, int64_t offset, int64_t length) {
-    Mutex::Autolock autoLock(mLock);
-
-    reset_l();
-
-    sp<DataSource> dataSource = new FileSource(fd, offset, length);
-
-    status_t err = dataSource->initCheck();
-
-    if (err != OK) {
-        return err;
-    }
-
-    mFileSource = dataSource;
-
-    return setDataSource_l(dataSource);
-}
-
-status_t PreviewPlayerBase::setDataSource(const sp<IStreamSource> &source) {
-    return INVALID_OPERATION;
-}
-
-status_t PreviewPlayerBase::setDataSource_l(
-        const sp<DataSource> &dataSource) {
-    sp<MediaExtractor> extractor = MediaExtractor::Create(dataSource);
-
-    if (extractor == NULL) {
-        return UNKNOWN_ERROR;
-    }
-
-    dataSource->getDrmInfo(mDecryptHandle, &mDrmManagerClient);
-    if (mDecryptHandle != NULL) {
-        CHECK(mDrmManagerClient);
-        if (RightsStatus::RIGHTS_VALID != mDecryptHandle->status) {
-            notifyListener_l(MEDIA_ERROR, MEDIA_ERROR_UNKNOWN, ERROR_DRM_NO_LICENSE);
-        }
-    }
-
-    return setDataSource_l(extractor);
-}
-
-status_t PreviewPlayerBase::setDataSource_l(const sp<MediaExtractor> &extractor) {
-    // Attempt to approximate overall stream bitrate by summing all
-    // tracks' individual bitrates, if not all of them advertise bitrate,
-    // we have to fail.
-
-    int64_t totalBitRate = 0;
-
-    for (size_t i = 0; i < extractor->countTracks(); ++i) {
-        sp<MetaData> meta = extractor->getTrackMetaData(i);
-
-        int32_t bitrate;
-        if (!meta->findInt32(kKeyBitRate, &bitrate)) {
-            totalBitRate = -1;
-            break;
-        }
-
-        totalBitRate += bitrate;
-    }
-
-    mBitrate = totalBitRate;
-
-    LOGV("mBitrate = %lld bits/sec", mBitrate);
-
-    bool haveAudio = false;
-    bool haveVideo = false;
-    for (size_t i = 0; i < extractor->countTracks(); ++i) {
-        sp<MetaData> meta = extractor->getTrackMetaData(i);
-
-        const char *mime;
-        CHECK(meta->findCString(kKeyMIMEType, &mime));
-
-        if (!haveVideo && !strncasecmp(mime, "video/", 6)) {
-            setVideoSource(extractor->getTrack(i));
-            haveVideo = true;
-
-            // Set the presentation/display size
-            int32_t displayWidth, displayHeight;
-            bool success = meta->findInt32(kKeyDisplayWidth, &displayWidth);
-            if (success) {
-                success = meta->findInt32(kKeyDisplayHeight, &displayHeight);
-            }
-            if (success) {
-                mDisplayWidth = displayWidth;
-                mDisplayHeight = displayHeight;
-            }
-
-        } else if (!haveAudio && !strncasecmp(mime, "audio/", 6)) {
-            setAudioSource(extractor->getTrack(i));
-            haveAudio = true;
-
-            if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_VORBIS)) {
-                // Only do this for vorbis audio; none of the other audio
-                // formats even support this ringtone-specific hack, and
-                // retrieving the metadata on some extractors may turn out
-                // to be very expensive.
-                sp<MetaData> fileMeta = extractor->getMetaData();
-                int32_t loop;
-                if (fileMeta != NULL
-                        && fileMeta->findInt32(kKeyAutoLoop, &loop) && loop != 0) {
-                    mFlags |= AUTO_LOOPING;
-                }
-            }
-        }
-
-        if (haveAudio && haveVideo) {
-            break;
-        }
-    }
-
-    if (!haveAudio && !haveVideo) {
-        return UNKNOWN_ERROR;
-    }
-
-    mExtractorFlags = extractor->flags();
-
-    return OK;
-}
-
-void PreviewPlayerBase::reset() {
-    Mutex::Autolock autoLock(mLock);
-    reset_l();
-}
-
-void PreviewPlayerBase::reset_l() {
-    mDisplayWidth = 0;
-    mDisplayHeight = 0;
-
-    if (mDecryptHandle != NULL) {
-            mDrmManagerClient->setPlaybackStatus(mDecryptHandle,
-                    Playback::STOP, 0);
-            mDecryptHandle = NULL;
-            mDrmManagerClient = NULL;
-    }
-
-    if (mFlags & PLAYING) {
-        uint32_t params = IMediaPlayerService::kBatteryDataTrackDecoder;
-        if ((mAudioSource != NULL) && (mAudioSource != mAudioTrack)) {
-            params |= IMediaPlayerService::kBatteryDataTrackAudio;
-        }
-        if (mVideoSource != NULL) {
-            params |= IMediaPlayerService::kBatteryDataTrackVideo;
-        }
-        addBatteryData(params);
-    }
-
-    if (mFlags & PREPARING) {
-        mFlags |= PREPARE_CANCELLED;
-        if (mConnectingDataSource != NULL) {
-            LOGI("interrupting the connection process");
-            mConnectingDataSource->disconnect();
-        }
-
-        if (mFlags & PREPARING_CONNECTED) {
-            // We are basically done preparing; we're just buffering
-            // enough data to start playback, so we can safely interrupt that.
-            finishAsyncPrepare_l();
-        }
-    }
-
-    while (mFlags & PREPARING) {
-        mPreparedCondition.wait(mLock);
-    }
-
-    cancelPlayerEvents();
-
-    mCachedSource.clear();
-    mAudioTrack.clear();
-    mVideoTrack.clear();
-
-    // Shut down audio first, so that the response to the reset request
-    // appears to happen instantaneously as far as the user is concerned.
-    // If we did this later, audio would continue playing while we
-    // shut down the video-related resources and the player would appear
-    // less responsive to a reset request.
-    if (mAudioPlayer == NULL && mAudioSource != NULL) {
-        // If we had an audio player, it would have effectively
-        // taken possession of the audio source and stopped it when
-        // _it_ is stopped. Otherwise this is still our responsibility.
-        mAudioSource->stop();
-    }
-    mAudioSource.clear();
-
-    mTimeSource = NULL;
-
-    delete mAudioPlayer;
-    mAudioPlayer = NULL;
-
-    mVideoRenderer.clear();
-
-    if (mVideoSource != NULL) {
-        shutdownVideoDecoder_l();
-    }
-
-    mDurationUs = -1;
-    mFlags = 0;
-    mExtractorFlags = 0;
-    mTimeSourceDeltaUs = 0;
-    mVideoTimeUs = 0;
-
-    mSeeking = NO_SEEK;
-    mSeekNotificationSent = false;
-    mSeekTimeUs = 0;
-
-    mUri.setTo("");
-    mUriHeaders.clear();
-
-    mFileSource.clear();
-
-    mBitrate = -1;
-    mLastVideoTimeUs = -1;
-}
-
-void PreviewPlayerBase::notifyListener_l(int msg, int ext1, int ext2) {
-    if (mListener != NULL) {
-        sp<MediaPlayerBase> listener = mListener.promote();
-
-        if (listener != NULL) {
-            listener->sendEvent(msg, ext1, ext2);
-        }
-    }
-}
-
-bool PreviewPlayerBase::getBitrate(int64_t *bitrate) {
-    off64_t size;
-    if (mDurationUs >= 0 && mCachedSource != NULL
-            && mCachedSource->getSize(&size) == OK) {
-        *bitrate = size * 8000000ll / mDurationUs;  // in bits/sec
-        return true;
-    }
-
-    if (mBitrate >= 0) {
-        *bitrate = mBitrate;
-        return true;
-    }
-
-    *bitrate = 0;
-
-    return false;
-}
-
-// Returns true iff cached duration is available/applicable.
-bool PreviewPlayerBase::getCachedDuration_l(int64_t *durationUs, bool *eos) {
-    int64_t bitrate;
-
-    if (mCachedSource != NULL && getBitrate(&bitrate)) {
-        status_t finalStatus;
-        size_t cachedDataRemaining = mCachedSource->approxDataRemaining(&finalStatus);
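-        // Convert the remaining cached bytes into microseconds of
-        // playback at the estimated bitrate.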
-        *durationUs = cachedDataRemaining * 8000000ll / bitrate;
-        *eos = (finalStatus != OK);
-        return true;
-    }
-
-    return false;
-}
-
-void PreviewPlayerBase::ensureCacheIsFetching_l() {
-    if (mCachedSource != NULL) {
-        mCachedSource->resumeFetchingIfNecessary();
-    }
-}
-
-void PreviewPlayerBase::onVideoLagUpdate() {
-    Mutex::Autolock autoLock(mLock);
-    if (!mVideoLagEventPending) {
-        return;
-    }
-    mVideoLagEventPending = false;
-
-    int64_t audioTimeUs = mAudioPlayer->getMediaTimeUs();
-    int64_t videoLateByUs = audioTimeUs - mVideoTimeUs;
-
-    if (!(mFlags & VIDEO_AT_EOS) && videoLateByUs > 300000ll) {
-        LOGV("video late by %lld ms.", videoLateByUs / 1000ll);
-
-        notifyListener_l(
-                MEDIA_INFO,
-                MEDIA_INFO_VIDEO_TRACK_LAGGING,
-                videoLateByUs / 1000ll);
-    }
-
-    postVideoLagEvent_l();
-}
-
-void PreviewPlayerBase::onBufferingUpdate() {
-    Mutex::Autolock autoLock(mLock);
-    if (!mBufferingEventPending) {
-        return;
-    }
-    mBufferingEventPending = false;
-
-    if (mCachedSource != NULL) {
-        status_t finalStatus;
-        size_t cachedDataRemaining = mCachedSource->approxDataRemaining(&finalStatus);
-        bool eos = (finalStatus != OK);
-
-        if (eos) {
-            if (finalStatus == ERROR_END_OF_STREAM) {
-                notifyListener_l(MEDIA_BUFFERING_UPDATE, 100);
-            }
-            if (mFlags & PREPARING) {
-                LOGV("cache has reached EOS, prepare is done.");
-                finishAsyncPrepare_l();
-            }
-        } else {
-            int64_t bitrate;
-            if (getBitrate(&bitrate)) {
-                size_t cachedSize = mCachedSource->cachedSize();
-                int64_t cachedDurationUs = cachedSize * 8000000ll / bitrate;
-
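-                // Report buffering progress as the cached duration
-                // relative to the total duration, capped at 100%.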
-                int percentage = 100.0 * (double)cachedDurationUs / mDurationUs;
-                if (percentage > 100) {
-                    percentage = 100;
-                }
-
-                notifyListener_l(MEDIA_BUFFERING_UPDATE, percentage);
-            } else {
-                // We don't know the bitrate of the stream, so use absolute
-                // size limits to maintain the cache.
-
-                if ((mFlags & PLAYING) && !eos
-                        && (cachedDataRemaining < kLowWaterMarkBytes)) {
-                    LOGI("cache is running low (< %d) , pausing.",
-                         kLowWaterMarkBytes);
-                    mFlags |= CACHE_UNDERRUN;
-                    pause_l();
-                    ensureCacheIsFetching_l();
-                    notifyListener_l(MEDIA_INFO, MEDIA_INFO_BUFFERING_START);
-                } else if (eos || cachedDataRemaining > kHighWaterMarkBytes) {
-                    if (mFlags & CACHE_UNDERRUN) {
-                        LOGI("cache has filled up (> %d), resuming.",
-                             kHighWaterMarkBytes);
-                        mFlags &= ~CACHE_UNDERRUN;
-                        play_l();
-                        notifyListener_l(MEDIA_INFO, MEDIA_INFO_BUFFERING_END);
-                    } else if (mFlags & PREPARING) {
-                        LOGV("cache has filled up (> %d), prepare is done",
-                             kHighWaterMarkBytes);
-                        finishAsyncPrepare_l();
-                    }
-                }
-            }
-        }
-    }
-
-    int64_t cachedDurationUs;
-    bool eos;
-    if (getCachedDuration_l(&cachedDurationUs, &eos)) {
-        LOGV("cachedDurationUs = %.2f secs, eos=%d",
-             cachedDurationUs / 1E6, eos);
-
-        if ((mFlags & PLAYING) && !eos
-                && (cachedDurationUs < kLowWaterMarkUs)) {
-            LOGI("cache is running low (%.2f secs) , pausing.",
-                 cachedDurationUs / 1E6);
-            mFlags |= CACHE_UNDERRUN;
-            pause_l();
-            ensureCacheIsFetching_l();
-            notifyListener_l(MEDIA_INFO, MEDIA_INFO_BUFFERING_START);
-        } else if (eos || cachedDurationUs > kHighWaterMarkUs) {
-            if (mFlags & CACHE_UNDERRUN) {
-                LOGI("cache has filled up (%.2f secs), resuming.",
-                     cachedDurationUs / 1E6);
-                mFlags &= ~CACHE_UNDERRUN;
-                play_l();
-                notifyListener_l(MEDIA_INFO, MEDIA_INFO_BUFFERING_END);
-            } else if (mFlags & PREPARING) {
-                LOGV("cache has filled up (%.2f secs), prepare is done",
-                     cachedDurationUs / 1E6);
-                finishAsyncPrepare_l();
-            }
-        }
-    }
-
-    postBufferingEvent_l();
-}
-
-void PreviewPlayerBase::onStreamDone() {
-    // Posted whenever any stream finishes playing.
-
-    Mutex::Autolock autoLock(mLock);
-    if (!mStreamDoneEventPending) {
-        return;
-    }
-    mStreamDoneEventPending = false;
-
-    if (mStreamDoneStatus != ERROR_END_OF_STREAM) {
-        LOGV("MEDIA_ERROR %d", mStreamDoneStatus);
-
-        notifyListener_l(
-                MEDIA_ERROR, MEDIA_ERROR_UNKNOWN, mStreamDoneStatus);
-
-        pause_l(true /* at eos */);
-
-        mFlags |= AT_EOS;
-        return;
-    }
-
-    const bool allDone =
-        (mVideoSource == NULL || (mFlags & VIDEO_AT_EOS))
-            && (mAudioSource == NULL || (mFlags & AUDIO_AT_EOS));
-
-    if (!allDone) {
-        return;
-    }
-
-    if (mFlags & (LOOPING | AUTO_LOOPING)) {
-        seekTo_l(0);
-
-        if (mVideoSource != NULL) {
-            postVideoEvent_l();
-        }
-    } else {
-        LOGV("MEDIA_PLAYBACK_COMPLETE");
-        notifyListener_l(MEDIA_PLAYBACK_COMPLETE);
-
-        pause_l(true /* at eos */);
-
-        mFlags |= AT_EOS;
-    }
-}
-
-status_t PreviewPlayerBase::play() {
-    Mutex::Autolock autoLock(mLock);
-
-    mFlags &= ~CACHE_UNDERRUN;
-
-    return play_l();
-}
-
-status_t PreviewPlayerBase::play_l() {
-    mFlags &= ~SEEK_PREVIEW;
-
-    if (mFlags & PLAYING) {
-        return OK;
-    }
-
-    if (!(mFlags & PREPARED)) {
-        status_t err = prepare_l();
-
-        if (err != OK) {
-            return err;
-        }
-    }
-
-    mFlags |= PLAYING;
-    mFlags |= FIRST_FRAME;
-
-    if (mDecryptHandle != NULL) {
-        int64_t position;
-        getPosition(&position);
-        mDrmManagerClient->setPlaybackStatus(mDecryptHandle,
-                Playback::START, position / 1000);
-    }
-
-    if (mAudioSource != NULL) {
-        if (mAudioPlayer == NULL) {
-            if (mAudioSink != NULL) {
-                mAudioPlayer = new AudioPlayerBase(mAudioSink, this);
-                mAudioPlayer->setSource(mAudioSource);
-
-                mTimeSource = mAudioPlayer;
-
-                // If there was a seek request before we ever started,
-                // honor the request now.
-                // Make sure to do this before starting the audio player
-                // to avoid a race condition.
-                seekAudioIfNecessary_l();
-            }
-        }
-
-        CHECK(!(mFlags & AUDIO_RUNNING));
-
-        if (mVideoSource == NULL) {
-            status_t err = startAudioPlayer_l();
-
-            if (err != OK) {
-                delete mAudioPlayer;
-                mAudioPlayer = NULL;
-
-                mFlags &= ~(PLAYING | FIRST_FRAME);
-
-                if (mDecryptHandle != NULL) {
-                    mDrmManagerClient->setPlaybackStatus(
-                            mDecryptHandle, Playback::STOP, 0);
-                }
-
-                return err;
-            }
-        }
-    }
-
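-    // With no audio player to act as the clock, fall back to the
-    // system time source.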
-    if (mTimeSource == NULL && mAudioPlayer == NULL) {
-        mTimeSource = &mSystemTimeSource;
-    }
-
-    if (mVideoSource != NULL) {
-        // Kick off video playback
-        postVideoEvent_l();
-
-        if (mAudioSource != NULL && mVideoSource != NULL) {
-            postVideoLagEvent_l();
-        }
-    }
-
-    if (mFlags & AT_EOS) {
-        // Legacy behaviour: if a stream finishes playing and is then
-        // started again, we play from the start...
-        seekTo_l(0);
-    }
-
-    uint32_t params = IMediaPlayerService::kBatteryDataCodecStarted
-        | IMediaPlayerService::kBatteryDataTrackDecoder;
-    if ((mAudioSource != NULL) && (mAudioSource != mAudioTrack)) {
-        params |= IMediaPlayerService::kBatteryDataTrackAudio;
-    }
-    if (mVideoSource != NULL) {
-        params |= IMediaPlayerService::kBatteryDataTrackVideo;
-    }
-    addBatteryData(params);
-
-    return OK;
-}
-
-status_t PreviewPlayerBase::startAudioPlayer_l() {
-    CHECK(!(mFlags & AUDIO_RUNNING));
-
-    if (mAudioSource == NULL || mAudioPlayer == NULL) {
-        return OK;
-    }
-
-    if (!(mFlags & AUDIOPLAYER_STARTED)) {
-        mFlags |= AUDIOPLAYER_STARTED;
-
-        // We've already started the MediaSource in order to enable
-        // the prefetcher to read its data.
-        status_t err = mAudioPlayer->start(
-                true /* sourceAlreadyStarted */);
-
-        if (err != OK) {
-            notifyListener_l(MEDIA_ERROR, MEDIA_ERROR_UNKNOWN, err);
-            return err;
-        }
-    } else {
-        mAudioPlayer->resume();
-    }
-
-    mFlags |= AUDIO_RUNNING;
-
-    mWatchForAudioEOS = true;
-
-    return OK;
-}
-
-void PreviewPlayerBase::notifyVideoSize_l() {
-    sp<MetaData> meta = mVideoSource->getFormat();
-
-    int32_t vWidth, vHeight;
-    int32_t cropLeft, cropTop, cropRight, cropBottom;
-
-    CHECK(meta->findInt32(kKeyWidth, &vWidth));
-    CHECK(meta->findInt32(kKeyHeight, &vHeight));
-
-    mGivenWidth = vWidth;
-    mGivenHeight = vHeight;
-
-    if (!meta->findRect(
-                kKeyCropRect, &cropLeft, &cropTop, &cropRight, &cropBottom)) {
-
-        cropLeft = cropTop = 0;
-        cropRight = vWidth - 1;
-        cropBottom = vHeight - 1;
-
-        LOGD("got dimensions only %d x %d", vWidth, vHeight);
-    } else {
-        LOGD("got crop rect %d, %d, %d, %d",
-             cropLeft, cropTop, cropRight, cropBottom);
-    }
-
-    mCropRect.left = cropLeft;
-    mCropRect.right = cropRight;
-    mCropRect.top = cropTop;
-    mCropRect.bottom = cropBottom;
-
-    int32_t displayWidth;
-    if (meta->findInt32(kKeyDisplayWidth, &displayWidth)) {
-        LOGV("Display width changed (%d=>%d)", mDisplayWidth, displayWidth);
-        mDisplayWidth = displayWidth;
-    }
-    int32_t displayHeight;
-    if (meta->findInt32(kKeyDisplayHeight, &displayHeight)) {
-        LOGV("Display height changed (%d=>%d)", mDisplayHeight, displayHeight);
-        mDisplayHeight = displayHeight;
-    }
-
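-    // Default to the size of the crop rectangle, but prefer the
-    // container's display dimensions when they are set.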
-    int32_t usableWidth = cropRight - cropLeft + 1;
-    int32_t usableHeight = cropBottom - cropTop + 1;
-    if (mDisplayWidth != 0) {
-        usableWidth = mDisplayWidth;
-    }
-    if (mDisplayHeight != 0) {
-        usableHeight = mDisplayHeight;
-    }
-
-    int32_t rotationDegrees;
-    if (!mVideoTrack->getFormat()->findInt32(
-                kKeyRotation, &rotationDegrees)) {
-        rotationDegrees = 0;
-    }
-
-    if (rotationDegrees == 90 || rotationDegrees == 270) {
-        notifyListener_l(
-                MEDIA_SET_VIDEO_SIZE, usableHeight, usableWidth);
-    } else {
-        notifyListener_l(
-                MEDIA_SET_VIDEO_SIZE, usableWidth, usableHeight);
-    }
-}
-
-void PreviewPlayerBase::initRenderer_l() {
-    if (mNativeWindow == NULL) {
-        return;
-    }
-
-    sp<MetaData> meta = mVideoSource->getFormat();
-
-    int32_t format;
-    const char *component;
-    CHECK(meta->findInt32(kKeyColorFormat, &format));
-    CHECK(meta->findCString(kKeyDecoderComponent, &component));
-
-    int32_t rotationDegrees;
-    if (!mVideoTrack->getFormat()->findInt32(
-                kKeyRotation, &rotationDegrees)) {
-        rotationDegrees = 0;
-    }
-
-    mVideoRenderer.clear();
-
-    // Must ensure that mVideoRenderer's destructor is actually executed
-    // before creating a new one.
-    IPCThreadState::self()->flushCommands();
-
-    if (USE_SURFACE_ALLOC && strncmp(component, "OMX.", 4) == 0) {
-        // Hardware decoders avoid the CPU color conversion by decoding
-        // directly to ANativeBuffers, so we must use a renderer that
-        // just pushes those buffers to the ANativeWindow.
-        mVideoRenderer =
-            new AwesomeNativeWindowRenderer(mNativeWindow, rotationDegrees);
-    } else {
-        // Other decoders are instantiated locally and as a consequence
-        // allocate their buffers in local address space.  This renderer
-        // then performs a color conversion and copy to get the data
-        // into the ANativeBuffer.
-        mVideoRenderer = new AwesomeLocalRenderer(mNativeWindow, meta);
-    }
-}
-
-status_t PreviewPlayerBase::pause() {
-    Mutex::Autolock autoLock(mLock);
-
-    mFlags &= ~CACHE_UNDERRUN;
-
-    return pause_l();
-}
-
-status_t PreviewPlayerBase::pause_l(bool at_eos) {
-    if (!(mFlags & PLAYING)) {
-        return OK;
-    }
-
-    cancelPlayerEvents(true /* keepBufferingGoing */);
-
-    if (mAudioPlayer != NULL && (mFlags & AUDIO_RUNNING)) {
-        if (at_eos) {
-            // If we played the audio stream to completion we
-            // want to make sure that all samples remaining in the audio
-            // track's queue are played out.
-            mAudioPlayer->pause(true /* playPendingSamples */);
-        } else {
-            mAudioPlayer->pause();
-        }
-
-        mFlags &= ~AUDIO_RUNNING;
-    }
-
-    mFlags &= ~PLAYING;
-
-    if (mDecryptHandle != NULL) {
-        mDrmManagerClient->setPlaybackStatus(mDecryptHandle,
-                Playback::PAUSE, 0);
-    }
-
-    uint32_t params = IMediaPlayerService::kBatteryDataTrackDecoder;
-    if ((mAudioSource != NULL) && (mAudioSource != mAudioTrack)) {
-        params |= IMediaPlayerService::kBatteryDataTrackAudio;
-    }
-    if (mVideoSource != NULL) {
-        params |= IMediaPlayerService::kBatteryDataTrackVideo;
-    }
-
-    addBatteryData(params);
-
-    return OK;
-}
-
-bool PreviewPlayerBase::isPlaying() const {
-    return (mFlags & PLAYING) || (mFlags & CACHE_UNDERRUN);
-}
-
-void PreviewPlayerBase::setSurface(const sp<Surface> &surface) {
-    Mutex::Autolock autoLock(mLock);
-
-    mSurface = surface;
-    setNativeWindow_l(surface);
-}
-
-void PreviewPlayerBase::setSurfaceTexture(const sp<ISurfaceTexture> &surfaceTexture) {
-    Mutex::Autolock autoLock(mLock);
-
-    mSurface.clear();
-    if (surfaceTexture != NULL) {
-        setNativeWindow_l(new SurfaceTextureClient(surfaceTexture));
-    }
-}
-
-void PreviewPlayerBase::shutdownVideoDecoder_l() {
-    if (mVideoBuffer) {
-        mVideoBuffer->release();
-        mVideoBuffer = NULL;
-    }
-
-    mVideoSource->stop();
-
-    // The following hack is necessary to ensure that the OMX
-    // component is completely released by the time we may try
-    // to instantiate it again.
-    wp<MediaSource> tmp = mVideoSource;
-    mVideoSource.clear();
-    while (tmp.promote() != NULL) {
-        usleep(1000);
-    }
-    IPCThreadState::self()->flushCommands();
-}
-
-void PreviewPlayerBase::setNativeWindow_l(const sp<ANativeWindow> &native) {
-    mNativeWindow = native;
-
-    if (mVideoSource == NULL) {
-        return;
-    }
-
-    LOGI("attempting to reconfigure to use new surface");
-
-    bool wasPlaying = (mFlags & PLAYING) != 0;
-
-    pause_l();
-    mVideoRenderer.clear();
-
-    shutdownVideoDecoder_l();
-
-    CHECK_EQ(initVideoDecoder(), (status_t)OK);
-
-    if (mLastVideoTimeUs >= 0) {
-        mSeeking = SEEK;
-        mSeekNotificationSent = true;
-        mSeekTimeUs = mLastVideoTimeUs;
-        mFlags &= ~(AT_EOS | AUDIO_AT_EOS | VIDEO_AT_EOS);
-    }
-
-    if (wasPlaying) {
-        play_l();
-    }
-}
-
-void PreviewPlayerBase::setAudioSink(
-        const sp<MediaPlayerBase::AudioSink> &audioSink) {
-    Mutex::Autolock autoLock(mLock);
-
-    mAudioSink = audioSink;
-}
-
-status_t PreviewPlayerBase::setLooping(bool shouldLoop) {
-    Mutex::Autolock autoLock(mLock);
-
-    mFlags = mFlags & ~LOOPING;
-
-    if (shouldLoop) {
-        mFlags |= LOOPING;
-    }
-
-    return OK;
-}
-
-status_t PreviewPlayerBase::getDuration(int64_t *durationUs) {
-    Mutex::Autolock autoLock(mMiscStateLock);
-
-    if (mDurationUs < 0) {
-        return UNKNOWN_ERROR;
-    }
-
-    *durationUs = mDurationUs;
-
-    return OK;
-}
-
-status_t PreviewPlayerBase::getPosition(int64_t *positionUs) {
-    if (mSeeking != NO_SEEK) {
-        *positionUs = mSeekTimeUs;
-    } else if (mVideoSource != NULL
-            && (mAudioPlayer == NULL || !(mFlags & VIDEO_AT_EOS))) {
-        Mutex::Autolock autoLock(mMiscStateLock);
-        *positionUs = mVideoTimeUs;
-    } else if (mAudioPlayer != NULL) {
-        *positionUs = mAudioPlayer->getMediaTimeUs();
-    } else {
-        *positionUs = 0;
-    }
-
-    return OK;
-}
-
-status_t PreviewPlayerBase::seekTo(int64_t timeUs) {
-    if (mExtractorFlags & MediaExtractor::CAN_SEEK) {
-        Mutex::Autolock autoLock(mLock);
-        return seekTo_l(timeUs);
-    }
-
-    return OK;
-}
-
-status_t PreviewPlayerBase::seekTo_l(int64_t timeUs) {
-    if (mFlags & CACHE_UNDERRUN) {
-        mFlags &= ~CACHE_UNDERRUN;
-        play_l();
-    }
-
-    if ((mFlags & PLAYING) && mVideoSource != NULL && (mFlags & VIDEO_AT_EOS)) {
-        // Video playback completed before, so there's no pending
-        // video event right now. In order for this new seek
-        // to be honored, we need to post one.
-
-        postVideoEvent_l();
-    }
-
-    mSeeking = SEEK;
-    mSeekNotificationSent = false;
-    mSeekTimeUs = timeUs;
-    mFlags &= ~(AT_EOS | AUDIO_AT_EOS | VIDEO_AT_EOS);
-
-    seekAudioIfNecessary_l();
-
-    if (!(mFlags & PLAYING)) {
-        LOGV("seeking while paused, sending SEEK_COMPLETE notification"
-             " immediately.");
-
-        notifyListener_l(MEDIA_SEEK_COMPLETE);
-        mSeekNotificationSent = true;
-
-        if ((mFlags & PREPARED) && mVideoSource != NULL) {
-            mFlags |= SEEK_PREVIEW;
-            postVideoEvent_l();
-        }
-    }
-
-    return OK;
-}
-
-void PreviewPlayerBase::seekAudioIfNecessary_l() {
-    if (mSeeking != NO_SEEK && mVideoSource == NULL && mAudioPlayer != NULL) {
-        mAudioPlayer->seekTo(mSeekTimeUs);
-
-        mWatchForAudioSeekComplete = true;
-        mWatchForAudioEOS = true;
-
-        if (mDecryptHandle != NULL) {
-            mDrmManagerClient->setPlaybackStatus(mDecryptHandle,
-                    Playback::PAUSE, 0);
-            mDrmManagerClient->setPlaybackStatus(mDecryptHandle,
-                    Playback::START, mSeekTimeUs / 1000);
-        }
-    }
-}
-
-void PreviewPlayerBase::setAudioSource(sp<MediaSource> source) {
-    CHECK(source != NULL);
-
-    mAudioTrack = source;
-}
-
-status_t PreviewPlayerBase::initAudioDecoder() {
-    sp<MetaData> meta = mAudioTrack->getFormat();
-
-    const char *mime;
-    CHECK(meta->findCString(kKeyMIMEType, &mime));
-
-    if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_RAW)) {
-        mAudioSource = mAudioTrack;
-    } else {
-        mAudioSource = OMXCodec::Create(
-                mClient.interface(), mAudioTrack->getFormat(),
-                false, // createEncoder
-                mAudioTrack);
-    }
-
-    if (mAudioSource != NULL) {
-        int64_t durationUs;
-        if (mAudioTrack->getFormat()->findInt64(kKeyDuration, &durationUs)) {
-            Mutex::Autolock autoLock(mMiscStateLock);
-            if (mDurationUs < 0 || durationUs > mDurationUs) {
-                mDurationUs = durationUs;
-            }
-        }
-
-        status_t err = mAudioSource->start();
-
-        if (err != OK) {
-            mAudioSource.clear();
-            return err;
-        }
-    } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_QCELP)) {
-        // For legacy reasons we're simply going to ignore the absence
-        // of an audio decoder for QCELP instead of aborting playback
-        // altogether.
-        return OK;
-    }
-
-    return mAudioSource != NULL ? OK : UNKNOWN_ERROR;
-}
-
-void PreviewPlayerBase::setVideoSource(sp<MediaSource> source) {
-    CHECK(source != NULL);
-
-    mVideoTrack = source;
-}
-
-status_t PreviewPlayerBase::initVideoDecoder(uint32_t flags) {
-
-    // Either the application or the DRM system can independently say
-    // that there must be a hardware-protected path to an external video sink.
-    // For now we always require a hardware-protected path to an external
-    // video sink if content is DRMed, but eventually this could be
-    // optional per DRM agent.
-    // When the application wants protection, then
-    //   (USE_SURFACE_ALLOC && (mSurface != 0) &&
-    //   (mSurface->getFlags() & ISurfaceComposer::eProtectedByApp))
-    // will be true, but that part is already handled by SurfaceFlinger.
-
-#ifdef DEBUG_HDCP
-    // For debugging, we allow a system property to control the protected usage.
-    // In case of uninitialized or unexpected property, we default to "DRM only".
-    bool setProtectionBit = false;
-    char value[PROPERTY_VALUE_MAX];
-    if (property_get("persist.sys.hdcp_checking", value, NULL)) {
-        if (!strcmp(value, "never")) {
-            // nop
-        } else if (!strcmp(value, "always")) {
-            setProtectionBit = true;
-        } else if (!strcmp(value, "drm-only")) {
-            if (mDecryptHandle != NULL) {
-                setProtectionBit = true;
-            }
-        // The property value is empty or has an unexpected value.
-        } else {
-            if (mDecryptHandle != NULL) {
-                setProtectionBit = true;
-            }
-        }
-    // can't read the property value
-    } else {
-        if (mDecryptHandle != NULL) {
-            setProtectionBit = true;
-        }
-    }
-    // Note that the usage bit is already cleared, so there is no need to clear it in the "else" case.
-    if (setProtectionBit) {
-        flags |= OMXCodec::kEnableGrallocUsageProtected;
-    }
-#else
-    if (mDecryptHandle != NULL) {
-        flags |= OMXCodec::kEnableGrallocUsageProtected;
-    }
-#endif
-    LOGV("initVideoDecoder flags=0x%x", flags);
-    mVideoSource = OMXCodec::Create(
-            mClient.interface(), mVideoTrack->getFormat(),
-            false, // createEncoder
-            mVideoTrack,
-            NULL, flags, USE_SURFACE_ALLOC ? mNativeWindow : NULL);
-
-    if (mVideoSource != NULL) {
-        int64_t durationUs;
-        if (mVideoTrack->getFormat()->findInt64(kKeyDuration, &durationUs)) {
-            Mutex::Autolock autoLock(mMiscStateLock);
-            if (mDurationUs < 0 || durationUs > mDurationUs) {
-                mDurationUs = durationUs;
-            }
-        }
-
-        status_t err = mVideoSource->start();
-
-        if (err != OK) {
-            mVideoSource.clear();
-            return err;
-        }
-    }
-
-    return mVideoSource != NULL ? OK : UNKNOWN_ERROR;
-}
-
-void PreviewPlayerBase::finishSeekIfNecessary(int64_t videoTimeUs) {
-    if (mSeeking == SEEK_VIDEO_ONLY) {
-        mSeeking = NO_SEEK;
-        return;
-    }
-
-    if (mSeeking == NO_SEEK || (mFlags & SEEK_PREVIEW)) {
-        return;
-    }
-
-    if (mAudioPlayer != NULL) {
-        LOGV("seeking audio to %lld us (%.2f secs).", videoTimeUs, videoTimeUs / 1E6);
-
-        // If we don't have a video time, seek audio to the originally
-        // requested seek time instead.
-
-        mAudioPlayer->seekTo(videoTimeUs < 0 ? mSeekTimeUs : videoTimeUs);
-        mWatchForAudioSeekComplete = true;
-        mWatchForAudioEOS = true;
-    } else if (!mSeekNotificationSent) {
-        // If we're playing video only, report seek complete now,
-        // otherwise audio player will notify us later.
-        notifyListener_l(MEDIA_SEEK_COMPLETE);
-        mSeekNotificationSent = true;
-    }
-
-    mFlags |= FIRST_FRAME;
-    mSeeking = NO_SEEK;
-
-    if (mDecryptHandle != NULL) {
-        mDrmManagerClient->setPlaybackStatus(mDecryptHandle,
-                Playback::PAUSE, 0);
-        mDrmManagerClient->setPlaybackStatus(mDecryptHandle,
-                Playback::START, videoTimeUs / 1000);
-    }
-}
-
-void PreviewPlayerBase::onVideoEvent() {
-    Mutex::Autolock autoLock(mLock);
-    if (!mVideoEventPending) {
-        // The event was cancelled in reset_l(), but it had already
-        // been scheduled for execution at that time.
-        return;
-    }
-    mVideoEventPending = false;
-
-    if (mSeeking != NO_SEEK) {
-        if (mVideoBuffer) {
-            mVideoBuffer->release();
-            mVideoBuffer = NULL;
-        }
-
-        if (mSeeking == SEEK && mCachedSource != NULL && mAudioSource != NULL
-                && !(mFlags & SEEK_PREVIEW)) {
-            // We're going to seek the video source first, followed by
-            // the audio source.
-            // In order to avoid jumps in the DataSource offset caused by
-            // the audio codec prefetching data from the old locations
-            // while the video codec is already reading data from the new
-            // locations, we'll "pause" the audio source, causing it to
-            // stop reading input data until a subsequent seek.
-
-            if (mAudioPlayer != NULL && (mFlags & AUDIO_RUNNING)) {
-                mAudioPlayer->pause();
-
-                mFlags &= ~AUDIO_RUNNING;
-            }
-            mAudioSource->pause();
-        }
-    }
-
-    if (!mVideoBuffer) {
-        MediaSource::ReadOptions options;
-        if (mSeeking != NO_SEEK) {
-            LOGV("seeking to %lld us (%.2f secs)", mSeekTimeUs, mSeekTimeUs / 1E6);
-
-            options.setSeekTo(
-                    mSeekTimeUs,
-                    mSeeking == SEEK_VIDEO_ONLY
-                        ? MediaSource::ReadOptions::SEEK_NEXT_SYNC
-                        : MediaSource::ReadOptions::SEEK_CLOSEST_SYNC);
-        }
-        for (;;) {
-            status_t err = mVideoSource->read(&mVideoBuffer, &options);
-            options.clearSeekTo();
-
-            if (err != OK) {
-                CHECK(mVideoBuffer == NULL);
-
-                if (err == INFO_FORMAT_CHANGED) {
-                    LOGV("VideoSource signalled format change.");
-
-                    notifyVideoSize_l();
-
-                    if (mVideoRenderer != NULL) {
-                        mVideoRendererIsPreview = false;
-                        initRenderer_l();
-                    }
-                    continue;
-                }
-
-                // So video playback is complete, but we may still have
-                // a seek request pending that needs to be applied
-                // to the audio track.
-                if (mSeeking != NO_SEEK) {
-                    LOGV("video stream ended while seeking!");
-                }
-                finishSeekIfNecessary(-1);
-
-                if (mAudioPlayer != NULL
-                        && !(mFlags & (AUDIO_RUNNING | SEEK_PREVIEW))) {
-                    startAudioPlayer_l();
-                }
-
-                mFlags |= VIDEO_AT_EOS;
-                postStreamDoneEvent_l(err);
-                return;
-            }
-
-            if (mVideoBuffer->range_length() == 0) {
-                // Some decoders, notably the PV AVC software decoder,
-                // return spurious empty buffers that we just want to ignore.
-
-                mVideoBuffer->release();
-                mVideoBuffer = NULL;
-                continue;
-            }
-
-            break;
-        }
-    }
-
-    int64_t timeUs;
-    CHECK(mVideoBuffer->meta_data()->findInt64(kKeyTime, &timeUs));
-
-    mLastVideoTimeUs = timeUs;
-
-    if (mSeeking == SEEK_VIDEO_ONLY) {
-        if (mSeekTimeUs > timeUs) {
-            LOGI("XXX mSeekTimeUs = %lld us, timeUs = %lld us",
-                 mSeekTimeUs, timeUs);
-        }
-    }
-
-    {
-        Mutex::Autolock autoLock(mMiscStateLock);
-        mVideoTimeUs = timeUs;
-    }
-
-    SeekType wasSeeking = mSeeking;
-    finishSeekIfNecessary(timeUs);
-
-    if (mAudioPlayer != NULL && !(mFlags & (AUDIO_RUNNING | SEEK_PREVIEW))) {
-        status_t err = startAudioPlayer_l();
-        if (err != OK) {
-            LOGE("Startung the audio player failed w/ err %d", err);
-            return;
-        }
-    }
-
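-    // Once audio has reached EOS it no longer advances, so time
-    // against the system clock instead.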
-    TimeSource *ts = (mFlags & AUDIO_AT_EOS) ? &mSystemTimeSource : mTimeSource;
-
-    if (mFlags & FIRST_FRAME) {
-        mFlags &= ~FIRST_FRAME;
-        mTimeSourceDeltaUs = ts->getRealTimeUs() - timeUs;
-    }
-
-    int64_t realTimeUs, mediaTimeUs;
-    if (!(mFlags & AUDIO_AT_EOS) && mAudioPlayer != NULL
-        && mAudioPlayer->getMediaTimeMapping(&realTimeUs, &mediaTimeUs)) {
-        mTimeSourceDeltaUs = realTimeUs - mediaTimeUs;
-    }
-
-    if (wasSeeking == SEEK_VIDEO_ONLY) {
-        int64_t nowUs = ts->getRealTimeUs() - mTimeSourceDeltaUs;
-
-        int64_t latenessUs = nowUs - timeUs;
-
-        if (latenessUs > 0) {
-            LOGI("after SEEK_VIDEO_ONLY we're late by %.2f secs", latenessUs / 1E6);
-        }
-    }
-
-    if (wasSeeking == NO_SEEK) {
-        // Let's display the first frame after seeking right away.
-
-        int64_t nowUs = ts->getRealTimeUs() - mTimeSourceDeltaUs;
-
-        int64_t latenessUs = nowUs - timeUs;
-
-        if (latenessUs > 500000ll
-                && mAudioPlayer != NULL
-                && mAudioPlayer->getMediaTimeMapping(
-                    &realTimeUs, &mediaTimeUs)) {
-            LOGI("we're much too late (%.2f secs), video skipping ahead",
-                 latenessUs / 1E6);
-
-            mVideoBuffer->release();
-            mVideoBuffer = NULL;
-
-            mSeeking = SEEK_VIDEO_ONLY;
-            mSeekTimeUs = mediaTimeUs;
-
-            postVideoEvent_l();
-            return;
-        }
-
-        if (latenessUs > 40000) {
-            // We're more than 40ms late.
-            LOGV("we're late by %lld us (%.2f secs), dropping frame",
-                 latenessUs, latenessUs / 1E6);
-            mVideoBuffer->release();
-            mVideoBuffer = NULL;
-
-            postVideoEvent_l();
-            return;
-        }
-
-        if (latenessUs < -10000) {
-            // We're more than 10ms early.
-
-            postVideoEvent_l(10000);
-            return;
-        }
-    }
-
-    if (mVideoRendererIsPreview || mVideoRenderer == NULL) {
-        mVideoRendererIsPreview = false;
-
-        initRenderer_l();
-    }
-
-    if (mVideoRenderer != NULL) {
-        mVideoRenderer->render(mVideoBuffer);
-    }
-
-    mVideoBuffer->release();
-    mVideoBuffer = NULL;
-
-    if (wasSeeking != NO_SEEK && (mFlags & SEEK_PREVIEW)) {
-        mFlags &= ~SEEK_PREVIEW;
-        return;
-    }
-
-    postVideoEvent_l();
-}
-
-void PreviewPlayerBase::postVideoEvent_l(int64_t delayUs) {
-    if (mVideoEventPending) {
-        return;
-    }
-
-    mVideoEventPending = true;
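-    // A negative delay selects the default 10ms scheduling interval.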
-    mQueue.postEventWithDelay(mVideoEvent, delayUs < 0 ? 10000 : delayUs);
-}
-
-void PreviewPlayerBase::postStreamDoneEvent_l(status_t status) {
-    if (mStreamDoneEventPending) {
-        return;
-    }
-    mStreamDoneEventPending = true;
-
-    mStreamDoneStatus = status;
-    mQueue.postEvent(mStreamDoneEvent);
-}
-
-void PreviewPlayerBase::postBufferingEvent_l() {
-    if (mBufferingEventPending) {
-        return;
-    }
-    mBufferingEventPending = true;
-    mQueue.postEventWithDelay(mBufferingEvent, 1000000ll);
-}
-
-void PreviewPlayerBase::postVideoLagEvent_l() {
-    if (mVideoLagEventPending) {
-        return;
-    }
-    mVideoLagEventPending = true;
-    mQueue.postEventWithDelay(mVideoLagEvent, 1000000ll);
-}
-
-void PreviewPlayerBase::postCheckAudioStatusEvent_l(int64_t delayUs) {
-    if (mAudioStatusEventPending) {
-        return;
-    }
-    mAudioStatusEventPending = true;
-    mQueue.postEventWithDelay(mCheckAudioStatusEvent, delayUs);
-}
-
-void PreviewPlayerBase::onCheckAudioStatus() {
-    Mutex::Autolock autoLock(mLock);
-    if (!mAudioStatusEventPending) {
-        // The event was dispatched, but while we were blocking on the
-        // mutex it has already been cancelled.
-        return;
-    }
-
-    mAudioStatusEventPending = false;
-
-    if (mWatchForAudioSeekComplete && !mAudioPlayer->isSeeking()) {
-        mWatchForAudioSeekComplete = false;
-
-        if (!mSeekNotificationSent) {
-            notifyListener_l(MEDIA_SEEK_COMPLETE);
-            mSeekNotificationSent = true;
-        }
-
-        mSeeking = NO_SEEK;
-    }
-
-    status_t finalStatus;
-    if (mWatchForAudioEOS && mAudioPlayer->reachedEOS(&finalStatus)) {
-        mWatchForAudioEOS = false;
-        mFlags |= AUDIO_AT_EOS;
-        mFlags |= FIRST_FRAME;
-        postStreamDoneEvent_l(finalStatus);
-    }
-}
-
-status_t PreviewPlayerBase::prepare() {
-    Mutex::Autolock autoLock(mLock);
-    return prepare_l();
-}
-
-status_t PreviewPlayerBase::prepare_l() {
-    if (mFlags & PREPARED) {
-        return OK;
-    }
-
-    if (mFlags & PREPARING) {
-        return UNKNOWN_ERROR;
-    }
-
-    mIsAsyncPrepare = false;
-    status_t err = prepareAsync_l();
-
-    if (err != OK) {
-        return err;
-    }
-
-    while (mFlags & PREPARING) {
-        mPreparedCondition.wait(mLock);
-    }
-
-    return mPrepareResult;
-}
-
-status_t PreviewPlayerBase::prepareAsync() {
-    Mutex::Autolock autoLock(mLock);
-
-    if (mFlags & PREPARING) {
-        return UNKNOWN_ERROR;  // async prepare already pending
-    }
-
-    mIsAsyncPrepare = true;
-    return prepareAsync_l();
-}
-
-status_t PreviewPlayerBase::prepareAsync_l() {
-    if (mFlags & PREPARING) {
-        return UNKNOWN_ERROR;  // async prepare already pending
-    }
-
-    if (!mQueueStarted) {
-        mQueue.start();
-        mQueueStarted = true;
-    }
-
-    mFlags |= PREPARING;
-    mAsyncPrepareEvent = new AwesomeEvent(
-            this, &PreviewPlayerBase::onPrepareAsyncEvent);
-
-    mQueue.postEvent(mAsyncPrepareEvent);
-
-    return OK;
-}
-
-status_t PreviewPlayerBase::finishSetDataSource_l() {
-    sp<DataSource> dataSource;
-
-    if (!strncasecmp("http://", mUri.string(), 7)
-            || !strncasecmp("https://", mUri.string(), 8)) {
-        mConnectingDataSource = HTTPBase::Create(
-                (mFlags & INCOGNITO)
-                    ? HTTPBase::kFlagIncognito
-                    : 0);
-
-        mLock.unlock();
-        status_t err = mConnectingDataSource->connect(mUri, &mUriHeaders);
-        mLock.lock();
-
-        if (err != OK) {
-            mConnectingDataSource.clear();
-
-            LOGI("mConnectingDataSource->connect() returned %d", err);
-            return err;
-        }
-
-#if 0
-        mCachedSource = new NuCachedSource2(
-                new ThrottledSource(
-                    mConnectingDataSource, 50 * 1024 /* bytes/sec */));
-#else
-        mCachedSource = new NuCachedSource2(mConnectingDataSource);
-#endif
-        mConnectingDataSource.clear();
-
-        dataSource = mCachedSource;
-
-        String8 contentType = dataSource->getMIMEType();
-
-        if (strncasecmp(contentType.string(), "audio/", 6)) {
-            // We don't do this for streams that appear to be audio-only,
-            // to ensure that even low-bandwidth streams start playing
-            // back fairly instantly.
-
-            // We're going to prefill the cache before trying to instantiate
-            // the extractor below, as the latter is an operation that otherwise
-            // could block on the datasource for a significant amount of time.
-            // During that time we'd be unable to abort the preparation phase
-            // without this prefill.
-
-            mLock.unlock();
-
-            for (;;) {
-                status_t finalStatus;
-                size_t cachedDataRemaining =
-                    mCachedSource->approxDataRemaining(&finalStatus);
-
-                if (finalStatus != OK || cachedDataRemaining >= kHighWaterMarkBytes
-                        || (mFlags & PREPARE_CANCELLED)) {
-                    break;
-                }
-
-                usleep(200000);
-            }
-
-            mLock.lock();
-        }
-
-        if (mFlags & PREPARE_CANCELLED) {
-            LOGI("Prepare cancelled while waiting for initial cache fill.");
-            return UNKNOWN_ERROR;
-        }
-    } else {
-        dataSource = DataSource::CreateFromURI(mUri.string(), &mUriHeaders);
-    }
-
-    if (dataSource == NULL) {
-        return UNKNOWN_ERROR;
-    }
-
-    sp<MediaExtractor> extractor = MediaExtractor::Create(dataSource);
-
-    if (extractor == NULL) {
-        return UNKNOWN_ERROR;
-    }
-
-    dataSource->getDrmInfo(mDecryptHandle, &mDrmManagerClient);
-
-    if (mDecryptHandle != NULL) {
-        CHECK(mDrmManagerClient);
-        if (RightsStatus::RIGHTS_VALID != mDecryptHandle->status) {
-            notifyListener_l(MEDIA_ERROR, MEDIA_ERROR_UNKNOWN, ERROR_DRM_NO_LICENSE);
-        }
-    }
-
-    return setDataSource_l(extractor);
-}
-
-void PreviewPlayerBase::abortPrepare(status_t err) {
-    CHECK(err != OK);
-
-    if (mIsAsyncPrepare) {
-        notifyListener_l(MEDIA_ERROR, MEDIA_ERROR_UNKNOWN, err);
-    }
-
-    mPrepareResult = err;
-    mFlags &= ~(PREPARING|PREPARE_CANCELLED|PREPARING_CONNECTED);
-    mAsyncPrepareEvent = NULL;
-    mPreparedCondition.broadcast();
-}
-
-// static
-bool PreviewPlayerBase::ContinuePreparation(void *cookie) {
-    PreviewPlayerBase *me = static_cast<PreviewPlayerBase *>(cookie);
-
-    return (me->mFlags & PREPARE_CANCELLED) == 0;
-}
-
-void PreviewPlayerBase::onPrepareAsyncEvent() {
-    Mutex::Autolock autoLock(mLock);
-
-    if (mFlags & PREPARE_CANCELLED) {
-        LOGI("prepare was cancelled before doing anything");
-        abortPrepare(UNKNOWN_ERROR);
-        return;
-    }
-
-    if (mUri.size() > 0) {
-        status_t err = finishSetDataSource_l();
-
-        if (err != OK) {
-            abortPrepare(err);
-            return;
-        }
-    }
-
-    if (mVideoTrack != NULL && mVideoSource == NULL) {
-        status_t err = initVideoDecoder();
-
-        if (err != OK) {
-            abortPrepare(err);
-            return;
-        }
-    }
-
-    if (mAudioTrack != NULL && mAudioSource == NULL) {
-        status_t err = initAudioDecoder();
-
-        if (err != OK) {
-            abortPrepare(err);
-            return;
-        }
-    }
-
-    mFlags |= PREPARING_CONNECTED;
-
-    if (mCachedSource != NULL) {
-        postBufferingEvent_l();
-    } else {
-        finishAsyncPrepare_l();
-    }
-}
-
-void PreviewPlayerBase::finishAsyncPrepare_l() {
-    if (mIsAsyncPrepare) {
-        if (mVideoSource == NULL) {
-            notifyListener_l(MEDIA_SET_VIDEO_SIZE, 0, 0);
-        } else {
-            notifyVideoSize_l();
-        }
-
-        notifyListener_l(MEDIA_PREPARED);
-    }
-
-    mPrepareResult = OK;
-    mFlags &= ~(PREPARING|PREPARE_CANCELLED|PREPARING_CONNECTED);
-    mFlags |= PREPARED;
-    mAsyncPrepareEvent = NULL;
-    mPreparedCondition.broadcast();
-}
-
-uint32_t PreviewPlayerBase::flags() const {
-    return mExtractorFlags;
-}
-
-void PreviewPlayerBase::postAudioEOS(int64_t delayUs) {
-    Mutex::Autolock autoLock(mLock);
-    postCheckAudioStatusEvent_l(delayUs);
-}
-
-void PreviewPlayerBase::postAudioSeekComplete() {
-    Mutex::Autolock autoLock(mLock);
-    postCheckAudioStatusEvent_l(0 /* delayUs */);
-}
-
-status_t PreviewPlayerBase::setParameter(int key, const Parcel &request) {
-    return OK;
-}
-
-status_t PreviewPlayerBase::getParameter(int key, Parcel *reply) {
-    return OK;
-}
-
-}  // namespace android
diff --git a/libvideoeditor/lvpp/PreviewPlayerBase.h b/libvideoeditor/lvpp/PreviewPlayerBase.h
deleted file mode 100644
index bb5c3d3..0000000
--- a/libvideoeditor/lvpp/PreviewPlayerBase.h
+++ /dev/null
@@ -1,279 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef PREVIEW_PLAYER_BASE_H_
-
-#define PREVIEW_PLAYER_BASE_H_
-
-#include "HTTPBase.h"
-#include "TimedEventQueue.h"
-
-#include <media/MediaPlayerInterface.h>
-#include <media/stagefright/DataSource.h>
-#include <media/stagefright/MediaSource.h>
-#include <media/stagefright/OMXClient.h>
-#include <media/stagefright/TimeSource.h>
-#include <utils/threads.h>
-#include <drm/DrmManagerClient.h>
-
-namespace android {
-
-struct AudioPlayerBase;
-struct DataSource;
-struct MediaBuffer;
-struct MediaExtractor;
-struct MediaSource;
-struct NuCachedSource2;
-struct ISurfaceTexture;
-
-class DrmManagerClient;
-class DecryptHandle;
-
-struct AwesomeRenderer : public RefBase {
-    AwesomeRenderer() {}
-
-    virtual void render(MediaBuffer *buffer) = 0;
-
-private:
-    AwesomeRenderer(const AwesomeRenderer &);
-    AwesomeRenderer &operator=(const AwesomeRenderer &);
-};
-
-struct PreviewPlayerBase {
-    PreviewPlayerBase();
-    ~PreviewPlayerBase();
-
-    void setListener(const wp<MediaPlayerBase> &listener);
-
-    status_t setDataSource(
-            const char *uri,
-            const KeyedVector<String8, String8> *headers = NULL);
-
-    status_t setDataSource(int fd, int64_t offset, int64_t length);
-
-    status_t setDataSource(const sp<IStreamSource> &source);
-
-    void reset();
-
-    status_t prepare();
-    status_t prepare_l();
-    status_t prepareAsync();
-    status_t prepareAsync_l();
-
-    status_t play();
-    status_t pause();
-
-    bool isPlaying() const;
-
-    void setSurface(const sp<Surface> &surface);
-    void setSurfaceTexture(const sp<ISurfaceTexture> &surfaceTexture);
-    void setAudioSink(const sp<MediaPlayerBase::AudioSink> &audioSink);
-    status_t setLooping(bool shouldLoop);
-
-    status_t getDuration(int64_t *durationUs);
-    status_t getPosition(int64_t *positionUs);
-
-    status_t setParameter(int key, const Parcel &request);
-    status_t getParameter(int key, Parcel *reply);
-
-    status_t seekTo(int64_t timeUs);
-
-    // This is a mask of MediaExtractor::Flags.
-    uint32_t flags() const;
-
-    void postAudioEOS(int64_t delayUs = 0ll);
-    void postAudioSeekComplete();
-
-private:
-    friend struct AwesomeEvent;
-    friend struct PreviewPlayer;
-
-    enum {
-        PLAYING             = 1,
-        LOOPING             = 2,
-        FIRST_FRAME         = 4,
-        PREPARING           = 8,
-        PREPARED            = 16,
-        AT_EOS              = 32,
-        PREPARE_CANCELLED   = 64,
-        CACHE_UNDERRUN      = 128,
-        AUDIO_AT_EOS        = 256,
-        VIDEO_AT_EOS        = 512,
-        AUTO_LOOPING        = 1024,
-
-        // We are basically done preparing but are currently buffering
-        // sufficient data to begin playback and finish the preparation phase
-        // for good.
-        PREPARING_CONNECTED = 2048,
-
-        // We're triggering a single video event to display the first frame
-        // after the seekpoint.
-        SEEK_PREVIEW        = 4096,
-
-        AUDIO_RUNNING       = 8192,
-        AUDIOPLAYER_STARTED = 16384,
-
-        INCOGNITO           = 32768,
-    };
-
-    mutable Mutex mLock;
-    Mutex mMiscStateLock;
-
-    OMXClient mClient;
-    TimedEventQueue mQueue;
-    bool mQueueStarted;
-    wp<MediaPlayerBase> mListener;
-
-    sp<Surface> mSurface;
-    sp<ANativeWindow> mNativeWindow;
-    sp<MediaPlayerBase::AudioSink> mAudioSink;
-
-    SystemTimeSource mSystemTimeSource;
-    TimeSource *mTimeSource;
-
-    String8 mUri;
-    KeyedVector<String8, String8> mUriHeaders;
-
-    sp<DataSource> mFileSource;
-
-    sp<MediaSource> mVideoTrack;
-    sp<MediaSource> mVideoSource;
-    sp<AwesomeRenderer> mVideoRenderer;
-    bool mVideoRendererIsPreview;
-
-    sp<MediaSource> mAudioTrack;
-    sp<MediaSource> mAudioSource;
-    AudioPlayerBase *mAudioPlayer;
-    int64_t mDurationUs;
-
-    int32_t mDisplayWidth;
-    int32_t mDisplayHeight;
-
-    uint32_t mFlags;
-    uint32_t mExtractorFlags;
-
-    int64_t mTimeSourceDeltaUs;
-    int64_t mVideoTimeUs;
-
-    enum SeekType {
-        NO_SEEK,
-        SEEK,
-        SEEK_VIDEO_ONLY
-    };
-    SeekType mSeeking;
-
-    bool mSeekNotificationSent;
-    int64_t mSeekTimeUs;
-
-    int64_t mBitrate;  // total bitrate of the file (in bps) or -1 if unknown.
-
-    bool mWatchForAudioSeekComplete;
-    bool mWatchForAudioEOS;
-
-    sp<TimedEventQueue::Event> mVideoEvent;
-    bool mVideoEventPending;
-    sp<TimedEventQueue::Event> mStreamDoneEvent;
-    bool mStreamDoneEventPending;
-    sp<TimedEventQueue::Event> mBufferingEvent;
-    bool mBufferingEventPending;
-    sp<TimedEventQueue::Event> mCheckAudioStatusEvent;
-    bool mAudioStatusEventPending;
-    sp<TimedEventQueue::Event> mVideoLagEvent;
-    bool mVideoLagEventPending;
-
-    sp<TimedEventQueue::Event> mAsyncPrepareEvent;
-    Condition mPreparedCondition;
-    bool mIsAsyncPrepare;
-    status_t mPrepareResult;
-    status_t mStreamDoneStatus;
-
-    void postVideoEvent_l(int64_t delayUs = -1);
-    void postBufferingEvent_l();
-    void postStreamDoneEvent_l(status_t status);
-    void postCheckAudioStatusEvent_l(int64_t delayUs);
-    void postVideoLagEvent_l();
-    status_t play_l();
-
-    MediaBuffer *mVideoBuffer;
-
-    sp<HTTPBase> mConnectingDataSource;
-    sp<NuCachedSource2> mCachedSource;
-
-    DrmManagerClient *mDrmManagerClient;
-    sp<DecryptHandle> mDecryptHandle;
-
-    int64_t mLastVideoTimeUs;
-
-    ARect mCropRect;
-    int32_t mGivenWidth, mGivenHeight;
-
-    status_t setDataSource_l(
-            const char *uri,
-            const KeyedVector<String8, String8> *headers = NULL);
-
-    status_t setDataSource_l(const sp<DataSource> &dataSource);
-    status_t setDataSource_l(const sp<MediaExtractor> &extractor);
-    void reset_l();
-    status_t seekTo_l(int64_t timeUs);
-    status_t pause_l(bool at_eos = false);
-    void initRenderer_l();
-    void notifyVideoSize_l();
-    void seekAudioIfNecessary_l();
-
-    void cancelPlayerEvents(bool keepBufferingGoing = false);
-
-    void setAudioSource(sp<MediaSource> source);
-    status_t initAudioDecoder();
-
-    void setVideoSource(sp<MediaSource> source);
-    status_t initVideoDecoder(uint32_t flags = 0);
-
-    void onStreamDone();
-
-    void notifyListener_l(int msg, int ext1 = 0, int ext2 = 0);
-
-    void onVideoEvent();
-    void onBufferingUpdate();
-    void onCheckAudioStatus();
-    void onPrepareAsyncEvent();
-    void abortPrepare(status_t err);
-    void finishAsyncPrepare_l();
-    void onVideoLagUpdate();
-
-    bool getCachedDuration_l(int64_t *durationUs, bool *eos);
-
-    status_t finishSetDataSource_l();
-
-    static bool ContinuePreparation(void *cookie);
-
-    bool getBitrate(int64_t *bitrate);
-
-    void finishSeekIfNecessary(int64_t videoTimeUs);
-    void ensureCacheIsFetching_l();
-
-    status_t startAudioPlayer_l();
-
-    void shutdownVideoDecoder_l();
-    void setNativeWindow_l(const sp<ANativeWindow> &native);
-
-    PreviewPlayerBase(const PreviewPlayerBase &);
-    PreviewPlayerBase &operator=(const PreviewPlayerBase &);
-};
-
-}  // namespace android
-
-#endif  // PREVIEW_PLAYER_BASE_H_
-
diff --git a/libvideoeditor/lvpp/PreviewRenderer.cpp b/libvideoeditor/lvpp/PreviewRenderer.cpp
index 59ac712..4aa4eb3 100755
--- a/libvideoeditor/lvpp/PreviewRenderer.cpp
+++ b/libvideoeditor/lvpp/PreviewRenderer.cpp
@@ -21,8 +21,8 @@
 
 #include "PreviewRenderer.h"
 
-#include <media/stagefright/MediaDebug.h>
-#include <surfaceflinger/Surface.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <gui/Surface.h>
 
 namespace android {
 
@@ -98,7 +98,7 @@
     int err = OK;
 
     if ((err = mSurface->ANativeWindow::dequeueBuffer(mSurface.get(), &mBuf)) != 0) {
-        LOGW("Surface::dequeueBuffer returned error %d", err);
+        ALOGW("Surface::dequeueBuffer returned error %d", err);
         return;
     }
 
@@ -132,7 +132,7 @@
         CHECK_EQ(0, mapper.unlock(mBuf->handle));
 
         if ((err = mSurface->ANativeWindow::queueBuffer(mSurface.get(), mBuf)) != 0) {
-            LOGW("Surface::queueBuffer returned error %d", err);
+            ALOGW("Surface::queueBuffer returned error %d", err);
         }
     }
     mBuf = NULL;
diff --git a/libvideoeditor/lvpp/PreviewRenderer.h b/libvideoeditor/lvpp/PreviewRenderer.h
index 4685d6f..91c2295 100755
--- a/libvideoeditor/lvpp/PreviewRenderer.h
+++ b/libvideoeditor/lvpp/PreviewRenderer.h
@@ -20,7 +20,7 @@
 
 #include <media/stagefright/ColorConverter.h>
 #include <utils/RefBase.h>
-#include <ui/android_native_buffer.h>
+#include <system/window.h>
 #include <ui/GraphicBufferMapper.h>
 #include "SoftwareRenderer.h"
 
diff --git a/libvideoeditor/lvpp/VideoEditorAudioPlayer.cpp b/libvideoeditor/lvpp/VideoEditorAudioPlayer.cpp
index 89a0450..d0ee51b 100755
--- a/libvideoeditor/lvpp/VideoEditorAudioPlayer.cpp
+++ b/libvideoeditor/lvpp/VideoEditorAudioPlayer.cpp
@@ -21,7 +21,7 @@
 #include <binder/IPCThreadState.h>
 #include <media/AudioTrack.h>
 #include <VideoEditorAudioPlayer.h>
-#include <media/stagefright/MediaDebug.h>
+#include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/MediaDefs.h>
 #include <media/stagefright/MediaErrors.h>
 #include <media/stagefright/MediaSource.h>
@@ -34,10 +34,26 @@
 
 VideoEditorAudioPlayer::VideoEditorAudioPlayer(
         const sp<MediaPlayerBase::AudioSink> &audioSink,
-        PreviewPlayerBase *observer)
-    : AudioPlayerBase(audioSink, observer) {
+        PreviewPlayer *observer)
+    : mAudioTrack(NULL),
+      mInputBuffer(NULL),
+      mSampleRate(0),
+      mLatencyUs(0),
+      mFrameSize(0),
+      mNumFramesPlayed(0),
+      mPositionTimeMediaUs(-1),
+      mPositionTimeRealUs(-1),
+      mSeeking(false),
+      mReachedEOS(false),
+      mFinalStatus(OK),
+      mStarted(false),
+      mIsFirstBuffer(false),
+      mFirstBufferResult(OK),
+      mFirstBuffer(NULL),
+      mAudioSink(audioSink),
+      mObserver(observer) {
 
-    LOGV("VideoEditorAudioPlayer");
+    ALOGV("Constructor");
     mBGAudioPCMFileHandle = NULL;
     mAudioProcess = NULL;
     mBGAudioPCMFileLength = 0;
@@ -54,7 +70,7 @@
 
 VideoEditorAudioPlayer::~VideoEditorAudioPlayer() {
 
-    LOGV("~VideoEditorAudioPlayer");
+    ALOGV("Destructor");
     if (mStarted) {
         reset();
     }
@@ -63,6 +79,184 @@
         mAudioProcess = NULL;
     }
 }
+
+void VideoEditorAudioPlayer::pause(bool playPendingSamples) {
+    ALOGV("pause: playPendingSamples=%d", playPendingSamples);
+    CHECK(mStarted);
+
+    if (playPendingSamples) {
+        if (mAudioSink.get() != NULL) {
+            mAudioSink->stop();
+        } else {
+            mAudioTrack->stop();
+        }
+    } else {
+        if (mAudioSink.get() != NULL) {
+            mAudioSink->pause();
+        } else {
+            mAudioTrack->pause();
+        }
+    }
+}
+
+void VideoEditorAudioPlayer::clear() {
+    ALOGV("clear");
+    if (!mStarted) {
+        return;
+    }
+
+    if (mAudioSink.get() != NULL) {
+        mAudioSink->stop();
+        mAudioSink->close();
+    } else {
+        mAudioTrack->stop();
+
+        delete mAudioTrack;
+        mAudioTrack = NULL;
+    }
+
+    // Make sure to release any buffer we hold onto so that the
+    // source is able to stop().
+
+    if (mFirstBuffer != NULL) {
+        mFirstBuffer->release();
+        mFirstBuffer = NULL;
+    }
+
+    if (mInputBuffer != NULL) {
+        ALOGV("AudioPlayerBase releasing input buffer.");
+
+        mInputBuffer->release();
+        mInputBuffer = NULL;
+    }
+
+    mSource->stop();
+
+    // The following hack is necessary to ensure that the OMX
+    // component is completely released by the time we may try
+    // to instantiate it again.
+    wp<MediaSource> tmp = mSource;
+    mSource.clear();
+    while (tmp.promote() != NULL) {
+        usleep(1000);
+    }
+    IPCThreadState::self()->flushCommands();
+
+    mNumFramesPlayed = 0;
+    mPositionTimeMediaUs = -1;
+    mPositionTimeRealUs = -1;
+    mSeeking = false;
+    mReachedEOS = false;
+    mFinalStatus = OK;
+    mStarted = false;
+}
+
+void VideoEditorAudioPlayer::resume() {
+    ALOGV("resume");
+
+    AudioMixSettings audioMixSettings;
+
+    // Single audio player is used;
+    // Pass on the audio ducking parameters
+    // which might have changed with new audio source
+    audioMixSettings.lvInDucking_threshold =
+        mAudioMixSettings->uiInDucking_threshold;
+    audioMixSettings.lvInDucking_lowVolume =
+        ((M4OSA_Float)mAudioMixSettings->uiInDucking_lowVolume) / 100.0;
+    audioMixSettings.lvInDucking_enable =
+        mAudioMixSettings->bInDucking_enable;
+    audioMixSettings.lvPTVolLevel =
+        ((M4OSA_Float)mBGAudioStoryBoardCurrentMediaVolumeVal) / 100.0;
+    audioMixSettings.lvBTVolLevel =
+        ((M4OSA_Float)mAudioMixSettings->uiAddVolume) / 100.0;
+    audioMixSettings.lvBTChannelCount = mAudioMixSettings->uiBTChannelCount;
+    audioMixSettings.lvPTChannelCount = mAudioMixSettings->uiNbChannels;
+
+    // Call to Audio mix param setting
+    mAudioProcess->setMixParams(audioMixSettings);
+
+    CHECK(mStarted);
+
+    if (mAudioSink.get() != NULL) {
+        mAudioSink->start();
+    } else {
+        mAudioTrack->start();
+    }
+}
+
+status_t VideoEditorAudioPlayer::seekTo(int64_t time_us) {
+    ALOGV("seekTo: %lld", time_us);
+    Mutex::Autolock autoLock(mLock);
+
+    mSeeking = true;
+    mPositionTimeRealUs = mPositionTimeMediaUs = -1;
+    mReachedEOS = false;
+    mSeekTimeUs = time_us;
+
+    if (mAudioSink != NULL) {
+        mAudioSink->flush();
+    } else {
+        mAudioTrack->flush();
+    }
+
+    return OK;
+}
+
+bool VideoEditorAudioPlayer::isSeeking() {
+    Mutex::Autolock lock(mLock);
+    ALOGV("isSeeking: mSeeking=%d", mSeeking);
+    return mSeeking;
+}
+
+bool VideoEditorAudioPlayer::reachedEOS(status_t *finalStatus) {
+    ALOGV("reachedEOS: status=%d", mFinalStatus);
+    *finalStatus = OK;
+
+    Mutex::Autolock autoLock(mLock);
+    *finalStatus = mFinalStatus;
+    return mReachedEOS;
+}
+
+int64_t VideoEditorAudioPlayer::getRealTimeUs() {
+    Mutex::Autolock autoLock(mLock);
+    return getRealTimeUs_l();
+}
+
+int64_t VideoEditorAudioPlayer::getRealTimeUs_l() {
+    return -mLatencyUs + (mNumFramesPlayed * 1000000) / mSampleRate;
+}
+
+int64_t VideoEditorAudioPlayer::getMediaTimeUs() {
+    ALOGV("getMediaTimeUs");
+    Mutex::Autolock autoLock(mLock);
+
+    if (mPositionTimeMediaUs < 0 || mPositionTimeRealUs < 0) {
+        if (mSeeking) {
+            return mSeekTimeUs;
+        }
+
+        return 0;
+    }
+
+    int64_t realTimeOffset = getRealTimeUs_l() - mPositionTimeRealUs;
+    if (realTimeOffset < 0) {
+        realTimeOffset = 0;
+    }
+
+    return mPositionTimeMediaUs + realTimeOffset;
+}
+
+bool VideoEditorAudioPlayer::getMediaTimeMapping(
+        int64_t *realtime_us, int64_t *mediatime_us) {
+    ALOGV("getMediaTimeMapping");
+    Mutex::Autolock autoLock(mLock);
+
+    *realtime_us = mPositionTimeRealUs;
+    *mediatime_us = mPositionTimeMediaUs;
+
+    return mPositionTimeRealUs != -1 && mPositionTimeMediaUs != -1;
+}
+
 void VideoEditorAudioPlayer::setSource(const sp<MediaSource> &source) {
     Mutex::Autolock autoLock(mLock);
 
@@ -76,7 +270,7 @@
     }
 
     if (mInputBuffer != NULL) {
-        LOGV("VideoEditorAudioPlayer releasing input buffer.");
+        ALOGV("VideoEditorAudioPlayer releasing input buffer.");
 
         mInputBuffer->release();
         mInputBuffer = NULL;
@@ -96,8 +290,8 @@
     return mSource;
 }
 
-void VideoEditorAudioPlayer::setObserver(PreviewPlayerBase *observer) {
-    LOGV("setObserver");
+void VideoEditorAudioPlayer::setObserver(PreviewPlayer *observer) {
+    ALOGV("setObserver");
     //CHECK(!mStarted);
     mObserver = observer;
 }
@@ -106,11 +300,28 @@
     return mStarted;
 }
 
+// static
+void VideoEditorAudioPlayer::AudioCallback(int event, void *user, void *info) {
+    static_cast<VideoEditorAudioPlayer *>(user)->AudioCallback(event, info);
+}
+
+
+void VideoEditorAudioPlayer::AudioCallback(int event, void *info) {
+    if (event != AudioTrack::EVENT_MORE_DATA) {
+        return;
+    }
+
+    AudioTrack::Buffer *buffer = (AudioTrack::Buffer *)info;
+    size_t numBytesWritten = fillBuffer(buffer->raw, buffer->size);
+
+    buffer->size = numBytesWritten;
+}
+
 status_t VideoEditorAudioPlayer::start(bool sourceAlreadyStarted) {
     Mutex::Autolock autoLock(mLock);
     CHECK(!mStarted);
     CHECK(mSource != NULL);
-    LOGV("Start");
+    ALOGV("Start");
     status_t err;
     M4OSA_ERR result = M4NO_ERROR;
     M4OSA_UInt32 startTime = 0;
@@ -126,7 +337,7 @@
 
     // Create the BG Audio handler
     mAudioProcess = new VideoEditorBGAudioProcessing();
-    veAudMixSettings audioMixSettings;
+    AudioMixSettings audioMixSettings;
 
     // Pass on the audio ducking parameters
     audioMixSettings.lvInDucking_threshold =
@@ -143,7 +354,7 @@
     audioMixSettings.lvPTChannelCount = mAudioMixSettings->uiNbChannels;
 
     // Call to Audio mix param setting
-    mAudioProcess->veSetAudioProcessingParams(audioMixSettings);
+    mAudioProcess->setMixParams(audioMixSettings);
 
     // Get the BG Audio PCM file details
     if ( mBGAudioPCMFileHandle ) {
@@ -157,12 +368,12 @@
         mBGAudioPCMFileTrimmedLength = mBGAudioPCMFileLength;
 
 
-        LOGV("VideoEditorAudioPlayer::start M4OSA_kFileReadGetFileSize = %lld",
+        ALOGV("VideoEditorAudioPlayer::start M4OSA_kFileReadGetFileSize = %lld",
                             mBGAudioPCMFileLength);
 
         // Get the duration in time of the audio BT
         if ( result == M4NO_ERROR ) {
-         LOGV("VEAP: channels = %d freq = %d",
+         ALOGV("VEAP: channels = %d freq = %d",
          mAudioMixSettings->uiNbChannels,  mAudioMixSettings->uiSamplingFrequency);
 
             // No trim
@@ -171,7 +382,7 @@
                     mAudioMixSettings->uiNbChannels))*1000 ) /
                     mAudioMixSettings->uiSamplingFrequency;
 
-            LOGV("VideoEditorAudioPlayer:: beginCutMs %d , endCutMs %d",
+            ALOGV("VideoEditorAudioPlayer:: beginCutMs %d , endCutMs %d",
                     (unsigned int) mAudioMixSettings->beginCutMs,
                     (unsigned int) mAudioMixSettings->endCutMs);
 
@@ -212,15 +423,15 @@
                     sizeof(M4OSA_UInt16)) / 1000; /*make to sec from ms*/
             }
 
-            LOGV("VideoEditorAudioPlayer: file duration recorded : %lld",
+            ALOGV("VideoEditorAudioPlayer: file duration recorded : %lld",
                     mBGAudioPCMFileDuration);
         }
 
         // Last played location to be seeked at for next media item
         if ( result == M4NO_ERROR ) {
-            LOGV("VideoEditorAudioPlayer::mBGAudioStoryBoardSkimTimeStamp %lld",
+            ALOGV("VideoEditorAudioPlayer::mBGAudioStoryBoardSkimTimeStamp %lld",
                     mBGAudioStoryBoardSkimTimeStamp);
-            LOGV("VideoEditorAudioPlayer::uiAddCts %d",
+            ALOGV("VideoEditorAudioPlayer::uiAddCts %d",
                     mAudioMixSettings->uiAddCts);
             if (mBGAudioStoryBoardSkimTimeStamp >= mAudioMixSettings->uiAddCts) {
                 startTime = (mBGAudioStoryBoardSkimTimeStamp -
@@ -230,7 +441,7 @@
                 // do nothing
             }
 
-            LOGV("VideoEditorAudioPlayer::startTime %d", startTime);
+            ALOGV("VideoEditorAudioPlayer::startTime %d", startTime);
             seekTimeStamp = 0;
             if (startTime) {
                 if (startTime >= mBGAudioPCMFileDuration) {
@@ -276,7 +487,7 @@
 
     mFirstBufferResult = mSource->read(&mFirstBuffer);
     if (mFirstBufferResult == INFO_FORMAT_CHANGED) {
-        LOGV("INFO_FORMAT_CHANGED!!!");
+        ALOGV("INFO_FORMAT_CHANGED!!!");
 
         CHECK(mFirstBuffer == NULL);
         mFirstBufferResult = OK;
@@ -300,7 +511,7 @@
 
     if (mAudioSink.get() != NULL) {
         status_t err = mAudioSink->open(
-                mSampleRate, numChannels, AUDIO_FORMAT_PCM_16_BIT,
+                mSampleRate, numChannels, CHANNEL_MASK_USE_CHANNEL_ORDER, AUDIO_FORMAT_PCM_16_BIT,
                 DEFAULT_AUDIOSINK_BUFFERCOUNT,
                 &VideoEditorAudioPlayer::AudioSinkCallback, this);
         if (err != OK) {
@@ -326,7 +537,7 @@
                 (numChannels == 2)
                     ? AUDIO_CHANNEL_OUT_STEREO
                     : AUDIO_CHANNEL_OUT_MONO,
-                0, 0, &AudioCallback, this, 0);
+                0, AUDIO_POLICY_OUTPUT_FLAG_NONE, &AudioCallback, this, 0);
 
         if ((err = mAudioTrack->initCheck()) != OK) {
             delete mAudioTrack;
@@ -355,37 +566,11 @@
     return OK;
 }
 
-void VideoEditorAudioPlayer::resume() {
-
-    veAudMixSettings audioMixSettings;
-
-    // Single audio player is used;
-    // Pass on the audio ducking parameters
-    // which might have changed with new audio source
-    audioMixSettings.lvInDucking_threshold =
-        mAudioMixSettings->uiInDucking_threshold;
-    audioMixSettings.lvInDucking_lowVolume =
-        ((M4OSA_Float)mAudioMixSettings->uiInDucking_lowVolume) / 100.0;
-    audioMixSettings.lvInDucking_enable =
-        mAudioMixSettings->bInDucking_enable;
-    audioMixSettings.lvPTVolLevel =
-        ((M4OSA_Float)mBGAudioStoryBoardCurrentMediaVolumeVal) / 100.0;
-    audioMixSettings.lvBTVolLevel =
-        ((M4OSA_Float)mAudioMixSettings->uiAddVolume) / 100.0;
-    audioMixSettings.lvBTChannelCount = mAudioMixSettings->uiBTChannelCount;
-    audioMixSettings.lvPTChannelCount = mAudioMixSettings->uiNbChannels;
-
-    // Call to Audio mix param setting
-    mAudioProcess->veSetAudioProcessingParams(audioMixSettings);
-
-    //Call the base class
-    AudioPlayerBase::resume();
-}
 
 void VideoEditorAudioPlayer::reset() {
 
-    LOGV("reset");
-    AudioPlayerBase::reset();
+    ALOGV("reset");
+    clear();
 
     // Capture the current seek point
     mBGAudioPCMFileSeekPoint = 0;
@@ -478,8 +663,8 @@
                           (mPositionTimeMediaUs - mSeekTimeUs)) >=
                           (int64_t)(mAudioMixSettings->uiAddCts * 1000)) {
 
-                        LOGV("VideoEditorAudioPlayer::INSIDE MIXING");
-                        LOGV("Checking %lld <= %lld",
+                        ALOGV("VideoEditorAudioPlayer::INSIDE MIXING");
+                        ALOGV("Checking %lld <= %lld",
                             mBGAudioPCMFileSeekPoint-mBGAudioPCMFileOriginalSeekPoint,
                             mBGAudioPCMFileTrimmedLength);
 
@@ -495,7 +680,7 @@
                         pPTMdata = (M4OSA_Int16*) ((uint8_t*) mInputBuffer->data()
                                 + mInputBuffer->range_offset());
 
-                        LOGV("mix with background malloc to do len %d", len);
+                        ALOGV("mix with background malloc to do len %d", len);
 
                         bgFrame.m_dataAddress = (M4OSA_UInt16*)M4OSA_32bitAlignedMalloc( len, 1,
                                                        (M4OSA_Char*)"bgFrame");
@@ -505,7 +690,7 @@
                                                     (M4OSA_Char*)"mixFrame");
                         mixFrame.m_bufferSize = len;
 
-                        LOGV("mix with bgm with size %lld", mBGAudioPCMFileLength);
+                        ALOGV("mix with bgm with size %lld", mBGAudioPCMFileLength);
 
                         CHECK(mInputBuffer->meta_data()->findInt64(kKeyTime,
                                          &mPositionTimeMediaUs));
@@ -514,11 +699,11 @@
                              mBGAudioPCMFileOriginalSeekPoint <=
                               (mBGAudioPCMFileTrimmedLength - len)) {
 
-                            LOGV("Checking mBGAudioPCMFileHandle %d",
+                            ALOGV("Checking mBGAudioPCMFileHandle %d",
                                 (unsigned int)mBGAudioPCMFileHandle);
 
                             if (mBGAudioPCMFileHandle != M4OSA_NULL) {
-                                LOGV("fillBuffer seeking file to %lld",
+                                ALOGV("fillBuffer seeking file to %lld",
                                     mBGAudioPCMFileSeekPoint);
 
                             // TODO : 32bits required for OSAL
@@ -531,7 +716,7 @@
                                 mBGAudioPCMFileSeekPoint = tmp32;
 
                                 if (err != M4NO_ERROR){
-                                    LOGE("M4OSA_fileReadSeek err %d",(int)err);
+                                    ALOGE("M4OSA_fileReadSeek err %d",(int)err);
                                 }
 
                                 err = M4OSA_fileReadData(mBGAudioPCMFileHandle,
@@ -539,7 +724,7 @@
                                        (M4OSA_UInt32*)&len);
                                 if (err == M4WAR_NO_DATA_YET ) {
 
-                                    LOGV("fillBuffer End of file reached");
+                                    ALOGV("fillBuffer End of file reached");
                                     err = M4NO_ERROR;
 
                                     // We reached the end of file
@@ -550,7 +735,7 @@
                                           mAudioMixSettings->uiSamplingFrequency) *
                                           mAudioMixSettings->uiNbChannels *
                                            sizeof(M4OSA_UInt16)) / 1000;
-                                        LOGV("fillBuffer Looping \
+                                        ALOGV("fillBuffer Looping \
                                             to mBGAudioPCMFileSeekPoint %lld",
                                             mBGAudioPCMFileSeekPoint);
                                     }
@@ -563,10 +748,10 @@
                                         }
                                     }
                                 } else if (err != M4NO_ERROR ) {
-                                     LOGV("fileReadData for audio err %d", err);
+                                     ALOGV("fileReadData for audio err %d", err);
                                 } else {
                                     mBGAudioPCMFileSeekPoint += len;
-                                    LOGV("fillBuffer mBGAudioPCMFileSeekPoint \
+                                    ALOGV("fillBuffer mBGAudioPCMFileSeekPoint \
                                          %lld", mBGAudioPCMFileSeekPoint);
 
                                     // Assign the ptr data to primary track
@@ -574,7 +759,7 @@
                                     ptFrame.m_bufferSize = len;
 
                                     // Call to mix and duck
-                                    mAudioProcess->veProcessAudioMixNDuck(
+                                    mAudioProcess->mixAndDuck(
                                          &ptFrame, &bgFrame, &mixFrame);
 
                                         // Overwrite the decoded buffer
@@ -617,7 +802,7 @@
             Mutex::Autolock autoLock(mLock);
 
             if (status != OK) {
-                LOGV("fillBuffer: mSource->read returned err %d", status);
+                ALOGV("fillBuffer: mSource->read returned err %d", status);
                 if (mObserver && !mReachedEOS) {
                     postEOS = true;
                 }
@@ -634,7 +819,7 @@
                 ((mNumFramesPlayed + size_done / mFrameSize) * 1000000)
                     / mSampleRate;
 
-            LOGV("buffer->size() = %d, "
+            ALOGV("buffer->size() = %d, "
                      "mPositionTimeMediaUs=%.2f mPositionTimeRealUs=%.2f",
                  mInputBuffer->range_length(),
                  mPositionTimeMediaUs / 1E6, mPositionTimeRealUs / 1E6);
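
The TimeSource methods added above derive the playback clock from the number of audio frames handed to the sink. A minimal standalone sketch of that arithmetic follows, with illustrative names and none of the locking from the real code:

    // Sketch of the audio-clock arithmetic in getRealTimeUs_l()/getMediaTimeUs().
    #include <algorithm>
    #include <cstdint>

    struct AudioClockSketch {
        int64_t latencyUs;            // sink latency reported by the AudioTrack/AudioSink
        int64_t numFramesPlayed;      // frames consumed by the hardware so far
        int sampleRate;               // e.g. 44100
        int64_t positionTimeMediaUs;  // media timestamp of the last decoded buffer
        int64_t positionTimeRealUs;   // audio-clock time at which that buffer starts

        // Real time on the audio clock, compensated for sink latency.
        int64_t realTimeUs() const {
            return -latencyUs + (numFramesPlayed * 1000000) / sampleRate;
        }

        // Media time: last media timestamp plus the non-negative real-time offset.
        int64_t mediaTimeUs() const {
            int64_t offset = std::max<int64_t>(0, realTimeUs() - positionTimeRealUs);
            return positionTimeMediaUs + offset;
        }
    };
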
diff --git a/libvideoeditor/lvpp/VideoEditorAudioPlayer.h b/libvideoeditor/lvpp/VideoEditorAudioPlayer.h
index 450a70a..626df39 100755
--- a/libvideoeditor/lvpp/VideoEditorAudioPlayer.h
+++ b/libvideoeditor/lvpp/VideoEditorAudioPlayer.h
@@ -15,19 +15,18 @@
  */
 
 #ifndef VE_AUDIO_PLAYER_H_
-
 #define VE_AUDIO_PLAYER_H_
 
 #include <media/MediaPlayerInterface.h>
 #include <media/stagefright/MediaBuffer.h>
 #include <media/stagefright/TimeSource.h>
 #include <utils/threads.h>
+
 #include "M4xVSS_API.h"
 #include "VideoEditorMain.h"
 #include "M4OSA_FileReader.h"
 #include "VideoEditorBGAudioProcessing.h"
-#include "AudioPlayerBase.h"
-#include "PreviewPlayerBase.h"
+
 
 namespace android {
 
@@ -35,8 +34,7 @@
 class AudioTrack;
 class PreviewPlayer;
 
-
-class VideoEditorAudioPlayer : public AudioPlayerBase {
+class VideoEditorAudioPlayer : public TimeSource {
 public:
     enum {
         REACHED_EOS,
@@ -44,12 +42,26 @@
     };
 
     VideoEditorAudioPlayer(const sp<MediaPlayerBase::AudioSink> &audioSink,
-        PreviewPlayerBase *audioObserver = NULL);
+        PreviewPlayer *audioObserver = NULL);
 
-    virtual ~VideoEditorAudioPlayer();
+    ~VideoEditorAudioPlayer();
+
+    // Return time in us.
+    int64_t getRealTimeUs();
+
+    // Returns the timestamp of the last buffer played (in us).
+    int64_t getMediaTimeUs();
+
+    // Returns true iff a mapping is established, i.e. the audio player
+    // has played at least one frame of audio.
+    bool getMediaTimeMapping(int64_t *realtime_us, int64_t *mediatime_us);
 
     status_t start(bool sourceAlreadyStarted = false);
+    void pause(bool playPendingSamples = false);
     void resume();
+    status_t seekTo(int64_t time_us);
+    bool isSeeking();
+    bool reachedEOS(status_t *finalStatus);
 
     void setAudioMixSettings(M4xVSS_AudioMixingSettings* pAudioMixSettings);
     void setAudioMixPCMFileHandle(M4OSA_Context pBGAudioPCMFileHandle);
@@ -58,7 +70,7 @@
         M4OSA_UInt32 pBGAudioCurrentMediaBeginCutTS,
         M4OSA_UInt32 pBGAudioCurrentMediaVolumeVal);
 
-    void setObserver(PreviewPlayerBase *observer);
+    void setObserver(PreviewPlayer *observer);
     void setSource(const sp<MediaSource> &source);
     sp<MediaSource> getSource();
 
@@ -78,15 +90,48 @@
     int64_t mBGAudioStoryBoardCurrentMediaBeginCutTS;
     int64_t mBGAudioStoryBoardCurrentMediaVolumeVal;
 
+    sp<MediaSource> mSource;
+    AudioTrack *mAudioTrack;
+
+    MediaBuffer *mInputBuffer;
+
+    int mSampleRate;
+    int64_t mLatencyUs;
+    size_t mFrameSize;
+
+    Mutex mLock;
+    int64_t mNumFramesPlayed;
+
+    int64_t mPositionTimeMediaUs;
+    int64_t mPositionTimeRealUs;
+
+    bool mSeeking;
+    bool mReachedEOS;
+    status_t mFinalStatus;
+    int64_t mSeekTimeUs;
+
+    bool mStarted;
+
+    bool mIsFirstBuffer;
+    status_t mFirstBufferResult;
+    MediaBuffer *mFirstBuffer;
+
+    sp<MediaPlayerBase::AudioSink> mAudioSink;
+    PreviewPlayer *mObserver;
+
+    static void AudioCallback(int event, void *user, void *info);
+    void AudioCallback(int event, void *info);
     size_t fillBuffer(void *data, size_t size);
-
-    void reset();
-    void setPrimaryTrackVolume(M4OSA_Int16 *data, M4OSA_UInt32 size, M4OSA_Float volLevel);
-
     static size_t AudioSinkCallback(
             MediaPlayerBase::AudioSink *audioSink,
             void *data, size_t size, void *me);
 
+    void reset();
+    void clear();
+    int64_t getRealTimeUs_l();
+    void setPrimaryTrackVolume(
+            M4OSA_Int16 *data, M4OSA_UInt32 size, M4OSA_Float volLevel);
+
     VideoEditorAudioPlayer(const VideoEditorAudioPlayer &);
     VideoEditorAudioPlayer &operator=(const VideoEditorAudioPlayer &);
 };
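
The AudioCallback pair declared above (static plus member) is the usual trampoline for C-style callbacks: the static function receives the object through the user cookie and forwards to the member. A generic sketch with hypothetical names:

    // Trampoline pattern: a static C-style callback dispatching to a member.
    class PlayerSketch {
    public:
        static void Callback(int event, void *user, void *info) {
            // 'user' is the cookie registered with the track; cast back to the object.
            static_cast<PlayerSketch *>(user)->onCallback(event, info);
        }
    private:
        void onCallback(int event, void *info) {
            // Real code would only act on EVENT_MORE_DATA and fill the buffer in 'info'.
            (void)event;
            (void)info;
        }
    };
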
diff --git a/libvideoeditor/lvpp/VideoEditorBGAudioProcessing.cpp b/libvideoeditor/lvpp/VideoEditorBGAudioProcessing.cpp
index e3bbdf2..e24fcf4 100755
--- a/libvideoeditor/lvpp/VideoEditorBGAudioProcessing.cpp
+++ b/libvideoeditor/lvpp/VideoEditorBGAudioProcessing.cpp
@@ -14,7 +14,7 @@
  * limitations under the License.
  */
 
-#define LOG_NDEBUG 1
+//#define LOG_NDEBUG 0
 #define LOG_TAG "VideoEditorBGAudioProcessing"
 #include <utils/Log.h>
 #include "VideoEditorBGAudioProcessing.h"
@@ -22,8 +22,7 @@
 namespace android {
 
 VideoEditorBGAudioProcessing::VideoEditorBGAudioProcessing() {
-
-    LOGV("VideoEditorBGAudioProcessing:: Construct  VideoEditorBGAudioProcessing ");
+    ALOGV("Constructor");
 
     mAudVolArrIndex = 0;
     mDoDucking = 0;
@@ -46,28 +45,18 @@
     mBTChannelCount = 1;
 }
 
-M4OSA_Int32 VideoEditorBGAudioProcessing::veProcessAudioMixNDuck(
-        void *pPTBuffer, void *pBTBuffer, void *pOutBuffer) {
+M4OSA_Int32 VideoEditorBGAudioProcessing::mixAndDuck(
+        void *primaryTrackBuffer,
+        void *backgroundTrackBuffer,
+        void *outBuffer) {
 
-    M4AM_Buffer16* pPrimaryTrack   = (M4AM_Buffer16*)pPTBuffer;
-    M4AM_Buffer16* pBackgroundTrack = (M4AM_Buffer16*)pBTBuffer;
-    M4AM_Buffer16* pMixedOutBuffer  = (M4AM_Buffer16*)pOutBuffer;
+    ALOGV("mixAndDuck: track buffers (primary: 0x%x and background: 0x%x) "
+            "and out buffer 0x%x",
+            primaryTrackBuffer, backgroundTrackBuffer, outBuffer);
 
-    LOGV("VideoEditorBGAudioProcessing::lvProcessAudioMixNDuck \
-        pPTBuffer 0x%x pBTBuffer 0x%x pOutBuffer 0x%x", pPTBuffer,
-        pBTBuffer, pOutBuffer);
-
-    M4OSA_ERR result = M4NO_ERROR;
-    M4OSA_Int16 *pBTMdata1;
-    M4OSA_Int16 *pPTMdata2;
-    M4OSA_UInt32 uiPCMsize;
-
-    // Ducking variable
-    M4OSA_UInt16 loopIndex = 0;
-    M4OSA_Int16 *pPCM16Sample = M4OSA_NULL;
-    M4OSA_Int32 peakDbValue = 0;
-    M4OSA_Int32 previousDbValue = 0;
-    M4OSA_UInt32 i;
+    M4AM_Buffer16* pPrimaryTrack   = (M4AM_Buffer16*)primaryTrackBuffer;
+    M4AM_Buffer16* pBackgroundTrack = (M4AM_Buffer16*)backgroundTrackBuffer;
+    M4AM_Buffer16* pMixedOutBuffer  = (M4AM_Buffer16*)outBuffer;
 
     // Output size if same as PT size
     pMixedOutBuffer->m_bufferSize = pPrimaryTrack->m_bufferSize;
@@ -76,23 +65,23 @@
     memcpy((void *)pMixedOutBuffer->m_dataAddress,
         (void *)pPrimaryTrack->m_dataAddress, pMixedOutBuffer->m_bufferSize);
 
+    // Initialize ducking variables
     // Initially contains the input primary track
-    pPTMdata2 = (M4OSA_Int16*)pMixedOutBuffer->m_dataAddress;
+    M4OSA_Int16 *pPTMdata2 = (M4OSA_Int16*)pMixedOutBuffer->m_dataAddress;
+
+    // Contains BG track processed data (channel conversion, etc.)
-    pBTMdata1 = (M4OSA_Int16*) pBackgroundTrack->m_dataAddress;
+    M4OSA_Int16 *pBTMdata1 = (M4OSA_Int16*) pBackgroundTrack->m_dataAddress;
 
     // Since we need to give sample count and not buffer size
-    uiPCMsize = pMixedOutBuffer->m_bufferSize/2 ;
+    M4OSA_UInt32 uiPCMsize = pMixedOutBuffer->m_bufferSize / 2 ;
 
     if ((mDucking_enable) && (mPTVolLevel != 0.0)) {
-        // LOGI("VideoEditorBGAudioProcessing:: In Ducking analysis ");
-        loopIndex = 0;
-        peakDbValue = 0;
-        previousDbValue = peakDbValue;
+        M4OSA_Int32 peakDbValue = 0;
+        M4OSA_Int32 previousDbValue = 0;
+        M4OSA_Int16 *pPCM16Sample = (M4OSA_Int16*)pPrimaryTrack->m_dataAddress;
+        const size_t n = pPrimaryTrack->m_bufferSize / sizeof(M4OSA_Int16);
 
-        pPCM16Sample = (M4OSA_Int16*)pPrimaryTrack->m_dataAddress;
-
-        while (loopIndex < pPrimaryTrack->m_bufferSize/sizeof(M4OSA_Int16)) {
+        for (size_t loopIndex = 0; loopIndex < n; ++loopIndex) {
             if (pPCM16Sample[loopIndex] >= 0) {
                 peakDbValue = previousDbValue > pPCM16Sample[loopIndex] ?
                         previousDbValue : pPCM16Sample[loopIndex];
@@ -102,19 +91,15 @@
                         previousDbValue: -pPCM16Sample[loopIndex];
                 previousDbValue = peakDbValue;
             }
-            loopIndex++;
         }
 
         mAudioVolumeArray[mAudVolArrIndex] = getDecibelSound(peakDbValue);
 
-        LOGV("VideoEditorBGAudioProcessing:: getDecibelSound %d",
-            mAudioVolumeArray[mAudVolArrIndex]);
+        // The threshold check is done after kProcessingWindowSize cycles
+        if (mAudVolArrIndex >= kProcessingWindowSize - 1) {
+            mDoDucking = isThresholdBreached(
+                    mAudioVolumeArray, mAudVolArrIndex, mDucking_threshold);
 
-        // WINDOW_SIZE is 10 by default
-        // Check for threshold is done after 10 cycles
-        if (mAudVolArrIndex >= WINDOW_SIZE - 1) {
-            mDoDucking = isThresholdBreached(mAudioVolumeArray,
-            mAudVolArrIndex,mDucking_threshold );
             mAudVolArrIndex = 0;
         } else {
             mAudVolArrIndex++;
@@ -136,7 +121,7 @@
                 // FADE OUT BG Track
                 // Increment ducking factor in total steps in factor
                 // of low volume steps to reach low volume level
-                mDuckingFactor -= (mDucking_lowVolume);
+                mDuckingFactor -= mDucking_lowVolume;
             } else {
                 mDuckingFactor = mDucking_lowVolume;
             }
@@ -145,7 +130,7 @@
                 // FADE IN BG Track
                 // Increment ducking factor in total steps of
                 // low volume factor to reach orig.volume level
-                mDuckingFactor += (mDucking_lowVolume);
+                mDuckingFactor += mDucking_lowVolume;
             } else {
                 mDuckingFactor = 1.0;
             }
@@ -153,14 +138,11 @@
     } // end if - mDucking_enable
 
 
-    // Mixing Logic
-
-    LOGV("VideoEditorBGAudioProcessing:: Out of Ducking analysis uiPCMsize\
-        %d %f %f", mDoDucking, mDuckingFactor,mBTVolLevel);
-
+    // Mixing logic
+    ALOGV("Out of Ducking analysis uiPCMsize %d %f %f",
+            mDoDucking, mDuckingFactor, mBTVolLevel);
     while (uiPCMsize-- > 0) {
 
-        M4OSA_Int32 temp;
         // Set vol factor for BT and PT
         *pBTMdata1 = (M4OSA_Int16)(*pBTMdata1*mBTVolLevel);
         *pPTMdata2 = (M4OSA_Int16)(*pPTMdata2*mPTVolLevel);
@@ -179,6 +161,7 @@
             *pBTMdata1 = (M4OSA_Int16)(*pBTMdata1 /2 + *pPTMdata2 /2);
         }
 
+        M4OSA_Int32 temp;
         if (*pBTMdata1 < 0) {
             temp = -(*pBTMdata1) * 2; // bring to original Amplitude level
 
@@ -199,17 +182,13 @@
         pBTMdata1++;
         pPTMdata2++;
     }
-    //LOGV("VideoEditorBGAudioProcessing:: Copy final out ");
+
     memcpy((void *)pMixedOutBuffer->m_dataAddress,
         (void *)pBackgroundTrack->m_dataAddress,
         pBackgroundTrack->m_bufferSize);
 
-    LOGV("VideoEditorBGAudioProcessing::lvProcessAudioMixNDuck EXIT");
-    return result;
-}
-
-VideoEditorBGAudioProcessing::~VideoEditorBGAudioProcessing() {
-
+    ALOGV("mixAndDuck: X");
+    return M4NO_ERROR;
 }
 
 M4OSA_Int32 VideoEditorBGAudioProcessing::calculateOutResampleBufSize() {
@@ -218,51 +197,48 @@
     return (mOutSampleRate / mInSampleRate) * mBTBuffer.m_bufferSize;
 }
 
-void VideoEditorBGAudioProcessing ::veSetAudioProcessingParams(
-        const veAudMixSettings& gInputParams) {
+void VideoEditorBGAudioProcessing::setMixParams(
+        const AudioMixSettings& setting) {
+    ALOGV("setMixParams");
 
-    LOGV("VideoEditorBGAudioProcessing:: ENTER lvSetAudioProcessingParams ");
-    mDucking_enable       = gInputParams.lvInDucking_enable;
-    mDucking_lowVolume    = gInputParams.lvInDucking_lowVolume;
-    mDucking_threshold    = gInputParams.lvInDucking_threshold;
+    mDucking_enable       = setting.lvInDucking_enable;
+    mDucking_lowVolume    = setting.lvInDucking_lowVolume;
+    mDucking_threshold    = setting.lvInDucking_threshold;
+    mPTVolLevel           = setting.lvPTVolLevel;
+    mBTVolLevel           = setting.lvBTVolLevel ;
+    mBTChannelCount       = setting.lvBTChannelCount;
+    mPTChannelCount       = setting.lvPTChannelCount;
+    mBTFormat             = setting.lvBTFormat;
+    mInSampleRate         = setting.lvInSampleRate;
+    mOutSampleRate        = setting.lvOutSampleRate;
 
-    mPTVolLevel           = gInputParams.lvPTVolLevel;
-    mBTVolLevel           = gInputParams.lvBTVolLevel ;
-
-    mBTChannelCount       = gInputParams.lvBTChannelCount;
-    mPTChannelCount       = gInputParams.lvPTChannelCount;
-
-    mBTFormat             = gInputParams.lvBTFormat;
-
-    mInSampleRate         = gInputParams.lvInSampleRate;
-    mOutSampleRate        = gInputParams.lvOutSampleRate;
-
+    // Reset the following params to default values
     mAudVolArrIndex       = 0;
     mDoDucking            = 0;
-    mDuckingFactor        = 1.0; // default
+    mDuckingFactor        = 1.0;
 
-    LOGV("VideoEditorBGAudioProcessing::  ducking_enable 0x%x \
-        ducking_lowVolume %f  ducking_threshold %d  fPTVolLevel %f BTVolLevel %f",
-        mDucking_enable, mDucking_lowVolume, mDucking_threshold,
-        mPTVolLevel, mPTVolLevel);
+    ALOGV("ducking enable 0x%x lowVolume %f threshold %d "
+            "fPTVolLevel %f BTVolLevel %f",
+            mDucking_enable, mDucking_lowVolume, mDucking_threshold,
+            mPTVolLevel, mPTVolLevel);
 
-    // Following logc decides if SSRC support is needed for this mixing
-    mIsSSRCneeded = (gInputParams.lvInSampleRate != gInputParams.lvOutSampleRate);
-    if (gInputParams.lvBTChannelCount != gInputParams.lvPTChannelCount){
-        if (gInputParams.lvBTChannelCount == 2){
-            mChannelConversion   = 1; // convert to MONO
+    // Decides if SSRC support is needed for this mixing
+    mIsSSRCneeded = (setting.lvInSampleRate != setting.lvOutSampleRate);
+    if (setting.lvBTChannelCount != setting.lvPTChannelCount){
+        if (setting.lvBTChannelCount == 2){
+            mChannelConversion = 1; // convert to MONO
         } else {
-            mChannelConversion   = 2; // Convert to STEREO
+            mChannelConversion = 2; // Convert to STEREO
         }
     } else {
-        mChannelConversion   = 0;
+        mChannelConversion = 0;
     }
-    LOGV("VideoEditorBGAudioProcessing:: EXIT veSetAudioProcessingParams ");
 }
 
-
 // Fast way to compute 10 * log(value)
 M4OSA_Int32 VideoEditorBGAudioProcessing::getDecibelSound(M4OSA_UInt32 value) {
+    ALOGV("getDecibelSound: %ld", value);
+
     if (value <= 0 || value > 0x8000) {
         return 0;
     } else if (value > 0x4000) { // 32768
@@ -305,6 +281,8 @@
         M4OSA_Int32 storeCount,
         M4OSA_Int32 thresholdValue) {
 
+    ALOGV("isThresholdBreached");
+
     int totalValue = 0;
     for (int i = 0; i < storeCount; ++i) {
         totalValue += averageValue[i];
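
The per-sample combine inside mixAndDuck() above scales each track by its volume level (the background additionally by the ducking factor), averages the two, then restores the amplitude with 16-bit saturation. A simplified sketch of that step, not the exact code from the patch:

    // Simplified per-sample mix of a primary (PT) and background (BT) sample.
    #include <cstdint>

    static int16_t mixSampleSketch(int16_t primary, int16_t background,
                                   float ptVol, float btVol, float duckFactor) {
        int32_t pt = (int32_t)(primary * ptVol);
        int32_t bg = (int32_t)(background * btVol * duckFactor);
        int32_t mixed = (bg / 2 + pt / 2) * 2;   // average, then bring back to full amplitude
        if (mixed >  0x7fff) mixed =  0x7fff;    // saturate instead of wrapping
        if (mixed < -0x8000) mixed = -0x8000;
        return (int16_t)mixed;
    }
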
diff --git a/libvideoeditor/lvpp/VideoEditorBGAudioProcessing.h b/libvideoeditor/lvpp/VideoEditorBGAudioProcessing.h
index 0798fc1..cb7a69f 100755
--- a/libvideoeditor/lvpp/VideoEditorBGAudioProcessing.h
+++ b/libvideoeditor/lvpp/VideoEditorBGAudioProcessing.h
@@ -14,30 +14,34 @@
  * limitations under the License.
  */
 
+#ifndef VE_BACKGROUND_AUDIO_PROC_H
+#define VE_BACKGROUND_AUDIO_PROC_H
+
 #include "M4OSA_Error.h"
 #include "M4OSA_Types.h"
 #include "M4OSA_Memory.h"
 #include "M4OSA_Export.h"
 #include "M4OSA_CoreID.h"
 
-namespace android{
 
-#define WINDOW_SIZE 10
-
-enum veAudioFormat {MONO_16_BIT, STEREO_16_BIT};
-
+namespace android {
 
 typedef struct {
     M4OSA_UInt16*   m_dataAddress; // Android SRC needs a Int16 pointer
     M4OSA_UInt32    m_bufferSize;
 } M4AM_Buffer16;    // Structure contains Int16_t pointer
 
+enum AudioFormat {
+    MONO_16_BIT,
+    STEREO_16_BIT
+};
+
 // Following struct will be used by app to supply the PT and BT properties
 // along with ducking values
 typedef struct {
     M4OSA_Int32 lvInSampleRate; // Sampling audio freq (8000,16000 or more )
     M4OSA_Int32 lvOutSampleRate; //Sampling audio freq (8000,16000 or more )
-    veAudioFormat lvBTFormat;
+    AudioFormat lvBTFormat;
 
     M4OSA_Int32 lvInDucking_threshold;
     M4OSA_Float lvInDucking_lowVolume;
@@ -46,25 +50,29 @@
     M4OSA_Float lvBTVolLevel;
     M4OSA_Int32 lvBTChannelCount;
     M4OSA_Int32 lvPTChannelCount;
-} veAudMixSettings;
+} AudioMixSettings;
 
 // This class is defined to get SF SRC access
 class VideoEditorBGAudioProcessing {
 public:
     VideoEditorBGAudioProcessing();
-    void veSetAudioProcessingParams(const veAudMixSettings& mixParams);
+    ~VideoEditorBGAudioProcessing() {}
 
-    M4OSA_Int32 veProcessAudioMixNDuck(
+    void setMixParams(const AudioMixSettings& params);
+
+    M4OSA_Int32 mixAndDuck(
                     void* primaryTrackBuffer,
                     void* backgroundTrackBuffer,
                     void* mixedOutputBuffer);
 
-    ~VideoEditorBGAudioProcessing();
-
 private:
+    enum {
+        kProcessingWindowSize = 10,
+    };
+
     M4OSA_Int32 mInSampleRate;
     M4OSA_Int32 mOutSampleRate;
-    veAudioFormat mBTFormat;
+    AudioFormat mBTFormat;
 
     M4OSA_Bool mIsSSRCneeded;
     M4OSA_Int32 mBTChannelCount;
@@ -75,7 +83,7 @@
     M4OSA_Float mDucking_lowVolume;
     M4OSA_Float mDuckingFactor ;
     M4OSA_Bool mDucking_enable;
-    M4OSA_Int32 mAudioVolumeArray[WINDOW_SIZE];
+    M4OSA_Int32 mAudioVolumeArray[kProcessingWindowSize];
     M4OSA_Int32 mAudVolArrIndex;
     M4OSA_Bool mDoDucking;
     M4OSA_Float mPTVolLevel;
@@ -90,5 +98,13 @@
     // This returns the size of buffer which needs to allocated
     // before resampling is called
     M4OSA_Int32 calculateOutResampleBufSize();
+
+    // Don't call me.
+    VideoEditorBGAudioProcessing(const VideoEditorBGAudioProcessing&);
+    VideoEditorBGAudioProcessing& operator=(
+            const VideoEditorBGAudioProcessing&);
 };
-} // namespace android
+
+}  // namespace android
+
+#endif // VE_BACKGROUND_AUDIO_PROC_H
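
kProcessingWindowSize and mAudioVolumeArray above implement the ducking decision: one peak level is recorded per primary-track buffer, and once the window is full the stored values are averaged and compared against the ducking threshold. A rough sketch of that decision, assuming the caller manages the rotating index as the class does:

    // Sketch of the windowed ducking decision; constants and names are illustrative.
    #include <cstdint>

    enum { kWindowSketch = 10 };

    static bool shouldDuckSketch(int32_t window[kWindowSketch], int index,
                                 int32_t peakDb, int32_t thresholdDb) {
        window[index] = peakDb;              // peak level of the current buffer
        if (index < kWindowSketch - 1) {
            return false;                    // not enough history yet; caller increments index
        }
        int64_t sum = 0;
        for (int i = 0; i < kWindowSketch; ++i) {
            sum += window[i];
        }
        return (sum / kWindowSketch) > thresholdDb;  // duck when the window average breaches the threshold
    }
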
diff --git a/libvideoeditor/lvpp/VideoEditorPlayer.cpp b/libvideoeditor/lvpp/VideoEditorPlayer.cpp
index 90a5821..57cab08 100755
--- a/libvideoeditor/lvpp/VideoEditorPlayer.cpp
+++ b/libvideoeditor/lvpp/VideoEditorPlayer.cpp
@@ -31,12 +31,12 @@
 VideoEditorPlayer::VideoEditorPlayer(NativeWindowRenderer* renderer)
     : mPlayer(new PreviewPlayer(renderer)) {
 
-    LOGV("VideoEditorPlayer");
+    ALOGV("VideoEditorPlayer");
     mPlayer->setListener(this);
 }
 
 VideoEditorPlayer::~VideoEditorPlayer() {
-    LOGV("~VideoEditorPlayer");
+    ALOGV("~VideoEditorPlayer");
 
     reset();
     mVeAudioSink.clear();
@@ -46,7 +46,7 @@
 }
 
 status_t VideoEditorPlayer::initCheck() {
-    LOGV("initCheck");
+    ALOGV("initCheck");
     return OK;
 }
 
@@ -58,34 +58,38 @@
 
 status_t VideoEditorPlayer::setDataSource(
         const char *url, const KeyedVector<String8, String8> *headers) {
-    LOGI("setDataSource('%s')", url);
+    ALOGI("setDataSource('%s')", url);
+    if (headers != NULL) {
+        ALOGE("Headers parameter is not supported");
+        return INVALID_OPERATION;
+    }
 
-    return mPlayer->setDataSource(url, headers);
+    return mPlayer->setDataSource(url);
 }
 
 //We do not use this in preview; dummy implementation as this is pure virtual
 status_t VideoEditorPlayer::setDataSource(int fd, int64_t offset,
     int64_t length) {
-    LOGE("setDataSource(%d, %lld, %lld) Not supported", fd, offset, length);
+    ALOGE("setDataSource(%d, %lld, %lld) Not supported", fd, offset, length);
     return (!OK);
 }
 
 status_t VideoEditorPlayer::setVideoSurface(const sp<Surface> &surface) {
-    LOGV("setVideoSurface");
+    ALOGV("setVideoSurface");
 
     mPlayer->setSurface(surface);
     return OK;
 }
 
 status_t VideoEditorPlayer::setVideoSurfaceTexture(const sp<ISurfaceTexture> &surfaceTexture) {
-    LOGV("setVideoSurfaceTexture");
+    ALOGV("setVideoSurfaceTexture");
 
     mPlayer->setSurfaceTexture(surfaceTexture);
     return OK;
 }
 
 status_t VideoEditorPlayer::prepare() {
-    LOGV("prepare");
+    ALOGV("prepare");
     return mPlayer->prepare();
 }
 
@@ -94,33 +98,33 @@
 }
 
 status_t VideoEditorPlayer::start() {
-    LOGV("start");
+    ALOGV("start");
     return mPlayer->play();
 }
 
 status_t VideoEditorPlayer::stop() {
-    LOGV("stop");
+    ALOGV("stop");
     return pause();
 }
 
 status_t VideoEditorPlayer::pause() {
-    LOGV("pause");
+    ALOGV("pause");
     return mPlayer->pause();
 }
 
 bool VideoEditorPlayer::isPlaying() {
-    LOGV("isPlaying");
+    ALOGV("isPlaying");
     return mPlayer->isPlaying();
 }
 
 status_t VideoEditorPlayer::seekTo(int msec) {
-    LOGV("seekTo");
+    ALOGV("seekTo");
     status_t err = mPlayer->seekTo((int64_t)msec * 1000);
     return err;
 }
 
 status_t VideoEditorPlayer::getCurrentPosition(int *msec) {
-    LOGV("getCurrentPosition");
+    ALOGV("getCurrentPosition");
     int64_t positionUs;
     status_t err = mPlayer->getPosition(&positionUs);
 
@@ -133,7 +137,7 @@
 }
 
 status_t VideoEditorPlayer::getDuration(int *msec) {
-    LOGV("getDuration");
+    ALOGV("getDuration");
 
     int64_t durationUs;
     status_t err = mPlayer->getDuration(&durationUs);
@@ -148,38 +152,38 @@
 }
 
 status_t VideoEditorPlayer::reset() {
-    LOGV("reset");
+    ALOGV("reset");
     mPlayer->reset();
     return OK;
 }
 
 status_t VideoEditorPlayer::setLooping(int loop) {
-    LOGV("setLooping");
+    ALOGV("setLooping");
     return mPlayer->setLooping(loop);
 }
 
 status_t VideoEditorPlayer::setParameter(int key, const Parcel &request) {
-    LOGV("setParameter");
-    return mPlayer->setParameter(key, request);
+    ALOGE("setParameter not implemented");
+    return INVALID_OPERATION;
 }
 
 status_t VideoEditorPlayer::getParameter(int key, Parcel *reply) {
-    LOGV("getParameter");
-    return mPlayer->getParameter(key, reply);
+    ALOGE("getParameter not implemented");
+    return INVALID_OPERATION;
 }
 
 player_type VideoEditorPlayer::playerType() {
-    LOGV("playerType");
+    ALOGV("playerType");
     return STAGEFRIGHT_PLAYER;
 }
 
 void VideoEditorPlayer::acquireLock() {
-    LOGV("acquireLock");
+    ALOGV("acquireLock");
     mPlayer->acquireLock();
 }
 
 void VideoEditorPlayer::releaseLock() {
-    LOGV("releaseLock");
+    ALOGV("releaseLock");
     mPlayer->releaseLock();
 }
 
@@ -197,7 +201,7 @@
         const media::Metadata::Filter& ids, Parcel *records) {
     using media::Metadata;
 
-    uint32_t flags = mPlayer->flags();
+    uint32_t flags = mPlayer->getSourceSeekFlags();
 
     Metadata metadata(records);
 
@@ -222,20 +226,20 @@
 
 status_t VideoEditorPlayer::loadEffectsSettings(
     M4VSS3GPP_EffectSettings* pEffectSettings, int nEffects) {
-    LOGV("loadEffectsSettings");
+    ALOGV("loadEffectsSettings");
     return mPlayer->loadEffectsSettings(pEffectSettings, nEffects);
 }
 
 status_t VideoEditorPlayer::loadAudioMixSettings(
     M4xVSS_AudioMixingSettings* pAudioMixSettings) {
-    LOGV("VideoEditorPlayer: loadAudioMixSettings");
+    ALOGV("VideoEditorPlayer: loadAudioMixSettings");
     return mPlayer->loadAudioMixSettings(pAudioMixSettings);
 }
 
 status_t VideoEditorPlayer::setAudioMixPCMFileHandle(
     M4OSA_Context pAudioMixPCMFileHandle) {
 
-    LOGV("VideoEditorPlayer: loadAudioMixSettings");
+    ALOGV("VideoEditorPlayer: loadAudioMixSettings");
     return mPlayer->setAudioMixPCMFileHandle(pAudioMixPCMFileHandle);
 }
 
@@ -244,28 +248,28 @@
     M4OSA_UInt32 currentMediaBeginCutTime,
     M4OSA_UInt32 primaryTrackVolValue) {
 
-    LOGV("VideoEditorPlayer: loadAudioMixSettings");
+    ALOGV("VideoEditorPlayer: loadAudioMixSettings");
     return mPlayer->setAudioMixStoryBoardParam(audioMixStoryBoardTS,
      currentMediaBeginCutTime, primaryTrackVolValue);
 }
 
 status_t VideoEditorPlayer::setPlaybackBeginTime(uint32_t msec) {
-    LOGV("setPlaybackBeginTime");
+    ALOGV("setPlaybackBeginTime");
     return mPlayer->setPlaybackBeginTime(msec);
 }
 
 status_t VideoEditorPlayer::setPlaybackEndTime(uint32_t msec) {
-    LOGV("setPlaybackEndTime");
+    ALOGV("setPlaybackEndTime");
     return mPlayer->setPlaybackEndTime(msec);
 }
 
 status_t VideoEditorPlayer::setStoryboardStartTime(uint32_t msec) {
-    LOGV("setStoryboardStartTime");
+    ALOGV("setStoryboardStartTime");
     return mPlayer->setStoryboardStartTime(msec);
 }
 
 status_t VideoEditorPlayer::setProgressCallbackInterval(uint32_t cbInterval) {
-    LOGV("setProgressCallbackInterval");
+    ALOGV("setProgressCallbackInterval");
     return mPlayer->setProgressCallbackInterval(cbInterval);
 }
 
@@ -273,12 +277,12 @@
     M4xVSS_MediaRendering mode,
     M4VIDEOEDITING_VideoFrameSize outputVideoSize) {
 
-    LOGV("setMediaRenderingMode");
+    ALOGV("setMediaRenderingMode");
     return mPlayer->setMediaRenderingMode(mode, outputVideoSize);
 }
 
 status_t VideoEditorPlayer::resetJniCallbackTimeStamp() {
-    LOGV("resetJniCallbackTimeStamp");
+    ALOGV("resetJniCallbackTimeStamp");
     return mPlayer->resetJniCallbackTimeStamp();
 }
 
@@ -379,7 +383,8 @@
 }
 
 status_t VideoEditorPlayer::VeAudioOutput::open(
-        uint32_t sampleRate, int channelCount, int format, int bufferCount,
+        uint32_t sampleRate, int channelCount, audio_channel_mask_t channelMask,
+        audio_format_t format, int bufferCount,
         AudioCallback cb, void *cookie) {
 
     mCallback = cb;
@@ -387,12 +392,12 @@
 
     // Check argument "bufferCount" against the mininum buffer count
     if (bufferCount < mMinBufferCount) {
-        LOGV("bufferCount (%d) is too small and increased to %d",
+        ALOGV("bufferCount (%d) is too small and increased to %d",
             bufferCount, mMinBufferCount);
         bufferCount = mMinBufferCount;
 
     }
-    LOGV("open(%u, %d, %d, %d)", sampleRate, channelCount, format, bufferCount);
+    ALOGV("open(%u, %d, %d, %d)", sampleRate, channelCount, format, bufferCount);
     if (mTrack) close();
     int afSampleRate;
     int afFrameCount;
@@ -409,16 +414,28 @@
 
     frameCount = (sampleRate*afFrameCount*bufferCount)/afSampleRate;
 
+    if (channelMask == CHANNEL_MASK_USE_CHANNEL_ORDER) {
+        switch(channelCount) {
+          case 1:
+            channelMask = AUDIO_CHANNEL_OUT_MONO;
+            break;
+          case 2:
+            channelMask = AUDIO_CHANNEL_OUT_STEREO;
+            break;
+          default:
+            return NO_INIT;
+        }
+    }
+
     AudioTrack *t;
     if (mCallback != NULL) {
         t = new AudioTrack(
                 mStreamType,
                 sampleRate,
                 format,
-                (channelCount == 2) ?
-                 AUDIO_CHANNEL_OUT_STEREO : AUDIO_CHANNEL_OUT_MONO,
+                channelMask,
                 frameCount,
-                0 /* flags */,
+                AUDIO_POLICY_OUTPUT_FLAG_NONE,
                 CallbackWrapper,
                 this);
     } else {
@@ -426,18 +443,17 @@
                 mStreamType,
                 sampleRate,
                 format,
-                (channelCount == 2) ?
-                 AUDIO_CHANNEL_OUT_STEREO : AUDIO_CHANNEL_OUT_MONO,
+                channelMask,
                 frameCount);
     }
 
     if ((t == 0) || (t->initCheck() != NO_ERROR)) {
-        LOGE("Unable to create audio track");
+        ALOGE("Unable to create audio track");
         delete t;
         return NO_INIT;
     }
 
-    LOGV("setVolume");
+    ALOGV("setVolume");
     t->setVolume(mLeftVolume, mRightVolume);
     mMsecsPerFrame = 1.e3 / (float) sampleRate;
     mLatency = t->latency();
@@ -447,7 +463,7 @@
 
 void VideoEditorPlayer::VeAudioOutput::start() {
 
-    LOGV("start");
+    ALOGV("start");
     if (mTrack) {
         mTrack->setVolume(mLeftVolume, mRightVolume);
         mTrack->start();
@@ -467,7 +483,7 @@
 
     LOG_FATAL_IF(mCallback != NULL, "Don't call write if supplying a callback.");
 
-    //LOGV("write(%p, %u)", buffer, size);
+    //ALOGV("write(%p, %u)", buffer, size);
     if (mTrack) {
         snoopWrite(buffer, size);
         ssize_t ret = mTrack->write(buffer, size);
@@ -479,32 +495,32 @@
 
 void VideoEditorPlayer::VeAudioOutput::stop() {
 
-    LOGV("stop");
+    ALOGV("stop");
     if (mTrack) mTrack->stop();
 }
 
 void VideoEditorPlayer::VeAudioOutput::flush() {
 
-    LOGV("flush");
+    ALOGV("flush");
     if (mTrack) mTrack->flush();
 }
 
 void VideoEditorPlayer::VeAudioOutput::pause() {
 
-    LOGV("VeAudioOutput::pause");
+    ALOGV("VeAudioOutput::pause");
     if (mTrack) mTrack->pause();
 }
 
 void VideoEditorPlayer::VeAudioOutput::close() {
 
-    LOGV("close");
+    ALOGV("close");
     delete mTrack;
     mTrack = 0;
 }
 
 void VideoEditorPlayer::VeAudioOutput::setVolume(float left, float right) {
 
-    LOGV("setVolume(%f, %f)", left, right);
+    ALOGV("setVolume(%f, %f)", left, right);
     mLeftVolume = left;
     mRightVolume = right;
     if (mTrack) {
@@ -515,7 +531,7 @@
 // static
 void VideoEditorPlayer::VeAudioOutput::CallbackWrapper(
         int event, void *cookie, void *info) {
-    //LOGV("VeAudioOutput::callbackwrapper");
+    //ALOGV("VeAudioOutput::callbackwrapper");
     if (event != AudioTrack::EVENT_MORE_DATA) {
         return;
     }
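
The open() change above adds an explicit audio_channel_mask_t parameter; when the caller passes CHANNEL_MASK_USE_CHANNEL_ORDER, the mask is derived from the channel count, and unsupported counts make open() fail with NO_INIT. A small sketch of that fallback, using stand-in constants rather than the real <system/audio.h> values:

    // Channel-mask fallback sketch; kMonoSketch/kStereoSketch are placeholders.
    #include <cstdint>

    enum : uint32_t { kMonoSketch = 0x1, kStereoSketch = 0x3 };

    static bool resolveChannelMaskSketch(int channelCount, uint32_t *mask) {
        switch (channelCount) {
        case 1:  *mask = kMonoSketch;   return true;
        case 2:  *mask = kStereoSketch; return true;
        default: return false;          // unsupported layout; caller reports NO_INIT
        }
    }
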
diff --git a/libvideoeditor/lvpp/VideoEditorPlayer.h b/libvideoeditor/lvpp/VideoEditorPlayer.h
index 6b10b36..6962501 100755
--- a/libvideoeditor/lvpp/VideoEditorPlayer.h
+++ b/libvideoeditor/lvpp/VideoEditorPlayer.h
@@ -18,7 +18,7 @@
 #define ANDROID_VIDEOEDITOR_PLAYER_H
 
 #include <media/MediaPlayerInterface.h>
-#include "AudioTrack.h"
+#include <media/AudioTrack.h>
 #include "M4xVSS_API.h"
 #include "VideoEditorMain.h"
 #include "VideoEditorTools.h"
@@ -49,8 +49,8 @@
         virtual int             getSessionId();
 
         virtual status_t        open(
-                uint32_t sampleRate, int channelCount,
-                int format, int bufferCount,
+                uint32_t sampleRate, int channelCount, audio_channel_mask_t channelMask,
+                audio_format_t format, int bufferCount,
                 AudioCallback cb, void *cookie);
 
         virtual void            start();
@@ -59,7 +59,7 @@
         virtual void            flush();
         virtual void            pause();
         virtual void            close();
-        void setAudioStreamType(int streamType) { mStreamType = streamType; }
+        void setAudioStreamType(audio_stream_type_t streamType) { mStreamType = streamType; }
                 void            setVolume(float left, float right);
         virtual status_t        dump(int fd,const Vector<String16>& args) const;
 
@@ -73,7 +73,7 @@
         AudioTrack*             mTrack;
         AudioCallback           mCallback;
         void *                  mCallbackCookie;
-        int                     mStreamType;
+        audio_stream_type_t     mStreamType;
         float                   mLeftVolume;
         float                   mRightVolume;
         float                   mMsecsPerFrame;
diff --git a/libvideoeditor/lvpp/VideoEditorPreviewController.cpp b/libvideoeditor/lvpp/VideoEditorPreviewController.cpp
index 3140b46..149c4ea 100755
--- a/libvideoeditor/lvpp/VideoEditorPreviewController.cpp
+++ b/libvideoeditor/lvpp/VideoEditorPreviewController.cpp
@@ -14,13 +14,20 @@
  * limitations under the License.
  */
 
-#define LOG_NDEBUG 1
-#define LOG_TAG "VideoEditorPreviewController"
+// #define LOG_NDEBUG 0
+#define LOG_TAG "PreviewController"
+#include <utils/Log.h>
+
+#include <gui/Surface.h>
+
+#include "VideoEditorAudioPlayer.h"
+#include "PreviewRenderer.h"
+#include "M4OSA_Semaphore.h"
+#include "M4OSA_Thread.h"
 #include "VideoEditorPreviewController.h"
 
 namespace android {
 
-#define PREVIEW_THREAD_STACK_SIZE                           (65536)
 
 VideoEditorPreviewController::VideoEditorPreviewController()
     : mCurrentPlayer(0),
@@ -52,32 +59,32 @@
       mOutputVideoHeight(0),
       bStopThreadInProgress(false),
       mSemThreadWait(NULL) {
-    LOGV("VideoEditorPreviewController");
+    ALOGV("VideoEditorPreviewController");
     mRenderingMode = M4xVSS_kBlackBorders;
     mIsFiftiesEffectStarted = false;
 
-    for (int i=0; i<NBPLAYER_INSTANCES; i++) {
+    for (int i = 0; i < kTotalNumPlayerInstances; ++i) {
         mVePlayer[i] = NULL;
     }
 }
 
 VideoEditorPreviewController::~VideoEditorPreviewController() {
+    ALOGV("~VideoEditorPreviewController");
     M4OSA_UInt32 i = 0;
     M4OSA_ERR err = M4NO_ERROR;
-    LOGV("~VideoEditorPreviewController");
 
     // Stop the thread if its still running
     if(mThreadContext != NULL) {
         err = M4OSA_threadSyncStop(mThreadContext);
         if(err != M4NO_ERROR) {
-            LOGV("~VideoEditorPreviewController: error 0x%x \
+            ALOGV("~VideoEditorPreviewController: error 0x%x \
             in trying to stop thread", err);
             // Continue even if error
         }
 
         err = M4OSA_threadSyncClose(mThreadContext);
         if(err != M4NO_ERROR) {
-            LOGE("~VideoEditorPreviewController: error 0x%x \
+            ALOGE("~VideoEditorPreviewController: error 0x%x \
             in trying to close thread", (unsigned int) err);
             // Continue even if error
         }
@@ -85,10 +92,10 @@
         mThreadContext = NULL;
     }
 
-    for (int playerInst=0; playerInst<NBPLAYER_INSTANCES;
+    for (int playerInst=0; playerInst<kTotalNumPlayerInstances;
          playerInst++) {
         if(mVePlayer[playerInst] != NULL) {
-            LOGV("clearing mVePlayer %d", playerInst);
+            ALOGV("clearing mVePlayer %d", playerInst);
             mVePlayer[playerInst].clear();
         }
     }
@@ -139,7 +146,7 @@
 
     mOverlayState = OVERLAY_CLEAR;
 
-    LOGV("~VideoEditorPreviewController returns");
+    ALOGV("~VideoEditorPreviewController returns");
 }
 
 M4OSA_ERR VideoEditorPreviewController::loadEditSettings(
@@ -149,12 +156,12 @@
     M4VIFI_UInt8 *tmp = NULL;
     M4OSA_ERR err = M4NO_ERROR;
 
-    LOGV("loadEditSettings");
-    LOGV("loadEditSettings Channels = %d, sampling Freq %d",
+    ALOGV("loadEditSettings");
+    ALOGV("loadEditSettings Channels = %d, sampling Freq %d",
           bgmSettings->uiNbChannels, bgmSettings->uiSamplingFrequency  );
           bgmSettings->uiSamplingFrequency = 32000;
 
-    LOGV("loadEditSettings Channels = %d, sampling Freq %d",
+    ALOGV("loadEditSettings Channels = %d, sampling Freq %d",
           bgmSettings->uiNbChannels, bgmSettings->uiSamplingFrequency  );
     Mutex::Autolock autoLock(mLock);
 
@@ -202,14 +209,14 @@
 
     if(mClipList == NULL) {
         mNumberClipsInStoryBoard = pSettings->uiClipNumber;
-        LOGV("loadEditSettings: # of Clips = %d", mNumberClipsInStoryBoard);
+        ALOGV("loadEditSettings: # of Clips = %d", mNumberClipsInStoryBoard);
 
         mClipList = (M4VSS3GPP_ClipSettings**)M4OSA_32bitAlignedMalloc(
          sizeof(M4VSS3GPP_ClipSettings*)*pSettings->uiClipNumber, M4VS,
          (M4OSA_Char*)"LvPP, copy of pClipList");
 
         if(NULL == mClipList) {
-            LOGE("loadEditSettings: Malloc error");
+            ALOGE("loadEditSettings: Malloc error");
             return M4ERR_ALLOC;
         }
         memset((void *)mClipList,0,
@@ -224,7 +231,7 @@
 
             if(mClipList[i] == NULL) {
 
-                LOGE("loadEditSettings: Allocation error for mClipList[%d]", (int)i);
+                ALOGE("loadEditSettings: Allocation error for mClipList[%d]", (int)i);
                 return M4ERR_ALLOC;
             }
             // Copy plain structure
@@ -239,7 +246,7 @@
 
                 if(NULL == mClipList[i]->pFile)
                 {
-                    LOGE("loadEditSettings : ERROR allocating filename");
+                    ALOGE("loadEditSettings : ERROR allocating filename");
                     return M4ERR_ALLOC;
                 }
 
@@ -248,7 +255,7 @@
                  pSettings->pClipList[i]->filePathSize);
             }
             else {
-                LOGE("NULL file path");
+                ALOGE("NULL file path");
                 return M4ERR_PARAMETER;
             }
 
@@ -262,7 +269,7 @@
 
     if(mEffectsSettings == NULL) {
         mNumberEffects = pSettings->nbEffects;
-        LOGV("loadEditSettings: mNumberEffects = %d", mNumberEffects);
+        ALOGV("loadEditSettings: mNumberEffects = %d", mNumberEffects);
 
         if(mNumberEffects != 0) {
             mEffectsSettings = (M4VSS3GPP_EffectSettings*)M4OSA_32bitAlignedMalloc(
@@ -270,7 +277,7 @@
              M4VS, (M4OSA_Char*)"effects settings");
 
             if(mEffectsSettings == NULL) {
-                LOGE("loadEffectsSettings: Allocation error");
+                ALOGE("loadEffectsSettings: Allocation error");
                 return M4ERR_ALLOC;
             }
 
@@ -295,7 +302,7 @@
                      M4VS, (M4OSA_Char*)"lvpp framing buffer");
 
                     if(mEffectsSettings[i].xVSS.pFramingBuffer == NULL) {
-                        LOGE("loadEffectsSettings:Alloc error for pFramingBuf");
+                        ALOGE("loadEffectsSettings:Alloc error for pFramingBuf");
                         free(mEffectsSettings);
                         mEffectsSettings = NULL;
                         return M4ERR_ALLOC;
@@ -314,7 +321,7 @@
                          pSettings->Effects[i].xVSS.pFramingBuffer->u_height*3;
                     }
                     else {
-                        LOGE("loadEffectsSettings: wrong RGB type");
+                        ALOGE("loadEffectsSettings: wrong RGB type");
                         free(mEffectsSettings);
                         mEffectsSettings = NULL;
                         return M4ERR_PARAMETER;
@@ -324,7 +331,7 @@
                      (M4OSA_Char*)"framing buffer pac_data");
 
                     if(tmp == NULL) {
-                        LOGE("loadEffectsSettings:Alloc error pFramingBuf pac");
+                        ALOGE("loadEffectsSettings:Alloc error pFramingBuf pac");
                         free(mEffectsSettings);
                         mEffectsSettings = NULL;
                         free(mEffectsSettings[i].xVSS.pFramingBuffer);
@@ -380,7 +387,7 @@
         (M4OSA_Char*)"LvPP, copy of bgmSettings");
 
         if(NULL == mBackgroundAudioSetting) {
-            LOGE("loadEditSettings: mBackgroundAudioSetting Malloc failed");
+            ALOGE("loadEditSettings: mBackgroundAudioSetting Malloc failed");
             return M4ERR_ALLOC;
         }
 
@@ -400,7 +407,7 @@
              mBackgroundAudioSetting->pFile, M4OSA_kFileRead);
 
             if (err != M4NO_ERROR) {
-                LOGE("loadEditSettings: mBackgroundAudio PCM File open failed");
+                ALOGE("loadEditSettings: mBackgroundAudio PCM File open failed");
                 return M4ERR_PARAMETER;
             }
         }
@@ -412,7 +419,7 @@
 }
 
 M4OSA_ERR VideoEditorPreviewController::setSurface(const sp<Surface> &surface) {
-    LOGV("setSurface");
+    ALOGV("setSurface");
     Mutex::Autolock autoLock(mLock);
 
     mSurface = surface;
@@ -425,22 +432,22 @@
 
     M4OSA_ERR err = M4NO_ERROR;
     M4OSA_UInt32 i = 0, iIncrementedDuration = 0;
-    LOGV("startPreview");
+    ALOGV("startPreview");
 
     if(fromMS > (M4OSA_UInt32)toMs) {
-        LOGE("startPreview: fromMS > toMs");
+        ALOGE("startPreview: fromMS > toMs");
         return M4ERR_PARAMETER;
     }
 
     if(toMs == 0) {
-        LOGE("startPreview: toMs is 0");
+        ALOGE("startPreview: toMs is 0");
         return M4ERR_PARAMETER;
     }
 
     // If already started, then stop preview first
-    for(int playerInst=0; playerInst<NBPLAYER_INSTANCES; playerInst++) {
+    for(int playerInst=0; playerInst<kTotalNumPlayerInstances; playerInst++) {
         if(mVePlayer[playerInst] != NULL) {
-            LOGV("startPreview: stopping previously started preview playback");
+            ALOGV("startPreview: stopping previously started preview playback");
             stopPreview();
             break;
         }
@@ -448,7 +455,7 @@
 
     // If renderPreview was called previously, then delete Renderer object first
     if(mTarget != NULL) {
-        LOGV("startPreview: delete previous PreviewRenderer");
+        ALOGV("startPreview: delete previous PreviewRenderer");
         delete mTarget;
         mTarget = NULL;
     }
@@ -465,36 +472,36 @@
     getVideoSizeByResolution(mOutputVideoSize, &width, &height);
     mNativeWindowRenderer = new NativeWindowRenderer(mSurface, width, height);
 
-    LOGV("startPreview: loop = %d", loop);
+    ALOGV("startPreview: loop = %d", loop);
     mPreviewLooping = loop;
 
-    LOGV("startPreview: callBackAfterFrameCount = %d", callBackAfterFrameCount);
+    ALOGV("startPreview: callBackAfterFrameCount = %d", callBackAfterFrameCount);
     mCallBackAfterFrameCnt = callBackAfterFrameCount;
 
-    for (int playerInst=0; playerInst<NBPLAYER_INSTANCES; playerInst++) {
+    for (int playerInst=0; playerInst<kTotalNumPlayerInstances; playerInst++) {
         mVePlayer[playerInst] = new VideoEditorPlayer(mNativeWindowRenderer);
         if(mVePlayer[playerInst] == NULL) {
-            LOGE("startPreview:Error creating VideoEditorPlayer %d",playerInst);
+            ALOGE("startPreview:Error creating VideoEditorPlayer %d",playerInst);
             return M4ERR_ALLOC;
         }
-        LOGV("startPreview: object created");
+        ALOGV("startPreview: object created");
 
         mVePlayer[playerInst]->setNotifyCallback(this,(notify_callback_f)notify);
-        LOGV("startPreview: notify callback set");
+        ALOGV("startPreview: notify callback set");
 
         mVePlayer[playerInst]->loadEffectsSettings(mEffectsSettings,
          mNumberEffects);
-        LOGV("startPreview: effects settings loaded");
+        ALOGV("startPreview: effects settings loaded");
 
         mVePlayer[playerInst]->loadAudioMixSettings(mBackgroundAudioSetting);
-        LOGV("startPreview: AudioMixSettings settings loaded");
+        ALOGV("startPreview: AudioMixSettings settings loaded");
 
         mVePlayer[playerInst]->setAudioMixPCMFileHandle(mAudioMixPCMFileHandle);
-        LOGV("startPreview: AudioMixPCMFileHandle set");
+        ALOGV("startPreview: AudioMixPCMFileHandle set");
 
         mVePlayer[playerInst]->setProgressCallbackInterval(
          mCallBackAfterFrameCnt);
-        LOGV("startPreview: setProgressCallBackInterval");
+        ALOGV("startPreview: setProgressCallBackInterval");
     }
 
     mPlayerState = VePlayerIdle;
@@ -507,9 +514,9 @@
         mVideoStoryBoardTimeMsUptoFirstPreviewClip = 0;
     }
     else {
-        LOGV("startPreview: fromMS=%d", fromMS);
+        ALOGV("startPreview: fromMS=%d", fromMS);
         if(fromMS >= mClipTotalDuration) {
-            LOGE("startPreview: fromMS >= mClipTotalDuration");
+            ALOGE("startPreview: fromMS >= mClipTotalDuration");
             return M4ERR_PARAMETER;
         }
         for(i=0;i<mNumberClipsInStoryBoard;i++) {
@@ -519,7 +526,7 @@
                 // as threadProcess first increments the clip index
                 // and then processes clip in thread loop
                 mCurrentClipNumber = i-1;
-                LOGD("startPreview:mCurrentClipNumber = %d fromMS=%d",i,fromMS);
+                ALOGD("startPreview:mCurrentClipNumber = %d fromMS=%d",i,fromMS);
 
                 // Save original value
                 mFirstPreviewClipBeginTime = mClipList[i]->uiBeginCutTime;
@@ -542,12 +549,12 @@
         mVideoStoryBoardTimeMsUptoFirstPreviewClip = iIncrementedDuration;
     }
 
-    for (int playerInst=0; playerInst<NBPLAYER_INSTANCES; playerInst++) {
+    for (int playerInst=0; playerInst<kTotalNumPlayerInstances; playerInst++) {
         mVePlayer[playerInst]->setAudioMixStoryBoardParam(fromMS,
          mFirstPreviewClipBeginTime,
          mClipList[i]->ClipProperties.uiClipAudioVolumePercentage);
 
-        LOGV("startPreview:setAudioMixStoryBoardSkimTimeStamp set %d cuttime \
+        ALOGV("startPreview:setAudioMixStoryBoardSkimTimeStamp set %d cuttime \
          %d", fromMS, mFirstPreviewClipBeginTime);
     }
 
@@ -558,16 +565,16 @@
     mActivePlayerIndex = 0;
 
     if(toMs == -1) {
-        LOGV("startPreview: Preview till end of storyboard");
+        ALOGV("startPreview: Preview till end of storyboard");
         mNumberClipsToPreview = mNumberClipsInStoryBoard;
         // Save original value
         mLastPreviewClipEndTime =
          mClipList[mNumberClipsToPreview-1]->uiEndCutTime;
     }
     else {
-        LOGV("startPreview: toMs=%d", toMs);
+        ALOGV("startPreview: toMs=%d", toMs);
         if((M4OSA_UInt32)toMs > mClipTotalDuration) {
-            LOGE("startPreview: toMs > mClipTotalDuration");
+            ALOGE("startPreview: toMs > mClipTotalDuration");
             return M4ERR_PARAMETER;
         }
 
@@ -599,16 +606,16 @@
     // Open the preview process thread
     err = M4OSA_threadSyncOpen(&mThreadContext, (M4OSA_ThreadDoIt)threadProc);
     if (M4NO_ERROR != err) {
-        LOGE("VideoEditorPreviewController:M4OSA_threadSyncOpen error %d", (int) err);
+        ALOGE("VideoEditorPreviewController:M4OSA_threadSyncOpen error %d", (int) err);
         return err;
     }
 
     // Set the stacksize
     err = M4OSA_threadSyncSetOption(mThreadContext, M4OSA_ThreadStackSize,
-     (M4OSA_DataOption)PREVIEW_THREAD_STACK_SIZE);
+     (M4OSA_DataOption) kPreviewThreadStackSize);
 
     if (M4NO_ERROR != err) {
-        LOGE("VideoEditorPreviewController: threadSyncSetOption error %d", (int) err);
+        ALOGE("VideoEditorPreviewController: threadSyncSetOption error %d", (int) err);
         M4OSA_threadSyncClose(mThreadContext);
         mThreadContext = NULL;
         return err;
@@ -617,21 +624,21 @@
      // Start the thread
      err = M4OSA_threadSyncStart(mThreadContext, (M4OSA_Void*)this);
      if (M4NO_ERROR != err) {
-        LOGE("VideoEditorPreviewController: threadSyncStart error %d", (int) err);
+        ALOGE("VideoEditorPreviewController: threadSyncStart error %d", (int) err);
         M4OSA_threadSyncClose(mThreadContext);
         mThreadContext = NULL;
         return err;
     }
     bStopThreadInProgress = false;
 
-    LOGV("startPreview: process thread started");
+    ALOGV("startPreview: process thread started");
     return M4NO_ERROR;
 }
 
 M4OSA_UInt32 VideoEditorPreviewController::stopPreview() {
     M4OSA_ERR err = M4NO_ERROR;
     uint32_t lastRenderedFrameTimeMs = 0;
-    LOGV("stopPreview");
+    ALOGV("stopPreview");
 
     // Stop the thread
     if(mThreadContext != NULL) {
@@ -645,13 +652,13 @@
 
         err = M4OSA_threadSyncStop(mThreadContext);
         if(err != M4NO_ERROR) {
-            LOGV("stopPreview: error 0x%x in trying to stop thread", err);
+            ALOGV("stopPreview: error 0x%x in trying to stop thread", err);
             // Continue even if error
         }
 
         err = M4OSA_threadSyncClose(mThreadContext);
         if(err != M4NO_ERROR) {
-            LOGE("stopPreview: error 0x%x in trying to close thread", (unsigned int)err);
+            ALOGE("stopPreview: error 0x%x in trying to close thread", (unsigned int)err);
             // Continue even if error
         }
 
@@ -663,15 +670,15 @@
         Mutex::Autolock autoLock(mLockSem);
         if(mSemThreadWait != NULL) {
             err = M4OSA_semaphoreClose(mSemThreadWait);
-            LOGV("stopPreview: close semaphore returns 0x%x", err);
+            ALOGV("stopPreview: close semaphore returns 0x%x", err);
             mSemThreadWait = NULL;
         }
     }
 
-    for (int playerInst=0; playerInst<NBPLAYER_INSTANCES; playerInst++) {
+    for (int playerInst=0; playerInst<kTotalNumPlayerInstances; playerInst++) {
         if(mVePlayer[playerInst] != NULL) {
             if(mVePlayer[playerInst]->isPlaying()) {
-                LOGV("stop the player first");
+                ALOGV("stop the player first");
                 mVePlayer[playerInst]->stop();
             }
             if (playerInst == mActivePlayerIndex) {
@@ -683,13 +690,13 @@
             //stopPreview() in PreviewController
             sp<VideoEditorPlayer> temp = mVePlayer[playerInst];
             temp->acquireLock();
-            LOGV("stopPreview: clearing mVePlayer");
+            ALOGV("stopPreview: clearing mVePlayer");
             mVePlayer[playerInst].clear();
             mVePlayer[playerInst] = NULL;
             temp->releaseLock();
         }
     }
-    LOGV("stopPreview: clear audioSink and audioPlayer");
+    ALOGV("stopPreview: clear audioSink and audioPlayer");
     mVEAudioSink.clear();
     if (mVEAudioPlayer) {
         delete mVEAudioPlayer;
@@ -719,7 +726,7 @@
     mOutputVideoWidth = 0;
     mOutputVideoHeight = 0;
 
-    LOGV("stopPreview() lastRenderedFrameTimeMs %ld", lastRenderedFrameTimeMs);
+    ALOGV("stopPreview() lastRenderedFrameTimeMs %ld", lastRenderedFrameTimeMs);
     return lastRenderedFrameTimeMs;
 }
 
@@ -730,7 +737,7 @@
     VideoEditor_renderPreviewFrameStr* pFrameStr = pFrameInfo;
     M4OSA_UInt32 outputBufferWidth =0, outputBufferHeight=0;
     M4VIFI_ImagePlane planeOut[3];
-    LOGV("Inside preview clear frame");
+    ALOGV("Inside preview clear frame");
 
     Mutex::Autolock autoLock(mLock);
 
@@ -751,7 +758,7 @@
             outputBufferWidth, outputBufferHeight);
 
         if(mTarget == NULL) {
-            LOGE("renderPreviewFrame: cannot create PreviewRenderer");
+            ALOGE("renderPreviewFrame: cannot create PreviewRenderer");
             return M4ERR_ALLOC;
         }
     }
@@ -760,7 +767,7 @@
     uint8_t* outBuffer;
     size_t outBufferStride = 0;
 
-    LOGV("doMediaRendering CALL getBuffer()");
+    ALOGV("doMediaRendering CALL getBuffer()");
     mTarget->getBufferYV12(&outBuffer, &outBufferStride);
 
     // Set the output YUV420 plane to be compatible with YV12 format
@@ -818,7 +825,7 @@
             mOutputVideoWidth, mOutputVideoHeight);
 
         if(mTarget == NULL) {
-            LOGE("renderPreviewFrame: cannot create PreviewRenderer");
+            ALOGE("renderPreviewFrame: cannot create PreviewRenderer");
             return M4ERR_ALLOC;
         }
     }
@@ -831,7 +838,7 @@
                   pFrameStr->uiFrameWidth, pFrameStr->uiFrameHeight,
                   pFrameStr->videoRotationDegree);
         if (M4NO_ERROR != err) {
-            LOGE("renderPreviewFrame: cannot rotate video, err 0x%x", (unsigned int)err);
+            ALOGE("renderPreviewFrame: cannot rotate video, err 0x%x", (unsigned int)err);
             delete mTarget;
             mTarget = NULL;
             return err;
@@ -885,9 +892,9 @@
             }
             if ((index < mNumberEffects) && (pCurrEditInfo != NULL)) {
                 pCurrEditInfo->overlaySettingsIndex = index;
-                LOGV("Framing index = %d", index);
+                ALOGV("Framing index = %d", index);
             } else {
-                LOGV("No framing effects found");
+                ALOGV("No framing effects found");
             }
         }
 
@@ -898,7 +905,7 @@
              (M4OSA_Void *)pixelArray);
 
             if(err != M4NO_ERROR) {
-                LOGE("renderPreviewFrame: applyVideoEffect error 0x%x", (unsigned int)err);
+                ALOGE("renderPreviewFrame: applyVideoEffect error 0x%x", (unsigned int)err);
                 delete mTarget;
                 mTarget = NULL;
                 free(pixelArray);
@@ -914,7 +921,7 @@
              pFrameStr->uiFrameHeight, (M4OSA_Void *)pixelArray);
 
             if(err != M4NO_ERROR) {
-                LOGE("renderPreviewFrame:doImageRenderingMode error 0x%x", (unsigned int)err);
+                ALOGE("renderPreviewFrame:doImageRenderingMode error 0x%x", (unsigned int)err);
                 delete mTarget;
                 mTarget = NULL;
                 free(pixelArray);
@@ -930,7 +937,7 @@
          pFrameStr->uiFrameHeight, (M4OSA_Void *)pixelArray);
 
         if(err != M4NO_ERROR) {
-            LOGE("renderPreviewFrame: doImageRenderingMode error 0x%x", (unsigned int)err);
+            ALOGE("renderPreviewFrame: doImageRenderingMode error 0x%x", (unsigned int)err);
             delete mTarget;
             mTarget = NULL;
             free(pixelArray);
@@ -945,7 +952,7 @@
 
 M4OSA_Void VideoEditorPreviewController::setJniCallback(void* cookie,
     jni_progress_callback_fct callbackFct) {
-    //LOGV("setJniCallback");
+    //ALOGV("setJniCallback");
     mJniCookie = cookie;
     mJniCallback = callbackFct;
 }
@@ -957,21 +964,22 @@
     VideoEditorPreviewController *pController =
      (VideoEditorPreviewController *)param;
 
-    LOGV("preparePlayer: instance %d file %d", playerInstance, index);
+    ALOGV("preparePlayer: instance %d file %d", playerInstance, index);
 
-    pController->mVePlayer[playerInstance]->setDataSource(
-    (const char *)pController->mClipList[index]->pFile, NULL);
-    LOGV("preparePlayer: setDataSource instance %s",
+    const char* fileName = (const char*) pController->mClipList[index]->pFile;
+    pController->mVePlayer[playerInstance]->setDataSource(fileName, NULL);
+
+    ALOGV("preparePlayer: setDataSource instance %s",
      (const char *)pController->mClipList[index]->pFile);
 
     pController->mVePlayer[playerInstance]->setVideoSurface(
      pController->mSurface);
-    LOGV("preparePlayer: setVideoSurface");
+    ALOGV("preparePlayer: setVideoSurface");
 
     pController->mVePlayer[playerInstance]->setMediaRenderingMode(
      pController->mClipList[index]->xVSS.MediaRendering,
      pController->mOutputVideoSize);
-    LOGV("preparePlayer: setMediaRenderingMode");
+    ALOGV("preparePlayer: setMediaRenderingMode");
 
     if((M4OSA_UInt32)index == pController->mStartingClipIndex) {
         pController->mVePlayer[playerInstance]->setPlaybackBeginTime(
@@ -981,35 +989,35 @@
         pController->mVePlayer[playerInstance]->setPlaybackBeginTime(
         pController->mClipList[index]->uiBeginCutTime);
     }
-    LOGV("preparePlayer: setPlaybackBeginTime(%d)",
+    ALOGV("preparePlayer: setPlaybackBeginTime(%d)",
      pController->mClipList[index]->uiBeginCutTime);
 
     pController->mVePlayer[playerInstance]->setPlaybackEndTime(
      pController->mClipList[index]->uiEndCutTime);
-    LOGV("preparePlayer: setPlaybackEndTime(%d)",
+    ALOGV("preparePlayer: setPlaybackEndTime(%d)",
      pController->mClipList[index]->uiEndCutTime);
 
     if(pController->mClipList[index]->FileType == M4VIDEOEDITING_kFileType_ARGB8888) {
         pController->mVePlayer[playerInstance]->setImageClipProperties(
                  pController->mClipList[index]->ClipProperties.uiVideoWidth,
                  pController->mClipList[index]->ClipProperties.uiVideoHeight);
-        LOGV("preparePlayer: setImageClipProperties");
+        ALOGV("preparePlayer: setImageClipProperties");
     }
 
     pController->mVePlayer[playerInstance]->prepare();
-    LOGV("preparePlayer: prepared");
+    ALOGV("preparePlayer: prepared");
 
     if(pController->mClipList[index]->uiBeginCutTime > 0) {
         pController->mVePlayer[playerInstance]->seekTo(
          pController->mClipList[index]->uiBeginCutTime);
 
-        LOGV("preparePlayer: seekTo(%d)",
+        ALOGV("preparePlayer: seekTo(%d)",
          pController->mClipList[index]->uiBeginCutTime);
     }
     pController->mVePlayer[pController->mCurrentPlayer]->setAudioPlayer(pController->mVEAudioPlayer);
 
     pController->mVePlayer[playerInstance]->readFirstVideoFrame();
-    LOGV("preparePlayer: readFirstVideoFrame of clip");
+    ALOGV("preparePlayer: readFirstVideoFrame of clip");
 
     return err;
 }
@@ -1020,17 +1028,17 @@
     VideoEditorPreviewController *pController =
      (VideoEditorPreviewController *)param;
 
-    LOGV("inside threadProc");
+    ALOGV("inside threadProc");
     if(pController->mPlayerState == VePlayerIdle) {
         (pController->mCurrentClipNumber)++;
 
-        LOGD("threadProc: playing file index %d total clips %d",
+        ALOGD("threadProc: playing file index %d total clips %d",
          pController->mCurrentClipNumber, pController->mNumberClipsToPreview);
 
         if((M4OSA_UInt32)pController->mCurrentClipNumber >=
          pController->mNumberClipsToPreview) {
 
-            LOGD("All clips previewed");
+            ALOGD("All clips previewed");
 
             pController->mCurrentPlayedDuration = 0;
             pController->mCurrentClipDuration = 0;
@@ -1040,11 +1048,11 @@
                 pController->mCurrentClipNumber =
                  pController->mStartingClipIndex;
 
-                LOGD("Preview looping TRUE, restarting from clip index %d",
+                ALOGD("Preview looping TRUE, restarting from clip index %d",
                  pController->mCurrentClipNumber);
 
                 // Reset the story board timestamp inside the player
-                for (int playerInst=0; playerInst<NBPLAYER_INSTANCES;
+                for (int playerInst=0; playerInst<kTotalNumPlayerInstances;
                  playerInst++) {
                     pController->mVePlayer[playerInst]->resetJniCallbackTimeStamp();
                 }
@@ -1092,7 +1100,7 @@
 
         pController->mVePlayer[pController->mCurrentPlayer]->setStoryboardStartTime(
          pController->mCurrentPlayedDuration);
-        LOGV("threadProc: setStoryboardStartTime");
+        ALOGV("threadProc: setStoryboardStartTime");
 
         // Set the next clip duration for Audio mix here
         if((M4OSA_UInt32)pController->mCurrentClipNumber != pController->mStartingClipIndex) {
@@ -1102,7 +1110,7 @@
              pController->mClipList[index]->uiBeginCutTime,
              pController->mClipList[index]->ClipProperties.uiClipAudioVolumePercentage);
 
-            LOGV("threadProc: setAudioMixStoryBoardParam fromMS %d \
+            ALOGV("threadProc: setAudioMixStoryBoardParam fromMS %d \
              ClipBeginTime %d", pController->mCurrentPlayedDuration +
              pController->mClipList[index]->uiBeginCutTime,
              pController->mClipList[index]->uiBeginCutTime,
@@ -1112,12 +1120,12 @@
         pController->mActivePlayerIndex = pController->mCurrentPlayer;
 
         pController->mVePlayer[pController->mCurrentPlayer]->start();
-        LOGV("threadProc: started");
+        ALOGV("threadProc: started");
 
         pController->mPlayerState = VePlayerBusy;
 
     } else if(pController->mPlayerState == VePlayerAutoStop) {
-        LOGV("Preview completed..auto stop the player");
+        ALOGV("Preview completed..auto stop the player");
     } else if ((pController->mPlayerState == VePlayerBusy) && (pController->mPrepareReqest)) {
         // Prepare the player here
         pController->mPrepareReqest = M4OSA_FALSE;
@@ -1129,13 +1137,13 @@
         }
     } else {
         if (!pController->bStopThreadInProgress) {
-            LOGV("threadProc: state busy...wait for sem");
+            ALOGV("threadProc: state busy...wait for sem");
             if (pController->mSemThreadWait != NULL) {
                 err = M4OSA_semaphoreWait(pController->mSemThreadWait,
                  M4OSA_WAIT_FOREVER);
              }
         }
-        LOGV("threadProc: sem wait returned err = 0x%x", err);
+        ALOGV("threadProc: sem wait returned err = 0x%x", err);
     }
 
     //Always return M4NO_ERROR to ensure the thread keeps running
@@ -1152,14 +1160,14 @@
     uint32_t clipDuration = 0;
     switch (msg) {
         case MEDIA_NOP: // interface test message
-            LOGV("MEDIA_NOP");
+            ALOGV("MEDIA_NOP");
             break;
         case MEDIA_PREPARED:
-            LOGV("MEDIA_PREPARED");
+            ALOGV("MEDIA_PREPARED");
             break;
         case MEDIA_PLAYBACK_COMPLETE:
         {
-            LOGD("notify:MEDIA_PLAYBACK_COMPLETE, mCurrentClipNumber = %d",
+            ALOGD("notify:MEDIA_PLAYBACK_COMPLETE, mCurrentClipNumber = %d",
                     pController->mCurrentClipNumber);
             pController->mPlayerState = VePlayerIdle;
 
@@ -1212,7 +1220,7 @@
           // Always log errors.
           // ext1: Media framework error code.
           // ext2: Implementation dependent error code.
-            LOGE("MEDIA_ERROR; error (%d, %d)", ext1, ext2);
+            ALOGE("MEDIA_ERROR; error (%d, %d)", ext1, ext2);
             if(pController->mJniCallback != NULL) {
                 pController->mJniCallback(pController->mJniCookie,
                  MSG_TYPE_PLAYER_ERROR, &err_val);
@@ -1224,7 +1232,7 @@
             int info_val = ext2;
             // ext1: Media framework error code.
             // ext2: Implementation dependent error code.
-            //LOGW("MEDIA_INFO; info/warning (%d, %d)", ext1, ext2);
+            //ALOGW("MEDIA_INFO; info/warning (%d, %d)", ext1, ext2);
             if(pController->mJniCallback != NULL) {
                 pController->mJniCallback(pController->mJniCookie,
                  MSG_TYPE_PROGRESS_INDICATION, &info_val);
@@ -1232,23 +1240,23 @@
             break;
         }
         case MEDIA_SEEK_COMPLETE:
-            LOGV("MEDIA_SEEK_COMPLETE; Received seek complete");
+            ALOGV("MEDIA_SEEK_COMPLETE; Received seek complete");
             break;
         case MEDIA_BUFFERING_UPDATE:
-            LOGV("MEDIA_BUFFERING_UPDATE; buffering %d", ext1);
+            ALOGV("MEDIA_BUFFERING_UPDATE; buffering %d", ext1);
             break;
         case MEDIA_SET_VIDEO_SIZE:
-            LOGV("MEDIA_SET_VIDEO_SIZE; New video size %d x %d", ext1, ext2);
+            ALOGV("MEDIA_SET_VIDEO_SIZE; New video size %d x %d", ext1, ext2);
             break;
         case 0xAAAAAAAA:
-            LOGV("VIDEO PLAYBACK ALMOST over, prepare next player");
+            ALOGV("VIDEO PLAYBACK ALMOST over, prepare next player");
             // Select next player and prepare it
             // If there is a clip after this one
             if ((M4OSA_UInt32)(pController->mCurrentClipNumber+1) <
              pController->mNumberClipsToPreview) {
                 pController->mPrepareReqest = M4OSA_TRUE;
                 pController->mCurrentPlayer++;
-                if (pController->mCurrentPlayer >= NBPLAYER_INSTANCES) {
+                if (pController->mCurrentPlayer >= kTotalNumPlayerInstances) {
                     pController->mCurrentPlayer = 0;
                 }
                 // Prepare the first clip to be played
@@ -1262,7 +1270,7 @@
             break;
         case 0xBBBBBBBB:
         {
-            LOGV("VIDEO PLAYBACK, Update Overlay");
+            ALOGV("VIDEO PLAYBACK, Update Overlay");
             int overlayIndex = ext2;
             VideoEditorCurretEditInfo *pEditInfo =
                     (VideoEditorCurretEditInfo*)M4OSA_32bitAlignedMalloc(sizeof(VideoEditorCurretEditInfo),
@@ -1271,7 +1279,7 @@
             //     = 2; Clear the overlay.
             pEditInfo->overlaySettingsIndex = ext2;
             pEditInfo->clipIndex = pController->mCurrentClipNumber;
-            LOGV("pController->mCurrentClipNumber = %d",pController->mCurrentClipNumber);
+            ALOGV("pController->mCurrentClipNumber = %d",pController->mCurrentClipNumber);
             if (pController->mJniCallback != NULL) {
                 if (ext1 == 1) {
                     pController->mOverlayState = OVERLAY_UPDATE;
@@ -1287,7 +1295,7 @@
             break;
         }
         default:
-            LOGV("unrecognized message: (%d, %d, %d)", msg, ext1, ext2);
+            ALOGV("unrecognized message: (%d, %d, %d)", msg, ext1, ext2);
             break;
     }
 }
@@ -1404,7 +1412,7 @@
 status_t VideoEditorPreviewController::setPreviewFrameRenderingMode(
     M4xVSS_MediaRendering mode, M4VIDEOEDITING_VideoFrameSize outputVideoSize) {
 
-    LOGV("setMediaRenderingMode: outputVideoSize = %d", outputVideoSize);
+    ALOGV("setMediaRenderingMode: outputVideoSize = %d", outputVideoSize);
     mRenderingMode = mode;
 
     status_t err = OK;
@@ -1438,7 +1446,7 @@
     uint8_t* outBuffer;
     size_t outBufferStride = 0;
 
-    LOGV("doMediaRendering CALL getBuffer()");
+    ALOGV("doMediaRendering CALL getBuffer()");
     mTarget->getBufferYV12(&outBuffer, &outBufferStride);
 
     // Set the output YUV420 plane to be compatible with YV12 format
@@ -1451,7 +1459,7 @@
 
     err = applyRenderingMode(planeIn, planeOut, mRenderingMode);
     if(err != M4NO_ERROR) {
-        LOGE("doImageRenderingMode: applyRenderingMode returned err=0x%x", (unsigned int)err);
+        ALOGE("doImageRenderingMode: applyRenderingMode returned err=0x%x", (unsigned int)err);
     }
     return err;
 }
diff --git a/libvideoeditor/lvpp/VideoEditorPreviewController.h b/libvideoeditor/lvpp/VideoEditorPreviewController.h
index b4537ec..1756f32 100755
--- a/libvideoeditor/lvpp/VideoEditorPreviewController.h
+++ b/libvideoeditor/lvpp/VideoEditorPreviewController.h
@@ -17,87 +17,78 @@
 #ifndef ANDROID_VE_PREVIEWCONTROLLER_H
 #define ANDROID_VE_PREVIEWCONTROLLER_H
 
-#include <utils/Log.h>
 #include "VideoEditorPlayer.h"
-#include "VideoEditorAudioPlayer.h"
-#include "M4OSA_Semaphore.h"
-#include "M4OSA_Thread.h"
-#include "M4OSA_Clock.h"
-#include "M4OSA_Time.h"
-#include "M4xVSS_API.h"
-#include "M4xVSS_Internal.h"
-#include "M4VFL_transition.h"
 #include "VideoEditorTools.h"
-#include "VideoEditorThumbnailMain.h"
-#include "VideoEditorMain.h"
-
-#include "OMX_IVCommon.h"
-#include "mediaplayer.h"
-#include <surfaceflinger/Surface.h>
-#include <surfaceflinger/ISurface.h>
-#include <surfaceflinger/ISurfaceComposer.h>
-#include <surfaceflinger/SurfaceComposerClient.h>
-#include <media/stagefright/MediaBuffer.h>
-#include "PreviewRenderer.h"
-
-
-#define NBPLAYER_INSTANCES 2
 
 namespace android {
 
-typedef enum {
-    VePlayerIdle=0,
-    VePlayerBusy,
-    VePlayerAutoStop
-} VePlayerState;
-
-typedef enum {
-    OVERLAY_UPDATE = 0,
-    OVERLAY_CLEAR
-} OverlayState;
-
 // Callback mechanism from PreviewController to Jni
 typedef void (*jni_progress_callback_fct)(void* cookie, M4OSA_UInt32 msgType, void *argc);
 
+struct Surface;
+struct PreviewRenderer;
 
 class VideoEditorPreviewController {
 
 public:
     VideoEditorPreviewController();
-    virtual ~VideoEditorPreviewController();
+    ~VideoEditorPreviewController();
 
-    M4OSA_ERR loadEditSettings(M4VSS3GPP_EditSettings* pSettings,
-        M4xVSS_AudioMixingSettings* bgmSettings);
+    M4OSA_ERR loadEditSettings(
+            M4VSS3GPP_EditSettings* pSettings,
+            M4xVSS_AudioMixingSettings* bgmSettings);
 
-    M4OSA_ERR setSurface(const sp<Surface> &surface);
+    M4OSA_ERR setSurface(const sp<Surface>& surface);
 
-    M4OSA_ERR startPreview(M4OSA_UInt32 fromMS, M4OSA_Int32 toMs,
-        M4OSA_UInt16 callBackAfterFrameCount, M4OSA_Bool loop) ;
+    M4OSA_ERR startPreview(
+            M4OSA_UInt32 fromMS, M4OSA_Int32 toMs,
+            M4OSA_UInt16 callBackAfterFrameCount,
+            M4OSA_Bool loop);
 
     M4OSA_UInt32 stopPreview();
 
-    M4OSA_ERR renderPreviewFrame(const sp<Surface> &surface,
-        VideoEditor_renderPreviewFrameStr* pFrameInfo,
-        VideoEditorCurretEditInfo *pCurrEditInfo);
+    M4OSA_ERR renderPreviewFrame(
+            const sp<Surface>& surface,
+            VideoEditor_renderPreviewFrameStr* pFrameInfo,
+            VideoEditorCurretEditInfo *pCurrEditInfo);
 
-    M4OSA_ERR clearSurface(const sp<Surface> &surface,
-     VideoEditor_renderPreviewFrameStr* pFrameInfo);
+    M4OSA_ERR clearSurface(
+            const sp<Surface>& surface,
+            VideoEditor_renderPreviewFrameStr* pFrameInfo);
 
-    M4OSA_Void setJniCallback(void* cookie,
-        jni_progress_callback_fct callbackFct);
+    M4OSA_Void setJniCallback(
+            void* cookie,
+            jni_progress_callback_fct callbackFct);
 
-    status_t setPreviewFrameRenderingMode(M4xVSS_MediaRendering mode,
-        M4VIDEOEDITING_VideoFrameSize outputVideoSize);
+    status_t setPreviewFrameRenderingMode(
+            M4xVSS_MediaRendering mode,
+            M4VIDEOEDITING_VideoFrameSize outputVideoSize);
 
 private:
-    sp<VideoEditorPlayer> mVePlayer[NBPLAYER_INSTANCES];
-    int mCurrentPlayer; //Instance of the player currently being used
+    enum {
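+        // Two player instances are used so the next clip can be prepared while the current one plays.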
+        kTotalNumPlayerInstances = 2,
+        kPreviewThreadStackSize = 65536,
+    };
+
+    typedef enum {
+        VePlayerIdle = 0,
+        VePlayerBusy,
+        VePlayerAutoStop
+    } PlayerState;
+
+    typedef enum {
+        OVERLAY_UPDATE = 0,
+        OVERLAY_CLEAR
+    } OverlayState;
+
+    sp<VideoEditorPlayer> mVePlayer[kTotalNumPlayerInstances];
+    int mCurrentPlayer;  // player instance currently being used
     sp<Surface>  mSurface;
     mutable Mutex mLock;
     M4OSA_Context mThreadContext;
-    VePlayerState mPlayerState;
+    PlayerState mPlayerState;
     M4OSA_Bool    mPrepareReqest;
-    M4VSS3GPP_ClipSettings **mClipList; //Pointer to an array of clip settings
+    M4VSS3GPP_ClipSettings **mClipList;
     M4OSA_UInt32 mNumberClipsInStoryBoard;
     M4OSA_UInt32 mNumberClipsToPreview;
     M4OSA_UInt32 mStartingClipIndex;
@@ -137,24 +128,30 @@
     M4VIFI_UInt8*  mFrameRGBBuffer;
     M4VIFI_UInt8*  mFrameYUVBuffer;
     mutable Mutex mLockSem;
+
+
     static M4OSA_ERR preparePlayer(void* param, int playerInstance, int index);
     static M4OSA_ERR threadProc(M4OSA_Void* param);
     static void notify(void* cookie, int msg, int ext1, int ext2);
 
     void setVideoEffectType(M4VSS3GPP_VideoEffectType type, M4OSA_Bool enable);
 
-    M4OSA_ERR applyVideoEffect(M4OSA_Void * dataPtr, M4OSA_UInt32 colorFormat,
-     M4OSA_UInt32 videoWidth, M4OSA_UInt32 videoHeight,
-     M4OSA_UInt32 timeMs, M4OSA_Void* outPtr);
+    M4OSA_ERR applyVideoEffect(
+            M4OSA_Void * dataPtr, M4OSA_UInt32 colorFormat,
+            M4OSA_UInt32 videoWidth, M4OSA_UInt32 videoHeight,
+            M4OSA_UInt32 timeMs, M4OSA_Void* outPtr);
 
-    M4OSA_ERR doImageRenderingMode(M4OSA_Void * dataPtr,
-     M4OSA_UInt32 colorFormat, M4OSA_UInt32 videoWidth,
-     M4OSA_UInt32 videoHeight, M4OSA_Void* outPtr);
+    M4OSA_ERR doImageRenderingMode(
+            M4OSA_Void * dataPtr,
+            M4OSA_UInt32 colorFormat, M4OSA_UInt32 videoWidth,
+            M4OSA_UInt32 videoHeight, M4OSA_Void* outPtr);
 
+    // Don't call me!
     VideoEditorPreviewController(const VideoEditorPreviewController &);
-    VideoEditorPreviewController &operator=(const VideoEditorPreviewController &);
+    VideoEditorPreviewController &operator=(
+            const VideoEditorPreviewController &);
 };
 
 }
 
-#endif //ANDROID_VE_PREVIEWCONTROLLER_H
+#endif // ANDROID_VE_PREVIEWCONTROLLER_H
diff --git a/libvideoeditor/lvpp/VideoEditorSRC.cpp b/libvideoeditor/lvpp/VideoEditorSRC.cpp
index 365eeca..4753dd4 100755
--- a/libvideoeditor/lvpp/VideoEditorSRC.cpp
+++ b/libvideoeditor/lvpp/VideoEditorSRC.cpp
@@ -17,23 +17,26 @@
 //#define LOG_NDEBUG 0
 #define LOG_TAG "VideoEditorSRC"
 
-#include "VideoEditorSRC.h"
+#include <stdlib.h>
+#include <utils/Log.h>
+#include <audio_utils/primitives.h>
+#include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/MetaData.h>
-#include <media/stagefright/MediaDebug.h>
 #include <media/stagefright/MediaBuffer.h>
 #include <media/stagefright/MediaDefs.h>
-#include "AudioMixer.h"
-#include <utils/Log.h>
+#include "VideoEditorSRC.h"
+
 
 namespace android {
 
 VideoEditorSRC::VideoEditorSRC(const sp<MediaSource> &source) {
-    LOGV("VideoEditorSRC::VideoEditorSRC %p(%p)", this, source.get());
+    ALOGV("VideoEditorSRC %p(%p)", this, source.get());
+    static const int32_t kDefaultSamplingFrequencyHz = kFreq32000Hz;
     mSource = source;
     mResampler = NULL;
     mChannelCnt = 0;
     mSampleRate = 0;
-    mOutputSampleRate = DEFAULT_SAMPLING_FREQ;
+    mOutputSampleRate = kDefaultSamplingFrequencyHz;
     mStarted = false;
     mInitialTimeStampUs = -1;
     mAccuOutBufferSize  = 0;
@@ -53,18 +56,18 @@
     // Set the metadata of the output after resampling.
     mOutputFormat = new MetaData;
     mOutputFormat->setCString(kKeyMIMEType, MEDIA_MIMETYPE_AUDIO_RAW);
-    mOutputFormat->setInt32(kKeySampleRate, DEFAULT_SAMPLING_FREQ);
-    mOutputFormat->setInt32(kKeyChannelCount, 2);
+    mOutputFormat->setInt32(kKeySampleRate, kDefaultSamplingFrequencyHz);
+    mOutputFormat->setInt32(kKeyChannelCount, 2);  // always stereo
 }
 
 VideoEditorSRC::~VideoEditorSRC() {
-    LOGV("VideoEditorSRC::~VideoEditorSRC %p(%p)", this, mSource.get());
+    ALOGV("~VideoEditorSRC %p(%p)", this, mSource.get());
     stop();
 }
 
 status_t VideoEditorSRC::start(MetaData *params) {
+    ALOGV("start %p(%p)", this, mSource.get());
     CHECK(!mStarted);
-    LOGV("VideoEditorSRC:start %p(%p)", this, mSource.get());
 
     // Set resampler if required
     checkAndSetResampler();
@@ -78,17 +81,21 @@
 }
 
 status_t VideoEditorSRC::stop() {
-    LOGV("VideoEditorSRC::stop %p(%p)", this, mSource.get());
-    if (!mStarted) return OK;
+    ALOGV("stop %p(%p)", this, mSource.get());
+    if (!mStarted) {
+        return OK;
+    }
+
     if (mBuffer) {
         mBuffer->release();
         mBuffer = NULL;
     }
     mSource->stop();
-    if(mResampler != NULL) {
+    if (mResampler != NULL) {
         delete mResampler;
         mResampler = NULL;
     }
+
     mStarted = false;
     mInitialTimeStampUs = -1;
     mAccuOutBufferSize = 0;
@@ -98,13 +105,13 @@
 }
 
 sp<MetaData> VideoEditorSRC::getFormat() {
-    LOGV("VideoEditorSRC::getFormat");
+    ALOGV("getFormat");
     return mOutputFormat;
 }
 
 status_t VideoEditorSRC::read(
         MediaBuffer **buffer_out, const ReadOptions *options) {
-    LOGV("VideoEditorSRC::read %p(%p)", this, mSource.get());
+    ALOGV("read %p(%p)", this, mSource.get());
     *buffer_out = NULL;
 
     if (!mStarted) {
@@ -116,17 +123,23 @@
         int64_t seekTimeUs;
         ReadOptions::SeekMode mode = ReadOptions::SEEK_PREVIOUS_SYNC;
         if (options && options->getSeekTo(&seekTimeUs, &mode)) {
-            LOGV("read Seek %lld", seekTimeUs);
+            ALOGV("read Seek %lld", seekTimeUs);
             mSeekTimeUs = seekTimeUs;
             mSeekMode = mode;
         }
 
         // We ask for 1024 frames in output
-        const size_t outFrameCnt = 1024;
         // resampler output is always 2 channels and 32 bits
-        int32_t *pTmpBuffer = (int32_t *)calloc(1, outFrameCnt * 2 * sizeof(int32_t));
+        const size_t kOutputFrameCount = 1024;
+        const size_t kBytes = kOutputFrameCount * 2 * sizeof(int32_t);
+        int32_t *pTmpBuffer = (int32_t *)calloc(1, kBytes);
+        if (!pTmpBuffer) {
+            ALOGE("calloc failed to allocate memory: %d bytes", kBytes);
+            return NO_MEMORY;
+        }
+
         // Resample to target quality
-        mResampler->resample(pTmpBuffer, outFrameCnt, this);
+        mResampler->resample(pTmpBuffer, kOutputFrameCount, this);
 
         if (mStopPending) {
             stop();
@@ -142,13 +155,13 @@
         }
 
         // Create a new MediaBuffer
-        int32_t outBufferSize = outFrameCnt * 2 * sizeof(int16_t);
+        int32_t outBufferSize = kOutputFrameCount * 2 * sizeof(int16_t);
         MediaBuffer* outBuffer = new MediaBuffer(outBufferSize);
 
         // Convert back to 2 channels and 16 bits
-        AudioMixer::ditherAndClamp(
+        ditherAndClamp(
                 (int32_t *)((uint8_t*)outBuffer->data() + outBuffer->range_offset()),
-                pTmpBuffer, outFrameCnt);
+                pTmpBuffer, kOutputFrameCount);
         free(pTmpBuffer);
 
         // Compute and set the new timestamp
@@ -165,7 +178,7 @@
         MediaBuffer *aBuffer;
         status_t err = mSource->read(&aBuffer, options);
         if (err != OK) {
-            LOGV("read returns err = %d", err);
+            ALOGV("read returns err = %d", err);
         }
 
         if (err == INFO_FORMAT_CHANGED) {
@@ -185,9 +198,8 @@
     return OK;
 }
 
-status_t VideoEditorSRC::getNextBuffer(AudioBufferProvider::Buffer *pBuffer,
-                                       int64_t pts) {
-    LOGV("Requesting %d, chan = %d", pBuffer->frameCount, mChannelCnt);
+status_t VideoEditorSRC::getNextBuffer(AudioBufferProvider::Buffer *pBuffer, int64_t pts) {
+    ALOGV("getNextBuffer %d, chan = %d", pBuffer->frameCount, mChannelCnt);
     uint32_t done = 0;
     uint32_t want = pBuffer->frameCount * mChannelCnt * 2;
     pBuffer->raw = malloc(want);
@@ -198,7 +210,7 @@
             // if we seek, reset the initial time stamp and accumulated time
             ReadOptions options;
             if (mSeekTimeUs >= 0) {
-                LOGV("%p cacheMore_l Seek requested = %lld", this, mSeekTimeUs);
+                ALOGV("%p cacheMore_l Seek requested = %lld", this, mSeekTimeUs);
                 ReadOptions::SeekMode mode = mSeekMode;
                 options.setSeekTo(mSeekTimeUs, mode);
                 mSeekTimeUs = -1;
@@ -215,7 +227,7 @@
             }
 
             if (err == INFO_FORMAT_CHANGED) {
-                LOGV("getNextBuffer: source read returned INFO_FORMAT_CHANGED");
+                ALOGV("getNextBuffer: source read returned INFO_FORMAT_CHANGED");
                 // At this point we cannot switch to a new AudioResampler because
                 // we are in a callback called by the AudioResampler itself. So
                 // just remember the fact that the format has changed, and let
@@ -226,7 +238,7 @@
 
             // EOS or some other error
             if (err != OK) {
-                LOGV("EOS or some err: %d", err);
+                ALOGV("EOS or some err: %d", err);
                 // We cannot call stop() here because stop() will release the
                 // AudioResampler, and we are in a callback of the AudioResampler.
                 // So just remember the fact and let read() call stop().
@@ -240,7 +252,7 @@
                 int64_t curTS;
                 sp<MetaData> from = mBuffer->meta_data();
                 from->findInt64(kKeyTime, &curTS);
-                LOGV("setting mInitialTimeStampUs to %lld", mInitialTimeStampUs);
+                ALOGV("setting mInitialTimeStampUs to %lld", mInitialTimeStampUs);
                 mInitialTimeStampUs = curTS;
             }
         }
@@ -266,20 +278,22 @@
     }
 
     pBuffer->frameCount = done / (mChannelCnt * 2);
-    LOGV("getNextBuffer done %d", pBuffer->frameCount);
+    ALOGV("getNextBuffer done %d", pBuffer->frameCount);
     return OK;
 }
 
 
 void VideoEditorSRC::releaseBuffer(AudioBufferProvider::Buffer *pBuffer) {
+    ALOGV("releaseBuffer: %p", pBuffers);
     free(pBuffer->raw);
     pBuffer->raw = NULL;
     pBuffer->frameCount = 0;
 }
 
 void VideoEditorSRC::checkAndSetResampler() {
-    LOGV("checkAndSetResampler");
+    ALOGV("checkAndSetResampler");
 
+    static const uint16_t kUnityGain = 0x1000;
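+    // 0x1000 is unity gain (1.0) in the 4.12 fixed-point format used by AudioResampler::setVolume().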
     sp<MetaData> format = mSource->getFormat();
     const char *mime;
     CHECK(format->findCString(kKeyMIMEType, &mime));
@@ -301,7 +315,7 @@
     }
 
     if (mSampleRate != mOutputSampleRate || mChannelCnt != 2) {
-        LOGV("Resampling required (in rate %d, out rate %d, in channel %d)",
+        ALOGV("Resampling required (%d => %d Hz, # channels = %d)",
             mSampleRate, mOutputSampleRate, mChannelCnt);
 
         mResampler = AudioResampler::create(
@@ -311,9 +325,10 @@
                         AudioResampler::DEFAULT);
         CHECK(mResampler);
         mResampler->setSampleRate(mSampleRate);
-        mResampler->setVolume(UNITY_GAIN, UNITY_GAIN);
+        mResampler->setVolume(kUnityGain, kUnityGain);
     } else {
-        LOGV("Resampling not required (%d = %d)", mSampleRate, mOutputSampleRate);
+        ALOGV("Resampling not required (%d => %d Hz, # channels = %d)",
+            mSampleRate, mOutputSampleRate, mChannelCnt);
     }
 }
 
diff --git a/libvideoeditor/lvpp/VideoEditorSRC.h b/libvideoeditor/lvpp/VideoEditorSRC.h
index 87784f6..2b7e9b6 100755
--- a/libvideoeditor/lvpp/VideoEditorSRC.h
+++ b/libvideoeditor/lvpp/VideoEditorSRC.h
@@ -14,13 +14,9 @@
  * limitations under the License.
  */
 
+
 #include <stdint.h>
-
-#include <utils/RefBase.h>
-#include <utils/threads.h>
-
 #include <media/stagefright/MediaSource.h>
-
 #include "AudioBufferProvider.h"
 #include "AudioResampler.h"
 
@@ -30,61 +26,62 @@
 
 class VideoEditorSRC : public MediaSource , public AudioBufferProvider {
 
-    public:
-        VideoEditorSRC(
-            const sp<MediaSource> &source);
+public:
+    VideoEditorSRC(const sp<MediaSource> &source);
 
-        virtual status_t start (MetaData *params = NULL);
-        virtual status_t stop();
-        virtual sp<MetaData> getFormat();
-        virtual status_t read (
-            MediaBuffer **buffer, const ReadOptions *options = NULL);
+    virtual status_t start (MetaData *params = NULL);
+    virtual status_t stop();
+    virtual sp<MetaData> getFormat();
+    virtual status_t read (
+                MediaBuffer **buffer, const ReadOptions *options = NULL);
 
-        virtual status_t getNextBuffer(Buffer* buffer, int64_t pts);
-        virtual void releaseBuffer(Buffer* buffer);
+    virtual status_t getNextBuffer(Buffer* buffer, int64_t pts);
+    virtual void releaseBuffer(Buffer* buffer);
 
-    enum { //Sampling freq
-        kFreq8000Hz = 8000,
+    // Sampling frequencies
+    enum {
+        kFreq8000Hz  = 8000,
         kFreq11025Hz = 11025,
         kFreq12000Hz = 12000,
         kFreq16000Hz = 16000,
         kFreq22050Hz = 22050,
-        kFreq240000Hz = 24000,
+        kFreq24000Hz = 24000,
         kFreq32000Hz = 32000,
-        kFreq44100 = 44100,
-        kFreq48000 = 48000,
+        kFreq44100Hz = 44100,
+        kFreq48000Hz = 48000,
     };
 
-    static const uint16_t UNITY_GAIN = 0x1000;
-    static const int32_t DEFAULT_SAMPLING_FREQ = (int32_t)kFreq32000Hz;
+protected:
+    virtual ~VideoEditorSRC();
 
-    protected :
-        virtual ~VideoEditorSRC();
-    private:
+private:
+    AudioResampler *mResampler;
+    sp<MediaSource> mSource;
+    int mChannelCnt;
+    int mSampleRate;
+    int32_t mOutputSampleRate;
+    bool mStarted;
+    sp<MetaData> mOutputFormat;
 
-        VideoEditorSRC();
-        VideoEditorSRC &operator=(const VideoEditorSRC &);
+    MediaBuffer* mBuffer;
+    int32_t mLeftover;
+    bool mFormatChanged;
+    bool mStopPending;
 
-        void checkAndSetResampler();
+    int64_t mInitialTimeStampUs;
+    int64_t mAccuOutBufferSize;
 
-        AudioResampler        *mResampler;
-        sp<MediaSource>      mSource;
-        int mChannelCnt;
-        int mSampleRate;
-        int32_t mOutputSampleRate;
-        bool mStarted;
-        sp<MetaData> mOutputFormat;
+    int64_t mSeekTimeUs;
+    ReadOptions::SeekMode mSeekMode;
 
-        MediaBuffer* mBuffer;
-        int32_t mLeftover;
-        bool mFormatChanged;
-        bool mStopPending;
+    VideoEditorSRC();
+    void checkAndSetResampler();
 
-        int64_t mInitialTimeStampUs;
-        int64_t mAccuOutBufferSize;
+    // Don't call me
+    VideoEditorSRC(const VideoEditorSRC&);
+    VideoEditorSRC &operator=(const VideoEditorSRC &);
 
-        int64_t mSeekTimeUs;
-        ReadOptions::SeekMode mSeekMode;
 };
 
 } // namespace android
+
diff --git a/libvideoeditor/lvpp/VideoEditorTools.cpp b/libvideoeditor/lvpp/VideoEditorTools.cpp
index f1a6c58..2b9fd60 100755
--- a/libvideoeditor/lvpp/VideoEditorTools.cpp
+++ b/libvideoeditor/lvpp/VideoEditorTools.cpp
@@ -2861,7 +2861,7 @@
 
     M4OSA_UInt8 *pTmpData = (M4OSA_UInt8*) M4OSA_32bitAlignedMalloc(frameSize_argb, M4VS, (M4OSA_Char*)"Image argb data");
     if(pTmpData == M4OSA_NULL) {
-        LOGE("Failed to allocate memory for Image clip");
+        ALOGE("Failed to allocate memory for Image clip");
         return M4ERR_ALLOC;
     }
 
@@ -2870,14 +2870,14 @@
 
     if((lerr != M4NO_ERROR) || (lImageFileFp == M4OSA_NULL))
     {
-        LOGE("LVPreviewController: Can not open the file ");
+        ALOGE("LVPreviewController: Can not open the file ");
         free(pTmpData);
         return M4ERR_FILE_NOT_FOUND;
     }
     lerr = M4OSA_fileReadData(lImageFileFp, (M4OSA_MemAddr8)pTmpData, &frameSize_argb);
     if(lerr != M4NO_ERROR)
     {
-        LOGE("LVPreviewController: can not read the data ");
+        ALOGE("LVPreviewController: can not read the data ");
         M4OSA_fileReadClose(lImageFileFp);
         free(pTmpData);
         return lerr;
@@ -2888,7 +2888,7 @@
     rgbPlane.pac_data = (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc(frameSize, M4VS, (M4OSA_Char*)"Image clip RGB888 data");
     if(rgbPlane.pac_data == M4OSA_NULL)
     {
-        LOGE("Failed to allocate memory for Image clip");
+        ALOGE("Failed to allocate memory for Image clip");
         free(pTmpData);
         return M4ERR_ALLOC;
     }
@@ -2904,7 +2904,7 @@
 #ifdef FILE_DUMP
     FILE *fp = fopen("/sdcard/Input/test_rgb.raw", "wb");
     if(fp == NULL)
-        LOGE("Errors file can not be created");
+        ALOGE("Errors file can not be created");
     else {
         fwrite(rgbPlane.pac_data, frameSize, 1, fp);
         fclose(fp);
@@ -2940,15 +2940,15 @@
         //err = M4VIFI_BGR888toYUV420(M4OSA_NULL, &rgbPlane, yuvPlane);
         if(err != M4NO_ERROR)
         {
-            LOGE("error when converting from RGB to YUV: 0x%x\n", (unsigned int)err);
+            ALOGE("error when converting from RGB to YUV: 0x%x\n", (unsigned int)err);
         }
         free(rgbPlane.pac_data);
 
-        //LOGE("RGB to YUV done");
+        //ALOGE("RGB to YUV done");
 #ifdef FILE_DUMP
         FILE *fp1 = fopen("/sdcard/Input/test_yuv.raw", "wb");
         if(fp1 == NULL)
-            LOGE("Errors file can not be created");
+            ALOGE("Errors file can not be created");
         else {
             fwrite(yuvPlane[0].pac_data, yuvPlane[0].u_height * yuvPlane[0].u_width * 1.5, 1, fp1);
             fclose(fp1);
@@ -3167,7 +3167,7 @@
 
     }
     else {
-        LOGV(" YUV buffer reuse");
+        ALOGV(" YUV buffer reuse");
         framingCtx->FramingYuv = (M4VIFI_ImagePlane*)M4OSA_32bitAlignedMalloc(
             3*sizeof(M4VIFI_ImagePlane), M4VS, (M4OSA_Char*)"YUV");
 
@@ -3218,7 +3218,7 @@
      colorEffect);
 
     if(err != M4NO_ERROR) {
-        LOGV("M4VSS3GPP_externalVideoEffectColor(%d) error %d",
+        ALOGV("M4VSS3GPP_externalVideoEffectColor(%d) error %d",
             colorEffect, err);
 
         if(NULL != buffer1) {
@@ -3249,7 +3249,7 @@
          lum_factor, NULL);
 
     if(err != M4NO_ERROR) {
-        LOGE("M4VFL_modifyLumaWithScale(%d) error %d", videoEffect, (int)err);
+        ALOGE("M4VFL_modifyLumaWithScale(%d) error %d", videoEffect, (int)err);
 
         if(NULL != buffer1) {
             free(buffer1);
@@ -3287,7 +3287,7 @@
      (M4OSA_Char*)("lvpp finalOutputBuffer"));
 
     if(finalOutputBuffer == NULL) {
-        LOGE("applyEffectsAndRenderingMode: malloc error");
+        ALOGE("applyEffectsAndRenderingMode: malloc error");
         return M4ERR_ALLOC;
     }
 
@@ -3296,7 +3296,7 @@
      ((params->videoHeight*params->videoWidth*3)>>1), M4VS, (M4OSA_Char*)("lvpp colorBuffer"));
 
     if(tempOutputBuffer == NULL) {
-        LOGE("applyEffectsAndRenderingMode: malloc error tempOutputBuffer");
+        ALOGE("applyEffectsAndRenderingMode: malloc error tempOutputBuffer");
         if(NULL != finalOutputBuffer) {
             free(finalOutputBuffer);
             finalOutputBuffer = NULL;
@@ -3420,7 +3420,7 @@
              M4xVSS_kVideoEffectType_Fifties);
 
             if(err != M4NO_ERROR) {
-                LOGE("M4VSS3GPP_externalVideoEffectFifties error 0x%x", (unsigned int)err);
+                ALOGE("M4VSS3GPP_externalVideoEffectFifties error 0x%x", (unsigned int)err);
 
                 if(NULL != finalOutputBuffer) {
                     free(finalOutputBuffer);
@@ -3545,7 +3545,7 @@
         }
     }
 
-    LOGV("doMediaRendering CALL getBuffer()");
+    ALOGV("doMediaRendering CALL getBuffer()");
     // Set the output YUV420 plane to be compatible with YV12 format
     // W & H even
     // YVU instead of YUV
@@ -3570,7 +3570,7 @@
         tempOutputBuffer = M4OSA_NULL;
     }
     if(err != M4NO_ERROR) {
-        LOGV("doVideoPostProcessing: applyRenderingMode returned err=%d",err);
+        ALOGV("doVideoPostProcessing: applyRenderingMode returned err=%d",err);
         return err;
     }
     return M4NO_ERROR;
@@ -3583,11 +3583,11 @@
     uint32_t frameWidth, frameHeight;
 
     if (pWidth == NULL) {
-        LOGE("getVideoFrameSizeByResolution invalid pointer for pWidth");
+        ALOGE("getVideoFrameSizeByResolution invalid pointer for pWidth");
         return android::BAD_VALUE;
     }
     if (pHeight == NULL) {
-        LOGE("getVideoFrameSizeByResolution invalid pointer for pHeight");
+        ALOGE("getVideoFrameSizeByResolution invalid pointer for pHeight");
         return android::BAD_VALUE;
     }
 
@@ -3663,7 +3663,7 @@
             break;
 
         default:
-            LOGE("Unsupported video resolution %d.", resolution);
+            ALOGE("Unsupported video resolution %d.", resolution);
             return android::BAD_VALUE;
     }
 
@@ -3835,7 +3835,7 @@
     M4VIFI_ImagePlane planeIn[3], planeOut[3];
 
     if (pBuffer == M4OSA_NULL) {
-        LOGE("applyVideoRotation: NULL input frame");
+        ALOGE("applyVideoRotation: NULL input frame");
         return M4ERR_PARAMETER;
     }
     M4OSA_UInt8* outPtr = (M4OSA_UInt8 *)M4OSA_32bitAlignedMalloc(
@@ -3857,7 +3857,6 @@
     switch(rotation) {
         case 90:
             M4VIFI_Rotate90RightYUV420toYUV420(M4OSA_NULL, planeIn, planeOut);
-            memset(pBuffer, 0, (width*height*1.5));
             memcpy(pBuffer, (void *)outPtr, (width*height*1.5));
             break;
 
@@ -3868,12 +3867,11 @@
 
         case 270:
             M4VIFI_Rotate90LeftYUV420toYUV420(M4OSA_NULL, planeIn, planeOut);
-            memset(pBuffer, 0, (width*height*1.5));
             memcpy(pBuffer, (void *)outPtr, (width*height*1.5));
             break;
 
         default:
-            LOGE("invalid rotation param %d", (int)rotation);
+            ALOGE("invalid rotation param %d", (int)rotation);
             err = M4ERR_PARAMETER;
             break;
     }
diff --git a/libvideoeditor/vss/mcs/src/Android.mk b/libvideoeditor/vss/mcs/src/Android.mk
index cef7217..3d6cf20 100755
--- a/libvideoeditor/vss/mcs/src/Android.mk
+++ b/libvideoeditor/vss/mcs/src/Android.mk
@@ -43,7 +43,7 @@
     $(TOP)/frameworks/media/libvideoeditor/vss/mcs/inc \
     $(TOP)/frameworks/media/libvideoeditor/vss/common/inc \
     $(TOP)/frameworks/media/libvideoeditor/vss/stagefrightshells/inc \
-    $(TOP)/frameworks/base/include/media/stagefright/openmax
+    $(TOP)/frameworks/native/include/media/openmax
 
 LOCAL_SHARED_LIBRARIES += libdl
 
diff --git a/libvideoeditor/vss/mcs/src/M4MCS_API.c b/libvideoeditor/vss/mcs/src/M4MCS_API.c
index d75a495..c056ef0 100755
--- a/libvideoeditor/vss/mcs/src/M4MCS_API.c
+++ b/libvideoeditor/vss/mcs/src/M4MCS_API.c
@@ -1337,21 +1337,8 @@
     M4OSA_DEBUG_IF2((M4OSA_NULL == instance), M4ERR_PARAMETER,
         "H264MCS_ProcessSPS_PPS: instance is M4OSA_NULL");
 
-    switch( instance->m_pDecoderSpecificInfo[4] & 0x3 )
-    {
-        case 0:
-            instance->m_Num_Bytes_NALUnitLength = 1;
-            break;
-
-        case 1:
-            instance->m_Num_Bytes_NALUnitLength = 2;
-            break;
-
-        case 3:
-            //Note: Current code supports only this...
-            instance->m_Num_Bytes_NALUnitLength = 4;
-            break;
-    }
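+    // avcC byte 4: the low two bits hold lengthSizeMinusOne, so the NALU length field is 1 to 4 bytes.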
+    instance->m_Num_Bytes_NALUnitLength =
+            (instance->m_pDecoderSpecificInfo[4] & 0x03) + 1;
 
     instance->m_encoder_SPS_Cnt = instance->m_pDecoderSpecificInfo[5] & 0x1F;
 
@@ -1428,6 +1415,12 @@
                 break;
         }
 
+        if (nal_size == 0) {
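+            // Empty NAL unit: skip it, but still count its length field in frame_size.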
+            M4OSA_TRACE1_1("0 size nal unit at line %d", __LINE__);
+            frame_size += instance->m_Num_Bytes_NALUnitLength;
+            continue;
+        }
+
         p_bs->bitPos = 0;
         p_bs->lastTotalBits = 0;
         p_bs->numBitsInBuffer =
@@ -8866,7 +8859,7 @@
     }
 
 
-    if ((pC->EncodingVideoFormat = M4ENCODER_kNULL)
+    if ((pC->EncodingVideoFormat == M4ENCODER_kNULL)
         && (pC->bLastDecodedFrameCTS == M4OSA_FALSE)
         && (pC->uiBeginCutTime > 0)) {
 
diff --git a/libvideoeditor/vss/src/Android.mk b/libvideoeditor/vss/src/Android.mk
index 2001f0d..1beb162 100755
--- a/libvideoeditor/vss/src/Android.mk
+++ b/libvideoeditor/vss/src/Android.mk
@@ -52,7 +52,7 @@
 
 LOCAL_MODULE_TAGS := optional
 
-LOCAL_SHARED_LIBRARIES := libcutils libutils
+LOCAL_SHARED_LIBRARIES := libcutils libutils libaudioutils
 
 LOCAL_STATIC_LIBRARIES := \
     libvideoeditor_osal \
@@ -62,14 +62,16 @@
     libvideoeditor_stagefrightshells
 
 LOCAL_C_INCLUDES += \
-    $(TOP)/frameworks/base/include \
+    $(TOP)/system/media/audio_utils/include \
+    $(TOP)/system/media/audio_effects/include \
     $(TOP)/frameworks/media/libvideoeditor/osal/inc \
     $(TOP)/frameworks/media/libvideoeditor/vss/inc \
     $(TOP)/frameworks/media/libvideoeditor/vss/mcs/inc \
     $(TOP)/frameworks/media/libvideoeditor/vss/common/inc \
     $(TOP)/frameworks/media/libvideoeditor/vss/stagefrightshells/inc \
+    $(TOP)/frameworks/native/services/audioflinger \
     $(TOP)/frameworks/base/services/audioflinger \
-    $(TOP)/frameworks/base/include/media/stagefright/openmax
+    $(TOP)/frameworks/native/include/media/openmax
 
 
 LOCAL_SHARED_LIBRARIES += libdl
diff --git a/libvideoeditor/vss/src/M4VD_EXTERNAL_BitstreamParser.c b/libvideoeditor/vss/src/M4VD_EXTERNAL_BitstreamParser.c
index d1015d5..cc67e72 100755
--- a/libvideoeditor/vss/src/M4VD_EXTERNAL_BitstreamParser.c
+++ b/libvideoeditor/vss/src/M4VD_EXTERNAL_BitstreamParser.c
@@ -498,14 +498,14 @@
     }
 
     if ((DSISize <= index) || (pDSI == M4OSA_NULL)) {
-        LOGE("getAVCProfileAndLevel: DSI is invalid");
+        ALOGE("getAVCProfileAndLevel: DSI is invalid");
         *pProfile = M4VIDEOEDITING_VIDEO_UNKNOWN_PROFILE;
         *pLevel = M4VIDEOEDITING_VIDEO_UNKNOWN_LEVEL;
         return M4ERR_PARAMETER;
     }
 
     constraintSet3 = (pDSI[index+2] & 0x10);
-    LOGV("getAVCProfileAndLevel profile_byte %d, level_byte: %d constrain3flag",
+    ALOGV("getAVCProfileAndLevel profile_byte %d, level_byte: %d constrain3flag",
           pDSI[index+1], pDSI[index+3], constraintSet3);
 
     switch (pDSI[index+1]) {
@@ -586,7 +586,7 @@
         default:
             *pLevel = M4VIDEOEDITING_VIDEO_UNKNOWN_LEVEL;
     }
-    LOGV("getAVCProfileAndLevel profile %ld level %ld", *pProfile, *pLevel);
+    ALOGV("getAVCProfileAndLevel profile %ld level %ld", *pProfile, *pLevel);
     return M4NO_ERROR;
 }
 
@@ -596,17 +596,17 @@
     M4OSA_UInt16 index = 7; /* the 5th and 6th bytes contain the level and profile */
 
     if ((pProfile == M4OSA_NULL) || (pLevel == M4OSA_NULL)) {
-        LOGE("getH263ProfileAndLevel invalid pointer for pProfile");
+        ALOGE("getH263ProfileAndLevel invalid pointer for pProfile");
         return M4ERR_PARAMETER;
     }
 
     if ((DSISize < index) || (pDSI == M4OSA_NULL)) {
-        LOGE("getH263ProfileAndLevel: DSI is invalid");
+        ALOGE("getH263ProfileAndLevel: DSI is invalid");
         *pProfile = M4VIDEOEDITING_VIDEO_UNKNOWN_PROFILE;
         *pLevel = M4VIDEOEDITING_VIDEO_UNKNOWN_LEVEL;
         return M4ERR_PARAMETER;
     }
-    LOGV("getH263ProfileAndLevel profile_byte %d, level_byte",
+    ALOGV("getH263ProfileAndLevel profile_byte %d, level_byte",
           pDSI[6], pDSI[5]);
     /* get the H263 level */
     switch (pDSI[5]) {
@@ -670,7 +670,7 @@
         default:
            *pProfile = M4VIDEOEDITING_VIDEO_UNKNOWN_PROFILE;
     }
-    LOGV("getH263ProfileAndLevel profile %ld level %ld", *pProfile, *pLevel);
+    ALOGV("getH263ProfileAndLevel profile %ld level %ld", *pProfile, *pLevel);
     return M4NO_ERROR;
 }
 
@@ -682,7 +682,7 @@
     if ((pProfile == M4OSA_NULL) || (pLevel == M4OSA_NULL)) {
         return M4ERR_PARAMETER;
     }
-    LOGV("getMPEG4ProfileAndLevel profileAndLevel %d", profileAndLevel);
+    ALOGV("getMPEG4ProfileAndLevel profileAndLevel %d", profileAndLevel);
     length = sizeof(mpeg4ProfileLevelTable) /sizeof(mpeg4ProfileLevelTable[0]);
     *pProfile = M4VIDEOEDITING_VIDEO_UNKNOWN_PROFILE;
     *pLevel = M4VIDEOEDITING_VIDEO_UNKNOWN_LEVEL;
@@ -693,6 +693,6 @@
             break;
         }
     }
-    LOGV("getMPEG4ProfileAndLevel profile %ld level %ld", *pProfile, *pLevel);
+    ALOGV("getMPEG4ProfileAndLevel profile %ld level %ld", *pProfile, *pLevel);
     return M4NO_ERROR;
 }
diff --git a/libvideoeditor/vss/src/M4VSS3GPP_EditVideo.c b/libvideoeditor/vss/src/M4VSS3GPP_EditVideo.c
index ee69fd3..59d57e5 100755
--- a/libvideoeditor/vss/src/M4VSS3GPP_EditVideo.c
+++ b/libvideoeditor/vss/src/M4VSS3GPP_EditVideo.c
@@ -1182,8 +1182,6 @@
         * Render Clip1 */
         if( pC->pC1->isRenderDup == M4OSA_FALSE )
         {
-            pC->bIssecondClip = M4OSA_FALSE;
-
             err = M4VSS3GPP_intRenderFrameWithEffect(pC, pC->pC1, ts, M4OSA_TRUE,
                                                 pTempPlaneClip1, pTemp1,
                                                 pPlaneOut);
@@ -3594,9 +3592,11 @@
         }
 
         if (bIsClip1 == M4OSA_TRUE) {
+            pC->bIssecondClip = M4OSA_FALSE;
             numEffects = pC->nbActiveEffects;
         } else {
             numEffects = pC->nbActiveEffects1;
+            pC->bIssecondClip = M4OSA_TRUE;
         }
 
         if ( numEffects > 0) {
diff --git a/libvideoeditor/vss/src/VideoEditorResampler.cpp b/libvideoeditor/vss/src/VideoEditorResampler.cpp
index cd752ce..38dffb7 100755
--- a/libvideoeditor/vss/src/VideoEditorResampler.cpp
+++ b/libvideoeditor/vss/src/VideoEditorResampler.cpp
@@ -15,6 +15,7 @@
  */
 
 #define LOG_NDEBUG 1
+#include <audio_utils/primitives.h>
 #include <utils/Log.h>
 #include "AudioMixer.h"
 #include "VideoEditorResampler.h"
@@ -34,7 +35,7 @@
      kFreq12000Hz = 12000,
      kFreq16000Hz = 16000,
      kFreq22050Hz = 22050,
-     kFreq240000Hz = 24000,
+     kFreq24000Hz = 24000,
      kFreq32000Hz = 32000,
      kFreq44100 = 44100,
      kFreq48000 = 48000,
@@ -52,8 +53,7 @@
 
 #define MAX_SAMPLEDURATION_FOR_CONVERTION 40 //ms
 
-status_t VideoEditorResampler::getNextBuffer(AudioBufferProvider::Buffer *pBuffer,
-                                             int64_t pts) {
+status_t VideoEditorResampler::getNextBuffer(AudioBufferProvider::Buffer *pBuffer, int64_t pts) {
 
     uint32_t dataSize = pBuffer->frameCount * this->nbChannels * sizeof(int16_t);
     mTmpInBuffer = (int16_t*)malloc(dataSize);
@@ -162,7 +162,7 @@
     context->mResampler->resample((int32_t *)pTmpBuffer,
        (size_t)outFrameCount, (VideoEditorResampler *)resamplerContext);
     // Convert back to 16 bits
-    AudioMixer::ditherAndClamp((int32_t*)out, pTmpBuffer, outFrameCount);
+    ditherAndClamp((int32_t*)out, pTmpBuffer, outFrameCount);
     free(pTmpBuffer);
     pTmpBuffer = NULL;
 }
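
The resampler change above swaps the AudioMixer static helper for the equivalent ditherAndClamp() free function from <audio_utils/primitives.h>. A minimal sketch of such a call site follows; the wrapper name is illustrative and only the ditherAndClamp() call mirrors the code above:

    #include <stddef.h>
    #include <stdint.h>
    #include <audio_utils/primitives.h>  // declares ditherAndClamp()

    // Illustrative helper (not part of this patch): convert a stereo 32-bit
    // accumulator buffer back to interleaved 16-bit PCM. 'frames' is the
    // number of stereo frames; 'out' must hold 2 * frames int16_t samples and
    // is written through the int32_t* alias that ditherAndClamp() expects.
    static void downconvertToPcm16(int16_t* out, int32_t* accum, size_t frames) {
        ditherAndClamp(reinterpret_cast<int32_t*>(out), accum, frames);
    }
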
diff --git a/libvideoeditor/vss/stagefrightshells/inc/VideoEditorUtils.h b/libvideoeditor/vss/stagefrightshells/inc/VideoEditorUtils.h
index 3e61291..a21b21d 100755
--- a/libvideoeditor/vss/stagefrightshells/inc/VideoEditorUtils.h
+++ b/libvideoeditor/vss/stagefrightshells/inc/VideoEditorUtils.h
@@ -44,7 +44,7 @@
 #define VIDEOEDITOR_CHECK(test, errCode) \
 { \
     if( !(test) ) { \
-        LOGV("!!! %s (L%d) check failed : " #test ", yields error 0x%.8x", \
+        ALOGV("!!! %s (L%d) check failed : " #test ", yields error 0x%.8x", \
             __FILE__, __LINE__, errCode); \
         err = (errCode); \
         goto cleanUp; \
diff --git a/libvideoeditor/vss/stagefrightshells/src/Android.mk b/libvideoeditor/vss/stagefrightshells/src/Android.mk
index 398750e..5b8e270 100755
--- a/libvideoeditor/vss/stagefrightshells/src/Android.mk
+++ b/libvideoeditor/vss/stagefrightshells/src/Android.mk
@@ -18,6 +18,7 @@
 include $(CLEAR_VARS)
 
 LOCAL_SRC_FILES:= \
+    MediaBufferPuller.cpp \
     VideoEditorVideoDecoder.cpp \
     VideoEditorAudioDecoder.cpp \
     VideoEditorMp3Reader.cpp \
@@ -29,22 +30,20 @@
 
 LOCAL_C_INCLUDES += \
     $(TOP)/frameworks/base/core/jni \
-    $(TOP)/frameworks/base/include \
-    $(TOP)/frameworks/base/include/media \
     $(TOP)/frameworks/base/media/libmediaplayerservice \
     $(TOP)/frameworks/base/media/libstagefright \
     $(TOP)/frameworks/base/media/libstagefright/include \
     $(TOP)/frameworks/base/media/libstagefright/rtsp \
     $(JNI_H_INCLUDE) \
     $(call include-path-for, corecg graphics) \
-    $(TOP)/frameworks/base/include/media/stagefright/openmax \
+    $(TOP)/frameworks/native/include/media/openmax \
     $(TOP)/frameworks/base/core/jni/mediaeditor \
     $(TOP)/frameworks/media/libvideoeditor/vss/inc \
     $(TOP)/frameworks/media/libvideoeditor/vss/common/inc \
     $(TOP)/frameworks/media/libvideoeditor/vss/mcs/inc \
     $(TOP)/frameworks/media/libvideoeditor/lvpp \
     $(TOP)/frameworks/media/libvideoeditor/osal/inc \
-    $(TOP)/frameworks/media/libvideoeditor/include \
+    $(TOP)/frameworks/native/include/media/editor \
     $(TOP)/frameworks/media/libvideoeditor/vss/stagefrightshells/inc
 
 LOCAL_SHARED_LIBRARIES := \
@@ -55,6 +54,7 @@
     libmedia \
     libbinder \
     libstagefright \
+    libstagefright_foundation \
     libstagefright_omx \
     libgui \
     libvideoeditorplayer
diff --git a/libvideoeditor/vss/stagefrightshells/src/MediaBufferPuller.cpp b/libvideoeditor/vss/stagefrightshells/src/MediaBufferPuller.cpp
new file mode 100644
index 0000000..acc8268
--- /dev/null
+++ b/libvideoeditor/vss/stagefrightshells/src/MediaBufferPuller.cpp
@@ -0,0 +1,179 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "MediaBufferPuller"
+#include <utils/Log.h>
+
+#include <media/stagefright/MediaSource.h>
+#include <media/stagefright/MediaBuffer.h>
+#include <media/stagefright/MediaDefs.h>
+#include "MediaBufferPuller.h"
+
+namespace android {
+
+
+MediaBufferPuller::MediaBufferPuller(const sp<MediaSource>& source)
+    : mSource(source),
+      mAskToStart(false),
+      mAskToStop(false),
+      mAcquireStopped(false),
+      mReleaseStopped(false),
+      mSourceError(OK) {
+
+    androidCreateThread(acquireThreadStart, this);
+    androidCreateThread(releaseThreadStart, this);
+}
+
+MediaBufferPuller::~MediaBufferPuller() {
+    stop();
+}
+
+bool MediaBufferPuller::hasMediaSourceReturnedError() const {
+    Mutex::Autolock autolock(mLock);
+    return ((mSourceError != OK) ? true : false);
+}
+void MediaBufferPuller::start() {
+    Mutex::Autolock autolock(mLock);
+    mAskToStart = true;
+    mAcquireCond.signal();
+    mReleaseCond.signal();
+}
+
+void MediaBufferPuller::stop() {
+    Mutex::Autolock autolock(mLock);
+    mAskToStop = true;
+    mAcquireCond.signal();
+    mReleaseCond.signal();
+    while (!mAcquireStopped || !mReleaseStopped) {
+        mUserCond.wait(mLock);
+    }
+
+    // Release remaining buffers
+    for (size_t i = 0; i < mBuffers.size(); i++) {
+        mBuffers.itemAt(i)->release();
+    }
+
+    for (size_t i = 0; i < mReleaseBuffers.size(); i++) {
+        mReleaseBuffers.itemAt(i)->release();
+    }
+
+    mBuffers.clear();
+    mReleaseBuffers.clear();
+}
+
+MediaBuffer* MediaBufferPuller::getBufferNonBlocking() {
+    Mutex::Autolock autolock(mLock);
+    if (mBuffers.empty()) {
+        return NULL;
+    } else {
+        MediaBuffer* b = mBuffers.itemAt(0);
+        mBuffers.removeAt(0);
+        return b;
+    }
+}
+
+MediaBuffer* MediaBufferPuller::getBufferBlocking() {
+    Mutex::Autolock autolock(mLock);
+    while (mBuffers.empty() && !mAcquireStopped) {
+        mUserCond.wait(mLock);
+    }
+
+    if (mBuffers.empty()) {
+        return NULL;
+    } else {
+        MediaBuffer* b = mBuffers.itemAt(0);
+        mBuffers.removeAt(0);
+        return b;
+    }
+}
+
+void MediaBufferPuller::putBuffer(MediaBuffer* buffer) {
+    Mutex::Autolock autolock(mLock);
+    mReleaseBuffers.push(buffer);
+    mReleaseCond.signal();
+}
+
+int MediaBufferPuller::acquireThreadStart(void* arg) {
+    MediaBufferPuller* self = (MediaBufferPuller*)arg;
+    self->acquireThreadFunc();
+    return 0;
+}
+
+int MediaBufferPuller::releaseThreadStart(void* arg) {
+    MediaBufferPuller* self = (MediaBufferPuller*)arg;
+    self->releaseThreadFunc();
+    return 0;
+}
+
+void MediaBufferPuller::acquireThreadFunc() {
+    mLock.lock();
+
+    // Wait for the start signal
+    while (!mAskToStart && !mAskToStop) {
+        mAcquireCond.wait(mLock);
+    }
+
+    // Loop until we are asked to stop, or there is nothing more to read
+    while (!mAskToStop) {
+        MediaBuffer* pBuffer;
+        mLock.unlock();
+        status_t result = mSource->read(&pBuffer, NULL);
+        mLock.lock();
+        mSourceError = result;
+        if (result != OK) {
+            break;
+        }
+        mBuffers.push(pBuffer);
+        mUserCond.signal();
+    }
+
+    mAcquireStopped = true;
+    mUserCond.signal();
+    mLock.unlock();
+}
+
+void MediaBufferPuller::releaseThreadFunc() {
+    mLock.lock();
+
+    // Wait for the start signal
+    while (!mAskToStart && !mAskToStop) {
+        mReleaseCond.wait(mLock);
+    }
+
+    // Loop until we are asked to stop
+    while (1) {
+        if (mReleaseBuffers.empty()) {
+            if (mAskToStop) {
+                break;
+            } else {
+                mReleaseCond.wait(mLock);
+                continue;
+            }
+        }
+        MediaBuffer* pBuffer = mReleaseBuffers.itemAt(0);
+        mReleaseBuffers.removeAt(0);
+        mLock.unlock();
+        pBuffer->release();
+        mLock.lock();
+    }
+
+    mReleaseStopped = true;
+    mUserCond.signal();
+    mLock.unlock();
+}
+
+}  // namespace android
diff --git a/libvideoeditor/vss/stagefrightshells/src/MediaBufferPuller.h b/libvideoeditor/vss/stagefrightshells/src/MediaBufferPuller.h
new file mode 100644
index 0000000..ed72a53
--- /dev/null
+++ b/libvideoeditor/vss/stagefrightshells/src/MediaBufferPuller.h
@@ -0,0 +1,90 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _MEDIA_BUFFER_PULLER_H
+#define _MEDIA_BUFFER_PULLER_H
+
+#include <utils/threads.h>
+#include <utils/Vector.h>
+
+
+namespace android {
+
+struct MediaSource;
+struct MediaBuffer;
+
+/*
+ * An object of this class can pull a list of media buffers
+ * from a MediaSource repeatedly. The user can then get the
+ * buffers from that list.
+ */
+struct MediaBufferPuller {
+public:
+    MediaBufferPuller(const sp<MediaSource>& source);
+    ~MediaBufferPuller();
+
+    // Start to build up the list of the buffers.
+    void start();
+
+    // Release the list of the available buffers, and stop
+    // pulling buffers from the MediaSource.
+    void stop();
+
+    // Get a buffer from the list. If there is no buffer available
+    // at the time this method is called, it blocks waiting for
+    // a buffer to become available or until stop() is called.
+    MediaBuffer* getBufferBlocking();
+
+    // Get a buffer from the list. If there is no buffer available
+    // at the time this method is called, NULL is returned.
+    MediaBuffer* getBufferNonBlocking();
+
+    // Add a buffer to the end of the list of available media buffers
+    void putBuffer(MediaBuffer* buffer);
+
+    // Check whether the source returned an error or not.
+    bool hasMediaSourceReturnedError() const;
+
+private:
+    static int acquireThreadStart(void* arg);
+    void acquireThreadFunc();
+
+    static int releaseThreadStart(void* arg);
+    void releaseThreadFunc();
+
+    sp<MediaSource> mSource;
+    Vector<MediaBuffer*> mBuffers;
+    Vector<MediaBuffer*> mReleaseBuffers;
+
+    mutable Mutex mLock;
+    Condition mUserCond;     // for the user of this class
+    Condition mAcquireCond;  // for the acquire thread
+    Condition mReleaseCond;  // for the release thread
+
+    bool mAskToStart;      // Asks the threads to start
+    bool mAskToStop;       // Asks the threads to stop
+    bool mAcquireStopped;  // The acquire thread has stopped
+    bool mReleaseStopped;  // The release thread has stopped
+    status_t mSourceError; // Error returned by MediaSource read
+
+    // Don't call me!
+    MediaBufferPuller(const MediaBufferPuller&);
+    MediaBufferPuller& operator=(const MediaBufferPuller&);
+};
+
+}  // namespace android
+
+#endif  // _MEDIA_BUFFER_PULLER_H
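
For reference, a hedged usage sketch of the MediaBufferPuller API declared above; drainSource() and its structure are illustrative and not part of this change, only the MediaBufferPuller calls come from the header:

    #include <utils/RefBase.h>
    #include <media/stagefright/MediaBuffer.h>
    #include <media/stagefright/MediaSource.h>
    #include "MediaBufferPuller.h"

    using namespace android;

    // Illustrative consumer (not part of this patch): pull buffers from an
    // already-configured, started MediaSource until it stops producing them.
    static void drainSource(const sp<MediaSource>& source) {
        MediaBufferPuller puller(source);   // constructor spawns the two worker threads
        puller.start();                     // let the acquire thread begin reading

        for (;;) {
            MediaBuffer* buffer = puller.getBufferBlocking();
            if (buffer == NULL) {           // the acquire thread stopped (EOS or source error)
                break;
            }
            // ... consume buffer->data() + buffer->range_offset(), range_length() ...
            puller.putBuffer(buffer);       // queue it for release on the release thread
        }

        if (puller.hasMediaSourceReturnedError()) {
            // the last MediaSource::read() status was not OK; handle as needed
        }
        puller.stop();                      // waits for both worker threads to finish
    }
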
diff --git a/libvideoeditor/vss/stagefrightshells/src/VideoEditor3gpReader.cpp b/libvideoeditor/vss/stagefrightshells/src/VideoEditor3gpReader.cpp
index 3c27673..c4c4d84 100755
--- a/libvideoeditor/vss/stagefrightshells/src/VideoEditor3gpReader.cpp
+++ b/libvideoeditor/vss/stagefrightshells/src/VideoEditor3gpReader.cpp
@@ -46,13 +46,13 @@
 
 #include "ESDS.h"
 #include "utils/Log.h"
+#include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/MediaBufferGroup.h>
 #include <media/stagefright/DataSource.h>
 #include <media/stagefright/FileSource.h>
 #include <media/stagefright/MediaBuffer.h>
 #include <media/stagefright/MediaDefs.h>
 #include <media/stagefright/MediaExtractor.h>
-#include <media/stagefright/MediaDebug.h>
 #include <media/stagefright/MediaSource.h>
 #include <media/stagefright/MetaData.h>
 
@@ -387,7 +387,7 @@
     M4OSA_ERR err = M4NO_ERROR;
     VIDEOEDITOR_CHECK(M4OSA_NULL != pContext , M4ERR_PARAMETER);
 
-    LOGV("VideoEditor3gpReader_create begin");
+    ALOGV("VideoEditor3gpReader_create begin");
 
     /* Context allocation & initialization */
     SAFE_MALLOC(pC, VideoEditor3gpReader_Context, 1, "VideoEditor3gpReader");
@@ -411,11 +411,11 @@
 
 cleanUp:
     if ( M4NO_ERROR == err ) {
-        LOGV("VideoEditor3gpReader_create no error");
+        ALOGV("VideoEditor3gpReader_create no error");
     } else {
-        LOGV("VideoEditor3gpReader_create ERROR 0x%X", err);
+        ALOGV("VideoEditor3gpReader_create ERROR 0x%X", err);
     }
-    LOGV("VideoEditor3gpReader_create end ");
+    ALOGV("VideoEditor3gpReader_create end ");
     return err;
 }
 
@@ -433,7 +433,7 @@
     M4OSA_ERR err = M4NO_ERROR;
     VideoEditor3gpReader_Context* pC = M4OSA_NULL;
 
-    LOGV("VideoEditor3gpReader_destroy begin");
+    ALOGV("VideoEditor3gpReader_destroy begin");
 
     VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER);
     pC = (VideoEditor3gpReader_Context*)pContext;
@@ -447,14 +447,14 @@
 
 cleanUp:
     if( M4NO_ERROR == err ) {
-        LOGV("VideoEditor3gpReader_destroy no error");
+        ALOGV("VideoEditor3gpReader_destroy no error");
     }
     else
     {
-        LOGV("VideoEditor3gpReader_destroy ERROR 0x%X", err);
+        ALOGV("VideoEditor3gpReader_destroy ERROR 0x%X", err);
     }
 
-    LOGV("VideoEditor3gpReader_destroy end ");
+    ALOGV("VideoEditor3gpReader_destroy end ");
     return err;
 }
 
@@ -477,19 +477,19 @@
     VideoEditor3gpReader_Context* pC = (VideoEditor3gpReader_Context*)pContext;
     M4OSA_ERR err = M4NO_ERROR;
 
-    LOGV("VideoEditor3gpReader_open start ");
+    ALOGV("VideoEditor3gpReader_open start ");
     M4OSA_DEBUG_IF1((M4OSA_NULL == pC),  M4ERR_PARAMETER,
         "VideoEditor3gpReader_open: invalid context pointer");
     M4OSA_DEBUG_IF1((M4OSA_NULL == pFileDescriptor), M4ERR_PARAMETER,
         "VideoEditor3gpReader_open: invalid pointer pFileDescriptor");
 
-    LOGV("VideoEditor3gpReader_open Datasource start %s",
+    ALOGV("VideoEditor3gpReader_open Datasource start %s",
         (char*)pFileDescriptor);
     //pC->mDataSource = DataSource::CreateFromURI((char*)pFileDescriptor);
     pC->mDataSource = new FileSource ((char*)pFileDescriptor);
 
     if (pC->mDataSource == NULL) {
-        LOGV("VideoEditor3gpReader_open Datasource error");
+        ALOGV("VideoEditor3gpReader_open Datasource error");
         return M4ERR_PARAMETER;
     }
 
@@ -497,7 +497,7 @@
         MEDIA_MIMETYPE_CONTAINER_MPEG4);
 
     if (pC->mExtractor == NULL) {
-        LOGV("VideoEditor3gpReader_open extractor error");
+        ALOGV("VideoEditor3gpReader_open extractor error");
         return M4ERR_PARAMETER;
     }
 
@@ -505,11 +505,11 @@
     sp<MetaData> meta = pC->mExtractor->getMetaData();
     meta->findInt32(kKeyIsDRM, &isDRMProtected);
     if (isDRMProtected) {
-        LOGV("VideoEditorMp3Reader_open error - DRM Protected");
+        ALOGV("VideoEditorMp3Reader_open error - DRM Protected");
         return M4ERR_UNSUPPORTED_MEDIA_TYPE;
     }
 
-    LOGV("VideoEditor3gpReader_open end ");
+    ALOGV("VideoEditor3gpReader_open end ");
     return err;
 }
 
@@ -529,13 +529,13 @@
     M4_AccessUnit *pAU;
     M4OSA_ERR err = M4NO_ERROR;
 
-    LOGV("VideoEditor3gpReader_close begin");
+    ALOGV("VideoEditor3gpReader_close begin");
 
     M4OSA_DEBUG_IF1((M4OSA_NULL == pC), M4ERR_PARAMETER,
         "VideoEditor3gpReader_close: invalid context pointer");
 
     if (pC->mAudioStreamHandler) {
-        LOGV("VideoEditor3gpReader_close Audio");
+        ALOGV("VideoEditor3gpReader_close Audio");
 
         if (M4OSA_NULL != pC->mAudioStreamHandler->m_pDecoderSpecificInfo) {
             free(pC->mAudioStreamHandler->\
@@ -575,7 +575,7 @@
         pC->mAudioSource.clear();
     }
     if (pC->mVideoStreamHandler) {
-        LOGV("VideoEditor3gpReader_close Video ");
+        ALOGV("VideoEditor3gpReader_close Video ");
 
         if(M4OSA_NULL != pC->mVideoStreamHandler->m_pDecoderSpecificInfo) {
             free(pC->mVideoStreamHandler->\
@@ -607,7 +607,7 @@
     pC->mExtractor.clear();
     pC->mDataSource.clear();
 
-    LOGV("VideoEditor3gpReader_close end");
+    ALOGV("VideoEditor3gpReader_close end");
     return err;
 }
 
@@ -635,7 +635,7 @@
     VideoEditor3gpReader_Context* pC = (VideoEditor3gpReader_Context*)context;
     M4OSA_ERR err = M4NO_ERROR;
 
-    LOGV("VideoEditor3gpReader_getOption begin %d", optionId);
+    ALOGV("VideoEditor3gpReader_getOption begin %d", optionId);
 
     M4OSA_DEBUG_IF1((M4OSA_NULL == pC), M4ERR_PARAMETER,
         "invalid context pointer");
@@ -645,23 +645,23 @@
     switch (optionId) {
     case M4READER_kOptionID_Duration:
         {
-            LOGV("VideoEditor3gpReader_getOption duration %d",pC->mMaxDuration);
+            ALOGV("VideoEditor3gpReader_getOption duration %d",pC->mMaxDuration);
             *(M4OSA_Time*)pValue = pC->mMaxDuration;
         }
         break;
     case M4READER_kOptionID_Version:
         /* not used */
-        LOGV("VideoEditor3gpReader_getOption: M4READER_kOptionID_Version");
+        ALOGV("VideoEditor3gpReader_getOption: M4READER_kOptionID_Version");
         break;
 
     case M4READER_kOptionID_Copyright:
         /* not used */
-        LOGV(">>>>>>>   M4READER_kOptionID_Copyright");
+        ALOGV(">>>>>>>   M4READER_kOptionID_Copyright");
         break;
 
     case M4READER_kOptionID_CreationTime:
         /* not used */
-        LOGV("VideoEditor3gpReader_getOption M4READER_kOptionID_CreationTime");
+        ALOGV("VideoEditor3gpReader_getOption M4READER_kOptionID_CreationTime");
     break;
 
     case M4READER_kOptionID_Bitrate:
@@ -672,13 +672,13 @@
                 M4OSA_UInt32 ui32Tmp = (M4OSA_UInt32)pC->mMaxDuration;
                 *pBitrate = (M4OSA_UInt32)(pC->mFileSize * 8000.0 / pC->mMaxDuration);
             }
-            LOGV("VideoEditor3gpReader_getOption bitrate %ld", *pBitrate);
+            ALOGV("VideoEditor3gpReader_getOption bitrate %ld", *pBitrate);
         }
     break;
     case M4READER_3GP_kOptionID_H263Properties:
         {
             if(M4OSA_NULL == pC->mVideoStreamHandler) {
-                LOGV("VideoEditor3gpReader_getOption no videoStream retrieved");
+                ALOGV("VideoEditor3gpReader_getOption no videoStream retrieved");
 
                 err = M4ERR_NO_VIDEO_STREAM_RETRIEVED_YET;
                 break;
@@ -686,7 +686,7 @@
             if((M4DA_StreamTypeVideoH263 != pC->mVideoStreamHandler->\
                 m_streamType) || (pC->mVideoStreamHandler->\
                 m_decoderSpecificInfoSize < 7)) {
-                LOGV("VideoEditor3gpReader_getOption DSI Size %d",
+                ALOGV("VideoEditor3gpReader_getOption DSI Size %d",
                     pC->mVideoStreamHandler->m_decoderSpecificInfoSize);
 
                 err = M4ERR_VIDEO_NOT_H263;
@@ -699,50 +699,50 @@
                 pC->mVideoStreamHandler->m_pDecoderSpecificInfo[6];
             ((M4READER_3GP_H263Properties *)pValue)->uiLevel =
                 pC->mVideoStreamHandler->m_pDecoderSpecificInfo[5];
-            LOGV("VideoEditor3gpReader_getOption M4READER_3GP_kOptionID_\
+            ALOGV("VideoEditor3gpReader_getOption M4READER_3GP_kOptionID_\
             H263Properties end");
         }
         break;
     case M4READER_3GP_kOptionID_PurpleLabsDrm:
-        LOGV("VideoEditor3gpReaderOption M4READER_3GP_kOptionID_PurpleLabsDrm");
+        ALOGV("VideoEditor3gpReaderOption M4READER_3GP_kOptionID_PurpleLabsDrm");
         /* not used */
         break;
 
     case M4READER_kOptionID_GetNumberOfAudioAu:
         /* not used */
-        LOGV("VideoEditor3gpReadeOption M4READER_kOptionID_GetNumberOfAudioAu");
+        ALOGV("VideoEditor3gpReadeOption M4READER_kOptionID_GetNumberOfAudioAu");
     break;
 
     case M4READER_kOptionID_GetNumberOfVideoAu:
         /* not used */
-        LOGV("VideoEditor3gpReader_getOption :GetNumberOfVideoAu");
+        ALOGV("VideoEditor3gpReader_getOption :GetNumberOfVideoAu");
     break;
 
     case M4READER_kOptionID_GetMetadata:
         /* not used */
-        LOGV("VideoEditor3gpReader_getOption M4READER_kOptionID_GetMetadata");
+        ALOGV("VideoEditor3gpReader_getOption M4READER_kOptionID_GetMetadata");
     break;
 
     case M4READER_kOptionID_3gpFtypBox:
         /* used only for SEMC */
-        LOGV("VideoEditor3gpReader_getOption M4READER_kOptionID_3gpFtypBox");
+        ALOGV("VideoEditor3gpReader_getOption M4READER_kOptionID_3gpFtypBox");
         err = M4ERR_BAD_OPTION_ID; //check this
         break;
 
 #ifdef OPTIONID_GET_NEXT_VIDEO_CTS
     case M4READER_3GP_kOptionID_getNextVideoCTS:
         /* not used */
-        LOGV("VideoEditor3gpReader_getOption: getNextVideoCTS");
+        ALOGV("VideoEditor3gpReader_getOption: getNextVideoCTS");
         break;
 #endif
     default:
         {
             err = M4ERR_BAD_OPTION_ID;
-            LOGV("VideoEditor3gpReader_getOption M4ERR_BAD_OPTION_ID");
+            ALOGV("VideoEditor3gpReader_getOption M4ERR_BAD_OPTION_ID");
         }
         break;
     }
-    LOGV("VideoEditor3gpReader_getOption end: optionID: x%x", optionId);
+    ALOGV("VideoEditor3gpReader_getOption end: optionID: x%x", optionId);
     return err;
 }
 /**
@@ -770,7 +770,7 @@
     M4OSA_DEBUG_IF1((M4OSA_NULL == pValue), M4ERR_PARAMETER,
         "invalid value pointer");
 
-    LOGV("VideoEditor3gpReader_setOption begin %d",optionId);
+    ALOGV("VideoEditor3gpReader_setOption begin %d",optionId);
 
     switch(optionId) {
         case M4READER_kOptionID_SetOsaFileReaderFctsPtr:
@@ -790,12 +790,12 @@
 
         default:
         {
-            LOGV("VideoEditor3gpReader_setOption: returns M4ERR_BAD_OPTION_ID");
+            ALOGV("VideoEditor3gpReader_setOption: returns M4ERR_BAD_OPTION_ID");
             err = M4ERR_BAD_OPTION_ID;
         }
         break;
     }
-    LOGV("VideoEditor3gpReader_setOption end ");
+    ALOGV("VideoEditor3gpReader_setOption end ");
     return err;
 }
 /**
@@ -822,7 +822,7 @@
     M4OSA_DEBUG_IF1((pAccessUnit == 0),    M4ERR_PARAMETER,
         "VideoEditor3gpReader_fillAuStruct: invalid pointer to M4_AccessUnit");
 
-    LOGV("VideoEditor3gpReader_fillAuStruct begin");
+    ALOGV("VideoEditor3gpReader_fillAuStruct begin");
 
     /* Initialize pAccessUnit structure */
     pAccessUnit->m_size         = 0;
@@ -834,7 +834,7 @@
     pAccessUnit->m_streamID     = pStreamHandler->m_streamId;
     pAccessUnit->m_structSize   = sizeof(M4_AccessUnit);
 
-    LOGV("VideoEditor3gpReader_fillAuStruct end");
+    ALOGV("VideoEditor3gpReader_fillAuStruct end");
     return M4NO_ERROR;
 }
 
@@ -864,14 +864,14 @@
     M4OSA_DEBUG_IF1((pTime == 0), M4ERR_PARAMETER,
         "VideoEditor3gpReader_jump: invalid time pointer");
 
-    LOGV("VideoEditor3gpReader_jump begin");
+    ALOGV("VideoEditor3gpReader_jump begin");
 
     if (*pTime == (pStreamHandler->m_duration)) {
         *pTime -= 1;
     }
     time64 = (M4OSA_Time)*pTime;
 
-    LOGV("VideoEditor3gpReader_jump time us %ld ", time64);
+    ALOGV("VideoEditor3gpReader_jump time us %ld ", time64);
 
     if ((pC->mAudioStreamHandler != M4OSA_NULL) &&
             (pStreamHandler->m_streamId == pC->mAudioStreamHandler->m_streamId))
@@ -883,7 +883,7 @@
         time64 = time64 * 1000; /* Convert the time into micro sec */
         pC->mAudioSeeking = M4OSA_TRUE;
         pC->mAudioSeekTime = time64;
-        LOGV("VideoEditor3gpReader_jump AUDIO time us %ld ", time64);
+        ALOGV("VideoEditor3gpReader_jump AUDIO time us %ld ", time64);
     } else if ((pC->mVideoStreamHandler != M4OSA_NULL) &&
             (pStreamHandler->m_streamId == pC->mVideoStreamHandler->m_streamId))
             {
@@ -894,17 +894,17 @@
         time64 = time64 * 1000; /* Convert the time into micro sec */
         pC->mVideoSeeking = M4OSA_TRUE;
         pC->mVideoSeekTime = time64;
-        LOGV("VideoEditor3gpReader_jump VIDEO time us %ld ", time64);
+        ALOGV("VideoEditor3gpReader_jump VIDEO time us %ld ", time64);
     } else {
-        LOGV("VideoEditor3gpReader_jump passed StreamHandler is not known\n");
+        ALOGV("VideoEditor3gpReader_jump passed StreamHandler is not known\n");
         return M4ERR_PARAMETER;
     }
     time64 = time64 / 1000; /* Convert the time into milli sec */
-    LOGV("VideoEditor3gpReader_jump time ms before seekset %ld ", time64);
+    ALOGV("VideoEditor3gpReader_jump time ms before seekset %ld ", time64);
 
     *pTime = (M4OSA_Int32)time64;
 
-    LOGV("VideoEditor3gpReader_jump end");
+    ALOGV("VideoEditor3gpReader_jump end");
     err = M4NO_ERROR;
     return err;
 }
@@ -931,21 +931,21 @@
     M4OSA_DEBUG_IF1((pStreamHandler == 0), M4ERR_PARAMETER,
         "VideoEditor3gpReader_reset: invalid pointer to M4_StreamHandler");
 
-    LOGV("VideoEditor3gpReader_reset begin");
+    ALOGV("VideoEditor3gpReader_reset begin");
 
     if (pStreamHandler == (M4_StreamHandler*)pC->mAudioStreamHandler) {
         pAu = &pC->mAudioAu;
     } else if (pStreamHandler == (M4_StreamHandler*)pC->mVideoStreamHandler) {
         pAu = &pC->mVideoAu;
     } else {
-        LOGV("VideoEditor3gpReader_reset passed StreamHandler is not known\n");
+        ALOGV("VideoEditor3gpReader_reset passed StreamHandler is not known\n");
         return M4ERR_PARAMETER;
     }
 
     pAu->CTS = time64;
     pAu->DTS = time64;
 
-    LOGV("VideoEditor3gpReader_reset end");
+    ALOGV("VideoEditor3gpReader_reset end");
     return err;
 }
 
@@ -982,13 +982,13 @@
     M4OSA_DEBUG_IF1((pAccessUnit == 0),    M4ERR_PARAMETER,
         "VideoEditor3gpReader_getNextAu: invalid pointer to M4_AccessUnit");
 
-    LOGV("VideoEditor3gpReader_getNextAu begin");
+    ALOGV("VideoEditor3gpReader_getNextAu begin");
 
     if (pStreamHandler == (M4_StreamHandler*)pC->mAudioStreamHandler) {
-        LOGV("VideoEditor3gpReader_getNextAu audio stream");
+        ALOGV("VideoEditor3gpReader_getNextAu audio stream");
         pAu = &pC->mAudioAu;
         if (pC->mAudioSeeking == M4OSA_TRUE) {
-            LOGV("VideoEditor3gpReader_getNextAu audio seek time: %ld",
+            ALOGV("VideoEditor3gpReader_getNextAu audio seek time: %ld",
                 pC->mAudioSeekTime);
             options.setSeekTo(pC->mAudioSeekTime);
             pC->mAudioSource->read(&mMediaBuffer, &options);
@@ -999,7 +999,7 @@
             pC->mAudioSeeking = M4OSA_FALSE;
             flag = M4OSA_TRUE;
         } else {
-            LOGV("VideoEditor3gpReader_getNextAu audio no seek:");
+            ALOGV("VideoEditor3gpReader_getNextAu audio no seek:");
             pC->mAudioSource->read(&mMediaBuffer, &options);
             if (mMediaBuffer != NULL) {
                 mMediaBuffer->meta_data()->findInt64(kKeyTime,
@@ -1007,27 +1007,27 @@
             }
         }
     } else if (pStreamHandler == (M4_StreamHandler*)pC->mVideoStreamHandler) {
-        LOGV("VideoEditor3gpReader_getNextAu video steram ");
+        ALOGV("VideoEditor3gpReader_getNextAu video steram ");
         pAu = &pC->mVideoAu;
         if(pC->mVideoSeeking == M4OSA_TRUE) {
             flag = M4OSA_TRUE;
-            LOGV("VideoEditor3gpReader_getNextAu seek: %ld",pC->mVideoSeekTime);
+            ALOGV("VideoEditor3gpReader_getNextAu seek: %ld",pC->mVideoSeekTime);
             options.setSeekTo(pC->mVideoSeekTime,
                 MediaSource::ReadOptions::SEEK_PREVIOUS_SYNC);
             do
             {
                 if (mMediaBuffer != NULL) {
-                    LOGV("VideoEditor3gpReader_getNextAu free the MediaBuffer");
+                    ALOGV("VideoEditor3gpReader_getNextAu free the MediaBuffer");
                     mMediaBuffer->release();
                 }
                 error = pC->mVideoSource->read(&mMediaBuffer, &options);
-                LOGV("VE3gpReader_getNextAu MediaBuffer %x , error %d",
+                ALOGV("VE3gpReader_getNextAu MediaBuffer %x , error %d",
                     mMediaBuffer, error);
                 if (mMediaBuffer != NULL)
                 {
                     if (mMediaBuffer->meta_data()->findInt32(kKeyIsSyncFrame,
                         &i32Tmp) && i32Tmp) {
-                            LOGV("SYNC FRAME FOUND--%d", i32Tmp);
+                            ALOGV("SYNC FRAME FOUND--%d", i32Tmp);
                         pAu->attribute = AU_RAP;
                     }
                     else {
@@ -1041,17 +1041,17 @@
                 options.clearSeekTo();
             } while(tempTime64 < pC->mVideoSeekTime);
 
-            LOGV("VE3gpReader_getNextAu: video  time with seek  = %lld:",
+            ALOGV("VE3gpReader_getNextAu: video  time with seek  = %lld:",
                 tempTime64);
             pC->mVideoSeeking = M4OSA_FALSE;
         } else {
-            LOGV("VideoEditor3gpReader_getNextAu video no seek:");
+            ALOGV("VideoEditor3gpReader_getNextAu video no seek:");
             pC->mVideoSource->read(&mMediaBuffer, &options);
 
             if(mMediaBuffer != NULL) {
                 if (mMediaBuffer->meta_data()->findInt32(kKeyIsSyncFrame,
                     &i32Tmp) && i32Tmp) {
-                    LOGV("SYNC FRAME FOUND--%d", i32Tmp);
+                    ALOGV("SYNC FRAME FOUND--%d", i32Tmp);
                     pAu->attribute = AU_RAP;
                 }
                 else {
@@ -1059,14 +1059,14 @@
                 }
                 mMediaBuffer->meta_data()->findInt64(kKeyTime,
                     (int64_t*)&tempTime64);
-                LOGV("VE3gpReader_getNextAu: video no seek time = %lld:",
+                ALOGV("VE3gpReader_getNextAu: video no seek time = %lld:",
                     tempTime64);
             }else {
-                LOGV("VE3gpReader_getNextAu:video no seek time buffer is NULL");
+                ALOGV("VE3gpReader_getNextAu:video no seek time buffer is NULL");
             }
         }
     } else {
-        LOGV("VideoEditor3gpReader_getNextAu M4ERR_PARAMETER");
+        ALOGV("VideoEditor3gpReader_getNextAu M4ERR_PARAMETER");
         return M4ERR_PARAMETER;
     }
 
@@ -1077,14 +1077,14 @@
                 free((M4OSA_Int32*)pAu->dataAddress);
                 pAu->dataAddress = NULL;
             }
-            LOGV("Buffer lenght = %d ,%d",(mMediaBuffer->range_length() +\
+            ALOGV("Buffer lenght = %d ,%d",(mMediaBuffer->range_length() +\
                 3) & ~0x3,(mMediaBuffer->range_length()));
 
             pAu->dataAddress = (M4OSA_Int32*)M4OSA_32bitAlignedMalloc(
                 (mMediaBuffer->range_length() + 3) & ~0x3,M4READER_3GP,
                     (M4OSA_Char*)"pAccessUnit->m_dataAddress" );
             if(pAu->dataAddress == NULL) {
-                LOGV("VideoEditor3gpReader_getNextAu malloc failed");
+                ALOGV("VideoEditor3gpReader_getNextAu malloc failed");
                 return M4ERR_ALLOC;
             }
         }
@@ -1100,7 +1100,7 @@
             M4OSA_UInt8 *lbuffer;
 
             lbuffer = (M4OSA_UInt8 *) pAu->dataAddress;
-            LOGV("pAccessUnit->m_dataAddress size = %x",size);
+            ALOGV("pAccessUnit->m_dataAddress size = %x",size);
 
             lbuffer[0] = (size >> 24) & 0xFF;
             lbuffer[1] = (size >> 16) & 0xFF;
@@ -1111,7 +1111,7 @@
         pAu->CTS = tempTime64;
 
         pAu->CTS = pAu->CTS / 1000; //converting the microsec to millisec
-        LOGV("VideoEditor3gpReader_getNextAu CTS = %ld",pAu->CTS);
+        ALOGV("VideoEditor3gpReader_getNextAu CTS = %ld",pAu->CTS);
 
         pAu->DTS  = pAu->CTS;
         if (pStreamHandler == (M4_StreamHandler*)pC->mAudioStreamHandler) {
@@ -1127,7 +1127,7 @@
         pAccessUnit->m_attribute = pAu->attribute;
 
     } else {
-        LOGV("VideoEditor3gpReader_getNextAu: M4WAR_NO_MORE_AU (EOS) reached");
+        ALOGV("VideoEditor3gpReader_getNextAu: M4WAR_NO_MORE_AU (EOS) reached");
         pAccessUnit->m_size = 0;
         err = M4WAR_NO_MORE_AU;
     }
@@ -1135,7 +1135,7 @@
 
     pAu->nbFrag = 0;
     mMediaBuffer = NULL;
-    LOGV("VideoEditor3gpReader_getNextAu end ");
+    ALOGV("VideoEditor3gpReader_getNextAu end ");
 
     return err;
 }
@@ -1425,7 +1425,7 @@
     M4OSA_Int32  ptempTime;
     M4OSA_Int32  avgFPS=0;
 
-    LOGV("VideoEditor3gpReader_getNextStreamHandler begin");
+    ALOGV("VideoEditor3gpReader_getNextStreamHandler begin");
 
     M4OSA_DEBUG_IF1((pC == 0), M4ERR_PARAMETER,
         "VideoEditor3gpReader_getNextStreamHandler: invalid context");
@@ -1438,7 +1438,7 @@
     temp = pC->mCurrTrack;
 
     if(temp >= trackCount) {
-        LOGV("VideoEditor3gpReader_getNextStreamHandler error = %d",
+        ALOGV("VideoEditor3gpReader_getNextStreamHandler error = %d",
             M4WAR_NO_MORE_STREAM);
         return (M4WAR_NO_MORE_STREAM);
     } else {
@@ -1452,7 +1452,7 @@
 
             *pMediaFamily = M4READER_kMediaFamilyVideo;
             haveVideo = true;
-            LOGV("VideoEditor3gpReader_getNextStreamHandler getTrack called");
+            ALOGV("VideoEditor3gpReader_getNextStreamHandler getTrack called");
             if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_AVC)) {
                 streamType = M4DA_StreamTypeVideoMpeg4Avc;
             } else if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_H263)) {
@@ -1460,9 +1460,9 @@
             } else if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_MPEG4)) {
                 streamType = M4DA_StreamTypeVideoMpeg4;
             } else {
-                LOGV("VideoEditor3gpReaderGetNextStreamHandler streamTypeNONE");
+                ALOGV("VideoEditor3gpReaderGetNextStreamHandler streamTypeNONE");
             }
-            LOGV("VideoEditor3gpReader_getNextStreamHandler: stream type: %d ",
+            ALOGV("VideoEditor3gpReader_getNextStreamHandler: stream type: %d ",
                 streamType);
 
             if(streamType != M4DA_StreamTypeUnknown) {
@@ -1488,14 +1488,14 @@
                 ((*pStreamHandler)->m_duration) =
                     (int32_t)((Duration)/1000); // conversion to mS
                 pC->mMaxDuration = ((*pStreamHandler)->m_duration);
-                LOGV("VideoEditor3gpReader_getNextStreamHandler m_duration %d",
+                ALOGV("VideoEditor3gpReader_getNextStreamHandler m_duration %d",
                     (*pStreamHandler)->m_duration);
 
                 off64_t fileSize = 0;
                 pC->mDataSource->getSize(&fileSize);
                 pC->mFileSize  = fileSize;
 
-                LOGV("VideoEditor3gpReader_getNextStreamHandler m_fileSize %d",
+                ALOGV("VideoEditor3gpReader_getNextStreamHandler m_fileSize %d",
                     pC->mFileSize);
 
                 meta->findInt32(kKeyMaxInputSize, (int32_t*)&(maxAUSize));
@@ -1503,22 +1503,22 @@
                     maxAUSize = 70000;
                 }
                 (*pStreamHandler)->m_maxAUSize = maxAUSize;
-                LOGV("<<<<<<<<<<   video: mMaxAUSize from MP4 extractor: %d",
+                ALOGV("<<<<<<<<<<   video: mMaxAUSize from MP4 extractor: %d",
                     (*pStreamHandler)->m_maxAUSize);
 
                 ((M4_StreamHandler*)pVideoStreamHandler)->m_averageBitRate =
                         (pC->mFileSize * 8000)/pC->mMaxDuration;
-                LOGV("VideoEditor3gpReader_getNextStreamHandler m_averageBitrate %d",
+                ALOGV("VideoEditor3gpReader_getNextStreamHandler m_averageBitrate %d",
                     ((M4_StreamHandler*)pVideoStreamHandler)->m_averageBitRate);
 
 
                 meta->findInt32(kKeyFrameRate,
                     (int32_t*)&(avgFPS));
-                LOGV("<<<<<<<<<<   video: Average FPS from MP4 extractor: %d",
+                ALOGV("<<<<<<<<<<   video: Average FPS from MP4 extractor: %d",
                     avgFPS);
 
                 pVideoStreamHandler->m_averageFrameRate =(M4OSA_Float) avgFPS;
-                LOGV("<<<<<<<<<<   video: Average FPS from MP4 extractor in FLOAT: %f",
+                ALOGV("<<<<<<<<<<   video: Average FPS from MP4 extractor in FLOAT: %f",
                     pVideoStreamHandler->m_averageFrameRate);
 
                 // Get the video rotation degree
@@ -1557,7 +1557,7 @@
                         (*pStreamHandler)->m_pH264DecoderSpecificInfo = M4OSA_NULL;
                         (*pStreamHandler)->m_H264decoderSpecificInfoSize = 0;
                     } else {
-                        LOGV("VE_getNextStreamHandler: H263 dsi not found");
+                        ALOGV("VE_getNextStreamHandler: H263 dsi not found");
                         (*pStreamHandler)->m_pDecoderSpecificInfo = M4OSA_NULL;
                         (*pStreamHandler)->m_decoderSpecificInfoSize = 0;
                         (*pStreamHandler)->m_H264decoderSpecificInfoSize = 0;
@@ -1575,13 +1575,13 @@
                                 decoderSpecificInfoSize, M4READER_3GP,
                                 (M4OSA_Char*)"H264 DecoderSpecific" );
                             if (M4OSA_NULL == DecoderSpecificInfo) {
-                                LOGV("VideoEditor3gp_getNextStream is NULL ");
+                                ALOGV("VideoEditor3gp_getNextStream is NULL ");
                                 return M4ERR_ALLOC;
                             }
                             memcpy((void *)DecoderSpecificInfo,
                                 (void *)data, decoderSpecificInfoSize);
                         } else {
-                            LOGV("DSI Size %d", decoderSpecificInfoSize);
+                            ALOGV("DSI Size %d", decoderSpecificInfoSize);
                             DecoderSpecificInfo = M4OSA_NULL;
                         }
                     }
@@ -1594,7 +1594,7 @@
                     if (M4NO_ERROR != err) {
                         return err;
                     }
-                    LOGV("decsize %d, h264decsize %d: %d", (*pStreamHandler)->\
+                    ALOGV("decsize %d, h264decsize %d: %d", (*pStreamHandler)->\
                         m_decoderSpecificInfoSize, (*pStreamHandler)->\
                         m_H264decoderSpecificInfoSize);
 
@@ -1605,7 +1605,7 @@
                 } else if( (M4DA_StreamTypeVideoMpeg4 == streamType) ) {
                     if (meta->findData(kKeyESDS, &type, &data, &size)) {
                         ESDS esds((const char *)data, size);
-                        CHECK_EQ(esds.InitCheck(), OK);
+                        CHECK_EQ(esds.InitCheck(), (status_t)OK);
 
                         (*pStreamHandler)->m_ESDSInfoSize = size;
                         (*pStreamHandler)->m_pESDSInfo = (M4OSA_UInt8*)\
@@ -1619,7 +1619,7 @@
 
                         esds.getCodecSpecificInfo(&codec_specific_data,
                             &codec_specific_data_size);
-                        LOGV("VE MP4 dsisize: %d, %x", codec_specific_data_size,
+                        ALOGV("VE MP4 dsisize: %d, %x", codec_specific_data_size,
                             codec_specific_data);
 
                         (*pStreamHandler)->m_decoderSpecificInfoSize =
@@ -1646,17 +1646,17 @@
                         (*pStreamHandler)->m_H264decoderSpecificInfoSize = 0;
                     }
                 } else {
-                    LOGV("VideoEditor3gpReader_getNextStream NO video stream");
+                    ALOGV("VideoEditor3gpReader_getNextStream NO video stream");
                     return M4ERR_READER_UNKNOWN_STREAM_TYPE;
                 }
             }
             else {
-                LOGV("VideoEditor3gpReader_getNextStream NO video stream");
+                ALOGV("VideoEditor3gpReader_getNextStream NO video stream");
                 return M4ERR_READER_UNKNOWN_STREAM_TYPE;
             }
 
         } else if (!haveAudio && !strncasecmp(mime, "audio/", 6)) {
-            LOGV("VideoEditor3gpReader_getNextStream audio getTrack called");
+            ALOGV("VideoEditor3gpReader_getNextStream audio getTrack called");
             pC->mAudioSource = pC->mExtractor->getTrack(pC->mCurrTrack);
             pC->mAudioSource->start();
             *pMediaFamily = M4READER_kMediaFamilyAudio;
@@ -1669,13 +1669,13 @@
             else if(!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AAC)) {
                 streamType = M4DA_StreamTypeAudioAac;
             } else {
-                LOGV("VideoEditor3gpReader_getNextStrea streamtype Unknown ");
+                ALOGV("VideoEditor3gpReader_getNextStrea streamtype Unknown ");
             }
             if(streamType != M4DA_StreamTypeUnknown) {
                 pC->mStreamType = streamType;
                 pC->mStreamId = pC->mCurrTrack;
 
-                LOGV("VE streamtype %d ,id %d",  streamType, pC->mCurrTrack);
+                ALOGV("VE streamtype %d ,id %d",  streamType, pC->mCurrTrack);
 
                 pAudioStreamHandler = (M4_AudioStreamHandler*)M4OSA_32bitAlignedMalloc
                     (sizeof(M4_AudioStreamHandler), M4READER_3GP,
@@ -1703,7 +1703,7 @@
                     maxAUSize = 70000;
                 }
                 (*pStreamHandler)->m_maxAUSize = maxAUSize;
-                LOGV("VE Audio mMaxAUSize from MP4 extractor: %d", maxAUSize);
+                ALOGV("VE Audio mMaxAUSize from MP4 extractor: %d", maxAUSize);
             }
             if((M4DA_StreamTypeAudioAmrNarrowBand == streamType) ||
                 (M4DA_StreamTypeAudioAmrWideBand == streamType)) {
@@ -1718,7 +1718,7 @@
 
                 if (meta->findData(kKeyESDS, &type, &data, &size)) {
                     ESDS esds((const char *)data, size);
-                    CHECK_EQ(esds.InitCheck(), OK);
+                    CHECK_EQ(esds.InitCheck(), (status_t)OK);
 
                     esds.getCodecSpecificInfo(&codec_specific_data,
                         &codec_specific_data_size);
@@ -1765,7 +1765,7 @@
             } else if((M4DA_StreamTypeAudioAac == streamType)) {
                 if (meta->findData(kKeyESDS, &type, &data, &size)) {
                     ESDS esds((const char *)data, size);
-                    CHECK_EQ(esds.InitCheck(), OK);
+                    CHECK_EQ(esds.InitCheck(), (status_t)OK);
 
                     (*pStreamHandler)->m_ESDSInfoSize = size;
                     (*pStreamHandler)->m_pESDSInfo = (M4OSA_UInt8*)M4OSA_32bitAlignedMalloc(
@@ -1779,7 +1779,7 @@
                     esds.getCodecSpecificInfo(&codec_specific_data,
                         &codec_specific_data_size);
 
-                    LOGV("VEdsi %d,%x",codec_specific_data_size,
+                    ALOGV("VEdsi %d,%x",codec_specific_data_size,
                         codec_specific_data);
 
                     (*pStreamHandler)->m_decoderSpecificInfoSize =
@@ -1801,15 +1801,15 @@
                     }
                 }
             } else {
-                LOGV("VideoEditor3gpReader_getNextStream mStreamType: none ");
+                ALOGV("VideoEditor3gpReader_getNextStream mStreamType: none ");
                 return M4ERR_READER_UNKNOWN_STREAM_TYPE;
             }
         } else {
-            LOGV("VE noaudio-video stream:pC->mCurrTrack = %d ",pC->mCurrTrack);
+            ALOGV("VE noaudio-video stream:pC->mCurrTrack = %d ",pC->mCurrTrack);
             pC->mCurrTrack++; //Increment current track to get the next track
             return M4ERR_READER_UNKNOWN_STREAM_TYPE;
         }
-        LOGV("VE StreamType: %d, stremhandler %x",streamType, *pStreamHandler );
+        ALOGV("VE StreamType: %d, stremhandler %x",streamType, *pStreamHandler );
         (*pStreamHandler)->m_streamType = streamType;
         (*pStreamHandler)->m_streamId   = pC->mStreamId;
         (*pStreamHandler)->m_pUserData  = M4OSA_NULL;
@@ -1822,7 +1822,7 @@
         (*pStreamHandler)->m_duration = (int32_t)(Duration / 1000);
 
         pC->mMaxDuration = ((*pStreamHandler)->m_duration);
-        LOGV("VE str duration duration: %d ", (*pStreamHandler)->m_duration);
+        ALOGV("VE str duration duration: %d ", (*pStreamHandler)->m_duration);
 
         /* In AAC case: Put the first AU in pAudioStreamHandler->m_pUserData
          *since decoder has to know if stream contains SBR data(Implicit sig) */
@@ -1877,14 +1877,14 @@
         }
     }
     pC->mCurrTrack++; //Increment the current track to get next track
-    LOGV("pC->mCurrTrack = %d",pC->mCurrTrack);
+    ALOGV("pC->mCurrTrack = %d",pC->mCurrTrack);
 
     if (!haveAudio && !haveVideo) {
         *pMediaFamily=M4READER_kMediaFamilyUnknown;
         return M4ERR_READER_UNKNOWN_STREAM_TYPE;
     }
 Error:
-    LOGV("VideoEditor3gpReader_getNextStreamHandler end error = %d",err);
+    ALOGV("VideoEditor3gpReader_getNextStreamHandler end error = %d",err);
     return err;
 }
 
@@ -1899,7 +1899,7 @@
     int64_t tempTime64 = 0;
     status_t error;
 
-    LOGV("VideoEditor3gpReader_getPrevRapTime begin");
+    ALOGV("VideoEditor3gpReader_getPrevRapTime begin");
 
     M4OSA_DEBUG_IF1((pC == 0), M4ERR_PARAMETER,
         "VideoEditor3gpReader_getPrevRapTime: invalid context");
@@ -1913,7 +1913,7 @@
 
     time64 = (M4OSA_Time)*pTime * 1000;
 
-    LOGV("VideoEditor3gpReader_getPrevRapTime seek time: %ld",time64);
+    ALOGV("VideoEditor3gpReader_getPrevRapTime seek time: %ld",time64);
     options.setSeekTo(time64, MediaSource::ReadOptions::SEEK_PREVIOUS_SYNC);
     error = pC->mVideoSource->read(&mMediaBuffer, &options);
     if (error != OK) {
@@ -1923,13 +1923,13 @@
     }
 
     mMediaBuffer->meta_data()->findInt64(kKeyTime, (int64_t*)&tempTime64);
-    LOGV("VideoEditor3gpReader_getPrevRapTime read time %ld, %x", tempTime64,
+    ALOGV("VideoEditor3gpReader_getPrevRapTime read time %ld, %x", tempTime64,
         mMediaBuffer);
 
     *pTime = (M4OSA_Int32)(tempTime64 / 1000);
 
     if(mMediaBuffer != M4OSA_NULL) {
-        LOGV(" mMediaBuffer size = %d length %d", mMediaBuffer->size(),
+        ALOGV(" mMediaBuffer size = %d length %d", mMediaBuffer->size(),
             mMediaBuffer->range_length());
         mMediaBuffer->release();
         mMediaBuffer = M4OSA_NULL;
@@ -1937,11 +1937,11 @@
     options.clearSeekTo();
 
     if(error != OK) {
-        LOGV("VideoEditor3gpReader_getPrevRapTime end \
+        ALOGV("VideoEditor3gpReader_getPrevRapTime end \
             M4WAR_READER_INFORMATION_NOT_PRESENT");
         return M4WAR_READER_INFORMATION_NOT_PRESENT;
     } else {
-        LOGV("VideoEditor3gpReader_getPrevRapTime end: err %x", err);
+        ALOGV("VideoEditor3gpReader_getPrevRapTime end: err %x", err);
         err = M4NO_ERROR;
         return err;
     }
@@ -1958,8 +1958,8 @@
     VIDEOEDITOR_CHECK(M4OSA_NULL != pRdrGlobalInterface, M4ERR_PARAMETER);
     VIDEOEDITOR_CHECK(M4OSA_NULL != pRdrDataInterface, M4ERR_PARAMETER);
 
-    LOGV("VideoEditor3gpReader_getInterface begin");
-    LOGV("VideoEditor3gpReader_getInterface %d 0x%x 0x%x", *pMediaType,
+    ALOGV("VideoEditor3gpReader_getInterface begin");
+    ALOGV("VideoEditor3gpReader_getInterface %d 0x%x 0x%x", *pMediaType,
         *pRdrGlobalInterface,*pRdrDataInterface);
 
     SAFE_MALLOC(*pRdrGlobalInterface, M4READER_GlobalInterface, 1,
@@ -1990,14 +1990,14 @@
 
 cleanUp:
     if( M4NO_ERROR == err ) {
-        LOGV("VideoEditor3gpReader_getInterface no error");
+        ALOGV("VideoEditor3gpReader_getInterface no error");
     } else {
         SAFE_FREE(*pRdrGlobalInterface);
         SAFE_FREE(*pRdrDataInterface);
 
-        LOGV("VideoEditor3gpReader_getInterface ERROR 0x%X", err);
+        ALOGV("VideoEditor3gpReader_getInterface ERROR 0x%X", err);
     }
-    LOGV("VideoEditor3gpReader_getInterface end");
+    ALOGV("VideoEditor3gpReader_getInterface end");
     return err;
 }
 
diff --git a/libvideoeditor/vss/stagefrightshells/src/VideoEditorAudioDecoder.cpp b/libvideoeditor/vss/stagefrightshells/src/VideoEditorAudioDecoder.cpp
index 38e667c..9b35d07 100755
--- a/libvideoeditor/vss/stagefrightshells/src/VideoEditorAudioDecoder.cpp
+++ b/libvideoeditor/vss/stagefrightshells/src/VideoEditorAudioDecoder.cpp
@@ -30,8 +30,8 @@
 
 #include "utils/Log.h"
 #include "utils/Vector.h"
+#include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/MediaSource.h>
-#include <media/stagefright/MediaDebug.h>
 #include <media/stagefright/MediaDefs.h>
 #include <media/stagefright/MetaData.h>
 #include <media/stagefright/OMXClient.h>
@@ -136,14 +136,14 @@
     status_t err = OK;
 
     if( CREATED != mState ) {
-        LOGV("VideoEditorAudioDecoderSource::start: invalid state %d", mState);
+        ALOGV("VideoEditorAudioDecoderSource::start: invalid state %d", mState);
         return UNKNOWN_ERROR;
     }
 
     mState = STARTED;
 
 cleanUp:
-    LOGV("VideoEditorAudioDecoderSource::start END (0x%x)", err);
+    ALOGV("VideoEditorAudioDecoderSource::start END (0x%x)", err);
     return err;
 }
 
@@ -151,10 +151,10 @@
     Mutex::Autolock autolock(mLock);
     status_t err = OK;
 
-    LOGV("VideoEditorAudioDecoderSource::stop begin");
+    ALOGV("VideoEditorAudioDecoderSource::stop begin");
 
     if( STARTED != mState ) {
-        LOGV("VideoEditorAudioDecoderSource::stop: invalid state %d", mState);
+        ALOGV("VideoEditorAudioDecoderSource::stop: invalid state %d", mState);
         return UNKNOWN_ERROR;
     }
 
@@ -163,19 +163,19 @@
         for (int i = 0; i < n; i++) {
             mBuffers.itemAt(i)->release();
         }
-        LOGW("VideoEditorAudioDecoderSource::stop : %d buffer remained", n);
+        ALOGW("VideoEditorAudioDecoderSource::stop : %d buffer remained", n);
         mBuffers.clear();
     }
 
     mState = CREATED;
 
-    LOGV("VideoEditorAudioDecoderSource::stop END (0x%x)", err);
+    ALOGV("VideoEditorAudioDecoderSource::stop END (0x%x)", err);
     return err;
 }
 
 sp<MetaData> VideoEditorAudioDecoderSource::getFormat() {
 
-    LOGV("VideoEditorAudioDecoderSource::getFormat");
+    ALOGV("VideoEditorAudioDecoderSource::getFormat");
     return mFormat;
 }
 
@@ -191,7 +191,7 @@
                pAccessUnit);
 
     if (lerr == M4WAR_NO_MORE_AU) {
-        LOGV("readBufferFromReader : EOS");
+        ALOGV("readBufferFromReader : EOS");
         return NULL;
     }
 
@@ -213,7 +213,7 @@
      (VideoEditorAudioDecoder_Context *)mDecShellContext;
 
     if ( STARTED != mState ) {
-        LOGV("VideoEditorAudioDecoderSource::read invalid state %d", mState);
+        ALOGV("VideoEditorAudioDecoderSource::read invalid state %d", mState);
         return UNKNOWN_ERROR;
     }
 
@@ -238,7 +238,7 @@
     VideoEditorAudioDecoder_Context* pDecContext =
      (VideoEditorAudioDecoder_Context *)mDecShellContext;
 
-    LOGV("VideoEditorAudioDecoderSource::storeBuffer begin");
+    ALOGV("VideoEditorAudioDecoderSource::storeBuffer begin");
 
     // If the user didn't give us a buffer, get it from the reader.
     if(buffer == NULL) {
@@ -251,7 +251,7 @@
     }
 
     mBuffers.push(buffer);
-    LOGV("VideoEditorAudioDecoderSource::storeBuffer END");
+    ALOGV("VideoEditorAudioDecoderSource::storeBuffer END");
 }
 
 /********************
@@ -278,7 +278,7 @@
     VIDEOEDITOR_CHECK(32 >= nbBits, M4ERR_PARAMETER);
     VIDEOEDITOR_CHECK((*pOffset + nbBits) <= 8*dataSize, M4ERR_PARAMETER);
 
-    LOGV("VideoEditorAudioDecoder_getBits begin");
+    ALOGV("VideoEditorAudioDecoder_getBits begin");
 
     startByte   = (*pOffset) >> 3;
     endByte     = (*pOffset + nbBits) >> 3;
@@ -308,11 +308,11 @@
 
 cleanUp:
     if( M4NO_ERROR == err ) {
-        LOGV("VideoEditorAudioDecoder_getBits no error");
+        ALOGV("VideoEditorAudioDecoder_getBits no error");
     } else {
-        LOGV("VideoEditorAudioDecoder_getBits ERROR 0x%X", err);
+        ALOGV("VideoEditorAudioDecoder_getBits ERROR 0x%X", err);
     }
-    LOGV("VideoEditorAudioDecoder_getBits end");
+    ALOGV("VideoEditorAudioDecoder_getBits end");
     return err;
 }
 
@@ -330,7 +330,7 @@
     M4OSA_UInt32 offset = 0;
     M4OSA_Int32 result = 0;
 
-    LOGV("VideoEditorAudioDecoder_parse_AAC_DSI begin");
+    ALOGV("VideoEditorAudioDecoder_parse_AAC_DSI begin");
 
     // Input parameters check
     VIDEOEDITOR_CHECK(M4OSA_NULL != pDSI, M4ERR_PARAMETER);
@@ -356,7 +356,7 @@
             pProperties->aSBRPresent = 1;
             break;
         default:
-            LOGV("parse_AAC_DSI ERROR : object type %d is not supported",
+            ALOGV("parse_AAC_DSI ERROR : object type %d is not supported",
                 result);
             VIDEOEDITOR_CHECK(!"invalid AAC object type", M4ERR_BAD_OPTION_ID);
             break;
@@ -381,11 +381,11 @@
 
 cleanUp:
     if( M4NO_ERROR == err ) {
-        LOGV("VideoEditorAudioDecoder_parse_AAC_DSI no error");
+        ALOGV("VideoEditorAudioDecoder_parse_AAC_DSI no error");
     } else {
-        LOGV("VideoEditorAudioDecoder_parse_AAC_DSI ERROR 0x%X", err);
+        ALOGV("VideoEditorAudioDecoder_parse_AAC_DSI ERROR 0x%X", err);
     }
-    LOGV("VideoEditorAudioDecoder_parse_AAC_DSI end");
+    ALOGV("VideoEditorAudioDecoder_parse_AAC_DSI end");
     return err;
 }
 
@@ -397,7 +397,7 @@
     M4OSA_ERR err = M4NO_ERROR;
     VideoEditorAudioDecoder_Context* pDecoderContext = M4OSA_NULL;
 
-    LOGV("VideoEditorAudioDecoder_destroy begin");
+    ALOGV("VideoEditorAudioDecoder_destroy begin");
     // Input parameters check
     VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER);
 
@@ -415,15 +415,15 @@
 
     SAFE_FREE(pDecoderContext);
     pContext = M4OSA_NULL;
-    LOGV("VideoEditorAudioDecoder_destroy : DONE");
+    ALOGV("VideoEditorAudioDecoder_destroy : DONE");
 
 cleanUp:
     if( M4NO_ERROR == err ) {
-        LOGV("VideoEditorAudioDecoder_destroy no error");
+        ALOGV("VideoEditorAudioDecoder_destroy no error");
     } else {
-        LOGV("VideoEditorAudioDecoder_destroy ERROR 0x%X", err);
+        ALOGV("VideoEditorAudioDecoder_destroy ERROR 0x%X", err);
     }
-    LOGV("VideoEditorAudioDecoder_destroy : end");
+    ALOGV("VideoEditorAudioDecoder_destroy : end");
     return err;
 }
 
@@ -438,7 +438,7 @@
     const char* mime = NULL;
     uint32_t codecFlags = 0;
 
-    LOGV("VideoEditorAudioDecoder_create begin: decoderType %d", decoderType);
+    ALOGV("VideoEditorAudioDecoder_create begin: decoderType %d", decoderType);
 
     // Input parameters check
     VIDEOEDITOR_CHECK(M4OSA_NULL != pContext,       M4ERR_PARAMETER);
@@ -455,7 +455,7 @@
     pDecoderContext->readerErrCode = M4NO_ERROR;
     pDecoderContext->timeStampMs = -1;
 
-    LOGV("VideoEditorAudioDecoder_create : maxAUSize %d",
+    ALOGV("VideoEditorAudioDecoder_create : maxAUSize %d",
         pDecoderContext->mAudioStreamHandler->m_basicProperties.m_maxAUSize);
 
     // Create the meta data for the decoder
@@ -564,7 +564,7 @@
     // Get the output channels, the decoder might overwrite the input metadata
     pDecoderContext->mDecoder->getFormat()->findInt32(kKeyChannelCount,
         &pDecoderContext->mNbOutputChannels);
-    LOGV("VideoEditorAudioDecoder_create : output chan %d",
+    ALOGV("VideoEditorAudioDecoder_create : output chan %d",
         pDecoderContext->mNbOutputChannels);
 
     // Start the decoder
@@ -572,15 +572,15 @@
     VIDEOEDITOR_CHECK(OK == result, M4ERR_STATE);
 
     *pContext = pDecoderContext;
-    LOGV("VideoEditorAudioDecoder_create : DONE");
+    ALOGV("VideoEditorAudioDecoder_create : DONE");
 
 cleanUp:
     if( M4NO_ERROR == err ) {
-        LOGV("VideoEditorAudioDecoder_create no error");
+        ALOGV("VideoEditorAudioDecoder_create no error");
     } else {
         VideoEditorAudioDecoder_destroy(pDecoderContext);
         *pContext = M4OSA_NULL;
-        LOGV("VideoEditorAudioDecoder_create ERROR 0x%X", err);
+        ALOGV("VideoEditorAudioDecoder_create ERROR 0x%X", err);
     }
     return err;
 }
@@ -622,7 +622,7 @@
     VideoEditorAudioDecoder_Context* pDecoderContext = M4OSA_NULL;
     MediaBuffer* buffer = NULL;
 
-    LOGV("VideoEditorAudioDecoder_processInputBuffer begin");
+    ALOGV("VideoEditorAudioDecoder_processInputBuffer begin");
     // Input parameters check
     VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER);
 
@@ -639,11 +639,11 @@
 
 cleanUp:
     if( M4NO_ERROR == err ) {
-        LOGV("VideoEditorAudioDecoder_processInputBuffer no error");
+        ALOGV("VideoEditorAudioDecoder_processInputBuffer no error");
     } else {
-        LOGV("VideoEditorAudioDecoder_processInputBuffer ERROR 0x%X", err);
+        ALOGV("VideoEditorAudioDecoder_processInputBuffer ERROR 0x%X", err);
     }
-    LOGV("VideoEditorAudioDecoder_processInputBuffer end");
+    ALOGV("VideoEditorAudioDecoder_processInputBuffer end");
     return err;
 }
 
@@ -655,7 +655,7 @@
     int64_t i64Tmp = 0;
     status_t result = OK;
 
-    LOGV("VideoEditorAudioDecoder_processOutputBuffer begin");
+    ALOGV("VideoEditorAudioDecoder_processOutputBuffer begin");
     // Input parameters check
     VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER);
     VIDEOEDITOR_CHECK(M4OSA_NULL != buffer, M4ERR_PARAMETER);
@@ -701,12 +701,12 @@
     // Release the buffer
     buffer->release();
     if( M4NO_ERROR == err ) {
-        LOGV("VideoEditorAudioDecoder_processOutputBuffer no error");
+        ALOGV("VideoEditorAudioDecoder_processOutputBuffer no error");
     } else {
         pOuputBuffer->m_bufferSize = 0;
-        LOGV("VideoEditorAudioDecoder_processOutputBuffer ERROR 0x%X", err);
+        ALOGV("VideoEditorAudioDecoder_processOutputBuffer ERROR 0x%X", err);
     }
-    LOGV("VideoEditorAudioDecoder_processOutputBuffer end");
+    ALOGV("VideoEditorAudioDecoder_processOutputBuffer end");
     return err;
 }
 
@@ -718,7 +718,7 @@
     status_t result = OK;
     MediaBuffer* outputBuffer = NULL;
 
-    LOGV("VideoEditorAudioDecoder_step begin");
+    ALOGV("VideoEditorAudioDecoder_step begin");
     // Input parameters check
     VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER);
 
@@ -733,7 +733,7 @@
     // Read
     result = pDecoderContext->mDecoder->read(&outputBuffer, NULL);
     if (INFO_FORMAT_CHANGED == result) {
-        LOGV("VideoEditorAudioDecoder_step: Audio decoder \
+        ALOGV("VideoEditorAudioDecoder_step: Audio decoder \
          returned INFO_FORMAT_CHANGED");
         CHECK(outputBuffer == NULL);
         sp<MetaData> meta = pDecoderContext->mDecoder->getFormat();
@@ -741,8 +741,8 @@
 
         CHECK(meta->findInt32(kKeySampleRate, &sampleRate));
         CHECK(meta->findInt32(kKeyChannelCount, &channelCount));
-        LOGV("VideoEditorAudioDecoder_step: samplingFreq = %d", sampleRate);
-        LOGV("VideoEditorAudioDecoder_step: channelCnt = %d", channelCount);
+        ALOGV("VideoEditorAudioDecoder_step: samplingFreq = %d", sampleRate);
+        ALOGV("VideoEditorAudioDecoder_step: channelCnt = %d", channelCount);
         pDecoderContext->mAudioStreamHandler->m_samplingFrequency =
          (uint32_t)sampleRate;
         pDecoderContext->mAudioStreamHandler->m_nbChannels =
@@ -751,7 +751,7 @@
 
         return M4WAR_INFO_FORMAT_CHANGE;
     } else if (ERROR_END_OF_STREAM == result) {
-        LOGV("VideoEditorAudioDecoder_step: Audio decoder \
+        ALOGV("VideoEditorAudioDecoder_step: Audio decoder \
          returned ERROR_END_OF_STREAM");
         pDecoderContext->readerErrCode = M4WAR_NO_MORE_AU;
         return M4WAR_NO_MORE_AU;
@@ -766,18 +766,18 @@
 
 cleanUp:
     if( M4NO_ERROR == err ) {
-        LOGV("VideoEditorAudioDecoder_step no error");
+        ALOGV("VideoEditorAudioDecoder_step no error");
     } else {
-        LOGV("VideoEditorAudioDecoder_step ERROR 0x%X", err);
+        ALOGV("VideoEditorAudioDecoder_step ERROR 0x%X", err);
     }
-    LOGV("VideoEditorAudioDecoder_step end");
+    ALOGV("VideoEditorAudioDecoder_step end");
     return err;
 }
 
 M4OSA_ERR VideoEditorAudioDecoder_getVersion(M4_VersionInfo* pVersionInfo) {
     M4OSA_ERR err = M4NO_ERROR;
 
-    LOGV("VideoEditorAudioDecoder_getVersion begin");
+    ALOGV("VideoEditorAudioDecoder_getVersion begin");
     // Input parameters check
     VIDEOEDITOR_CHECK(M4OSA_NULL != pVersionInfo, M4ERR_PARAMETER);
 
@@ -788,11 +788,11 @@
 
 cleanUp:
     if( M4NO_ERROR == err ) {
-        LOGV("VideoEditorAudioDecoder_getVersion no error");
+        ALOGV("VideoEditorAudioDecoder_getVersion no error");
     } else {
-        LOGV("VideoEditorAudioDecoder_getVersion ERROR 0x%X", err);
+        ALOGV("VideoEditorAudioDecoder_getVersion ERROR 0x%X", err);
     }
-    LOGV("VideoEditorAudioDecoder_getVersion end");
+    ALOGV("VideoEditorAudioDecoder_getVersion end");
     return err;
 }
 
@@ -802,7 +802,7 @@
     M4OSA_ERR err = M4NO_ERROR;
     VideoEditorAudioDecoder_Context* pDecoderContext = M4OSA_NULL;
 
-    LOGV("VideoEditorAudioDecoder_setOption begin 0x%X", optionID);
+    ALOGV("VideoEditorAudioDecoder_setOption begin 0x%X", optionID);
     // Input parameters check
     VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER);
 
@@ -810,24 +810,24 @@
 
     switch( optionID ) {
         case M4AD_kOptionID_UserParam:
-            LOGV("VideoEditorAudioDecodersetOption UserParam is not supported");
+            ALOGV("VideoEditorAudioDecodersetOption UserParam is not supported");
             err = M4ERR_NOT_IMPLEMENTED;
             break;
 
         case M4AD_kOptionID_3gpReaderInterface:
-            LOGV("VideoEditorAudioDecodersetOption 3gpReaderInterface");
+            ALOGV("VideoEditorAudioDecodersetOption 3gpReaderInterface");
             pDecoderContext->m_pReader =
              (M4READER_DataInterface *)optionValue;
             break;
 
         case M4AD_kOptionID_AudioAU:
-            LOGV("VideoEditorAudioDecodersetOption AudioAU");
+            ALOGV("VideoEditorAudioDecodersetOption AudioAU");
             pDecoderContext->m_pNextAccessUnitToDecode =
              (M4_AccessUnit *)optionValue;
             break;
 
         default:
-            LOGV("VideoEditorAudioDecoder_setOption  unsupported optionId 0x%X",
+            ALOGV("VideoEditorAudioDecoder_setOption  unsupported optionId 0x%X",
                 optionID);
             VIDEOEDITOR_CHECK(M4OSA_FALSE, M4ERR_BAD_OPTION_ID);
             break;
@@ -835,11 +835,11 @@
 
 cleanUp:
     if( ((M4OSA_UInt32)M4NO_ERROR == err) || ((M4OSA_UInt32)M4ERR_NOT_IMPLEMENTED == err) ) {
-        LOGV("VideoEditorAudioDecoder_setOption error 0x%X", err);
+        ALOGV("VideoEditorAudioDecoder_setOption error 0x%X", err);
     } else {
-        LOGV("VideoEditorAudioDecoder_setOption ERROR 0x%X", err);
+        ALOGV("VideoEditorAudioDecoder_setOption ERROR 0x%X", err);
     }
-    LOGV("VideoEditorAudioDecoder_setOption end");
+    ALOGV("VideoEditorAudioDecoder_setOption end");
     return err;
 }
 
@@ -849,7 +849,7 @@
     M4OSA_ERR err = M4NO_ERROR;
     VideoEditorAudioDecoder_Context* pDecoderContext = M4OSA_NULL;
 
-    LOGV("VideoEditorAudioDecoder_getOption begin: optionID 0x%X", optionID);
+    ALOGV("VideoEditorAudioDecoder_getOption begin: optionID 0x%X", optionID);
     // Input parameters check
     VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER);
 
@@ -876,7 +876,7 @@
             break;
 
         default:
-            LOGV("VideoEditorAudioDecoder_getOption unsupported optionId 0x%X",
+            ALOGV("VideoEditorAudioDecoder_getOption unsupported optionId 0x%X",
                 optionID);
             VIDEOEDITOR_CHECK(M4OSA_FALSE, M4ERR_BAD_OPTION_ID);
             break;
@@ -884,11 +884,11 @@
 
 cleanUp:
     if( M4NO_ERROR == err ) {
-        LOGV("VideoEditorAudioDecoder_getOption no error");
+        ALOGV("VideoEditorAudioDecoder_getOption no error");
     } else {
-        LOGV("VideoEditorAudioDecoder_getOption ERROR 0x%X", err);
+        ALOGV("VideoEditorAudioDecoder_getOption ERROR 0x%X", err);
     }
-    LOGV("VideoEditorAudioDecoder_getOption end");
+    ALOGV("VideoEditorAudioDecoder_getOption end");
     return err;
 }
 
@@ -901,7 +901,7 @@
     VIDEOEDITOR_CHECK(M4OSA_NULL != pDecoderType, M4ERR_PARAMETER);
     VIDEOEDITOR_CHECK(M4OSA_NULL != pDecoderInterface, M4ERR_PARAMETER);
 
-    LOGV("VideoEditorAudioDecoder_getInterface begin %d 0x%x 0x%x",
+    ALOGV("VideoEditorAudioDecoder_getInterface begin %d 0x%x 0x%x",
         decoderType, pDecoderType, pDecoderInterface);
 
     SAFE_MALLOC(*pDecoderInterface, M4AD_Interface, 1,
@@ -927,7 +927,7 @@
                 VideoEditorAudioDecoder_create_MP3;
             break;
         default:
-            LOGV("VEAD_getInterface ERROR: unsupported type %d", decoderType);
+            ALOGV("VEAD_getInterface ERROR: unsupported type %d", decoderType);
             VIDEOEDITOR_CHECK(M4OSA_FALSE, M4ERR_PARAMETER);
         break;
     }
@@ -946,12 +946,12 @@
 
 cleanUp:
     if( M4NO_ERROR == err ) {
-        LOGV("VideoEditorAudioDecoder_getInterface no error");
+        ALOGV("VideoEditorAudioDecoder_getInterface no error");
     } else {
         *pDecoderInterface = M4OSA_NULL;
-        LOGV("VideoEditorAudioDecoder_getInterface ERROR 0x%X", err);
+        ALOGV("VideoEditorAudioDecoder_getInterface ERROR 0x%X", err);
     }
-    LOGV("VideoEditorAudioDecoder_getInterface end");
+    ALOGV("VideoEditorAudioDecoder_getInterface end");
     return err;
 }
 
@@ -960,14 +960,14 @@
 
 M4OSA_ERR VideoEditorAudioDecoder_getInterface_AAC(M4AD_Type* pDecoderType,
         M4AD_Interface** pDecoderInterface) {
-    LOGV("TEST: AAC VideoEditorAudioDecoder_getInterface no error");
+    ALOGV("TEST: AAC VideoEditorAudioDecoder_getInterface no error");
     return VideoEditorAudioDecoder_getInterface(
         M4AD_kTypeAAC, pDecoderType, pDecoderInterface);
 }
 
 M4OSA_ERR VideoEditorAudioDecoder_getInterface_AMRNB(M4AD_Type* pDecoderType,
         M4AD_Interface** pDecoderInterface) {
-    LOGV("TEST: AMR VideoEditorAudioDecoder_getInterface no error");
+    ALOGV("TEST: AMR VideoEditorAudioDecoder_getInterface no error");
     return VideoEditorAudioDecoder_getInterface(
         M4AD_kTypeAMRNB, pDecoderType, pDecoderInterface);
 }
diff --git a/libvideoeditor/vss/stagefrightshells/src/VideoEditorAudioEncoder.cpp b/libvideoeditor/vss/stagefrightshells/src/VideoEditorAudioEncoder.cpp
index 588428e..a91f3ee 100755
--- a/libvideoeditor/vss/stagefrightshells/src/VideoEditorAudioEncoder.cpp
+++ b/libvideoeditor/vss/stagefrightshells/src/VideoEditorAudioEncoder.cpp
@@ -28,8 +28,8 @@
 #include "VideoEditorUtils.h"
 
 #include "utils/Log.h"
+#include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/MediaSource.h>
-#include <media/stagefright/MediaDebug.h>
 #include <media/stagefright/MediaDefs.h>
 #include <media/stagefright/MetaData.h>
 #include <media/stagefright/OMXClient.h>
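
The swap above replaces MediaDebug.h with foundation/ADebug.h, the header that carries the CHECK-style assertion macros these shells rely on (for instance the decoder shell's CHECK(meta->findInt32(...)) calls). A minimal sketch of that usage, assuming the ADebug.h macros CHECK and CHECK_GT and the kKeySampleRate metadata key; the tag and function name are illustrative only:

#define LOG_TAG "VideoEditorShellSketch"          // illustrative tag
#include <utils/Log.h>
#include <media/stagefright/foundation/ADebug.h>  // CHECK / CHECK_GT
#include <media/stagefright/MetaData.h>

// Aborts with a readable message when the metadata is missing or bogus,
// mirroring how the shells validate decoder output metadata.
static void sketchCheckMetadata(const android::sp<android::MetaData> &meta) {
    int32_t sampleRate = 0;
    CHECK(meta->findInt32(android::kKeySampleRate, &sampleRate));
    CHECK_GT(sampleRate, 0);
}
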
@@ -82,7 +82,7 @@
 sp<VideoEditorAudioEncoderSource> VideoEditorAudioEncoderSource::Create(
     const sp<MetaData> &format) {
 
-    LOGV("VideoEditorAudioEncoderSource::Create");
+    ALOGV("VideoEditorAudioEncoderSource::Create");
     sp<VideoEditorAudioEncoderSource> aSource =
         new VideoEditorAudioEncoderSource(format);
 
@@ -96,12 +96,12 @@
         mNbBuffer(0),
         mState(CREATED),
         mEncFormat(format) {
-    LOGV("VideoEditorAudioEncoderSource::VideoEditorAudioEncoderSource");
+    ALOGV("VideoEditorAudioEncoderSource::VideoEditorAudioEncoderSource");
 }
 
 
 VideoEditorAudioEncoderSource::~VideoEditorAudioEncoderSource() {
-    LOGV("VideoEditorAudioEncoderSource::~VideoEditorAudioEncoderSource");
+    ALOGV("VideoEditorAudioEncoderSource::~VideoEditorAudioEncoderSource");
 
     if( STARTED == mState ) {
         stop();
@@ -111,10 +111,10 @@
 status_t VideoEditorAudioEncoderSource::start(MetaData *meta) {
     status_t err = OK;
 
-    LOGV("VideoEditorAudioEncoderSource::start");
+    ALOGV("VideoEditorAudioEncoderSource::start");
 
     if( CREATED != mState ) {
-        LOGV("VideoEditorAudioEncoderSource::start ERROR : invalid state %d",
+        ALOGV("VideoEditorAudioEncoderSource::start ERROR : invalid state %d",
             mState);
         return UNKNOWN_ERROR;
     }
@@ -122,17 +122,17 @@
     mState = STARTED;
 
 cleanUp:
-    LOGV("VideoEditorAudioEncoderSource::start END (0x%x)", err);
+    ALOGV("VideoEditorAudioEncoderSource::start END (0x%x)", err);
     return err;
 }
 
 status_t VideoEditorAudioEncoderSource::stop() {
     status_t err = OK;
 
-    LOGV("VideoEditorAudioEncoderSource::stop");
+    ALOGV("VideoEditorAudioEncoderSource::stop");
 
     if( STARTED != mState ) {
-        LOGV("VideoEditorAudioEncoderSource::stop ERROR: invalid state %d",
+        ALOGV("VideoEditorAudioEncoderSource::stop ERROR: invalid state %d",
             mState);
         return UNKNOWN_ERROR;
     }
@@ -145,18 +145,18 @@
         mFirstBufferLink = mFirstBufferLink->nextLink;
         delete tmpLink;
     }
-    LOGV("VideoEditorAudioEncoderSource::stop : %d buffer remained", i);
+    ALOGV("VideoEditorAudioEncoderSource::stop : %d buffer remained", i);
     mFirstBufferLink = NULL;
     mLastBufferLink = NULL;
 
     mState = CREATED;
 
-    LOGV("VideoEditorAudioEncoderSource::stop END (0x%x)", err);
+    ALOGV("VideoEditorAudioEncoderSource::stop END (0x%x)", err);
     return err;
 }
 
 sp<MetaData> VideoEditorAudioEncoderSource::getFormat() {
-    LOGV("VideoEditorAudioEncoderSource::getFormat");
+    ALOGV("VideoEditorAudioEncoderSource::getFormat");
     return mEncFormat;
 }
 
@@ -166,17 +166,17 @@
     status_t err = OK;
     MediaBufferChain* tmpLink = NULL;
 
-    LOGV("VideoEditorAudioEncoderSource::read");
+    ALOGV("VideoEditorAudioEncoderSource::read");
 
     if ( STARTED != mState ) {
-        LOGV("VideoEditorAudioEncoderSource::read ERROR : invalid state %d",
+        ALOGV("VideoEditorAudioEncoderSource::read ERROR : invalid state %d",
             mState);
         return UNKNOWN_ERROR;
     }
 
     if( NULL == mFirstBufferLink ) {
         *buffer = NULL;
-        LOGV("VideoEditorAudioEncoderSource::read : EOS");
+        ALOGV("VideoEditorAudioEncoderSource::read : EOS");
         return ERROR_END_OF_STREAM;
     }
     *buffer = mFirstBufferLink->buffer;
@@ -189,14 +189,14 @@
     delete tmpLink;
     mNbBuffer--;
 
-    LOGV("VideoEditorAudioEncoderSource::read END (0x%x)", err);
+    ALOGV("VideoEditorAudioEncoderSource::read END (0x%x)", err);
     return err;
 }
 
 int32_t VideoEditorAudioEncoderSource::storeBuffer(MediaBuffer *buffer) {
     status_t err = OK;
 
-    LOGV("VideoEditorAudioEncoderSource::storeBuffer");
+    ALOGV("VideoEditorAudioEncoderSource::storeBuffer");
 
     MediaBufferChain* newLink = new MediaBufferChain;
     newLink->buffer = buffer;
@@ -209,7 +209,7 @@
     mLastBufferLink = newLink;
     mNbBuffer++;
 
-    LOGV("VideoEditorAudioEncoderSource::storeBuffer END");
+    ALOGV("VideoEditorAudioEncoderSource::storeBuffer END");
     return mNbBuffer;
 }
 
@@ -241,7 +241,7 @@
     M4OSA_ERR err = M4NO_ERROR;
     VideoEditorAudioEncoder_Context* pEncoderContext = M4OSA_NULL;
 
-    LOGV("VideoEditorAudioEncoder_cleanup begin");
+    ALOGV("VideoEditorAudioEncoder_cleanup begin");
     VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER);
     pEncoderContext = (VideoEditorAudioEncoder_Context*)pContext;
 
@@ -251,11 +251,11 @@
 
 cleanUp:
     if( M4NO_ERROR == err ) {
-        LOGV("VideoEditorAudioEncoder_cleanup no error");
+        ALOGV("VideoEditorAudioEncoder_cleanup no error");
     } else {
-        LOGV("VideoEditorAudioEncoder_cleanup ERROR 0x%X", err);
+        ALOGV("VideoEditorAudioEncoder_cleanup ERROR 0x%X", err);
     }
-    LOGV("VideoEditorAudioEncoder_cleanup end");
+    ALOGV("VideoEditorAudioEncoder_cleanup end");
     return err;
 }
 
@@ -265,7 +265,7 @@
     M4OSA_ERR err = M4NO_ERROR;
     VideoEditorAudioEncoder_Context* pEncoderContext = M4OSA_NULL;
 
-    LOGV(" VideoEditorAudioEncoder_init begin: format %d", format);
+    ALOGV(" VideoEditorAudioEncoder_init begin: format %d", format);
     VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER);
 
     SAFE_MALLOC(pEncoderContext, VideoEditorAudioEncoder_Context, 1,
@@ -276,13 +276,13 @@
 
 cleanUp:
     if( M4NO_ERROR == err ) {
-        LOGV("VideoEditorAudioEncoder_init no error");
+        ALOGV("VideoEditorAudioEncoder_init no error");
     } else {
         VideoEditorAudioEncoder_cleanup(pEncoderContext);
         *pContext = M4OSA_NULL;
-        LOGV("VideoEditorAudioEncoder_init ERROR 0x%X", err);
+        ALOGV("VideoEditorAudioEncoder_init ERROR 0x%X", err);
     }
-    LOGV("VideoEditorAudioEncoder_init end");
+    ALOGV("VideoEditorAudioEncoder_init end");
     return err;
 }
 
@@ -306,7 +306,7 @@
     M4OSA_ERR err = M4NO_ERROR;
     VideoEditorAudioEncoder_Context* pEncoderContext = M4OSA_NULL;
 
-    LOGV("VideoEditorAudioEncoder_close begin");
+    ALOGV("VideoEditorAudioEncoder_close begin");
 
     VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER);
     pEncoderContext = (VideoEditorAudioEncoder_Context*)pContext;
@@ -318,23 +318,23 @@
     pEncoderContext->mClient.disconnect();
     pEncoderContext->mEncoderSource.clear();
 
-    LOGV("AudioEncoder_close:IN %d frames,OUT %d frames from %lld to %lld",
+    ALOGV("AudioEncoder_close:IN %d frames,OUT %d frames from %lld to %lld",
         pEncoderContext->mNbInputFrames,
         pEncoderContext->mNbOutputFrames, pEncoderContext->mFirstOutputCts,
         pEncoderContext->mLastOutputCts);
 
     if( pEncoderContext->mNbInputFrames != pEncoderContext->mNbOutputFrames ) {
-        LOGV("VideoEditorAudioEncoder_close:some frames were not encoded %d %d",
+        ALOGV("VideoEditorAudioEncoder_close:some frames were not encoded %d %d",
             pEncoderContext->mNbInputFrames, pEncoderContext->mNbOutputFrames);
     }
 
 cleanUp:
     if( M4NO_ERROR == err ) {
-        LOGV("VideoEditorAudioEncoder_close no error");
+        ALOGV("VideoEditorAudioEncoder_close no error");
     } else {
-        LOGV("VideoEditorAudioEncoder_close ERROR 0x%X", err);
+        ALOGV("VideoEditorAudioEncoder_close ERROR 0x%X", err);
     }
-    LOGV("VideoEditorAudioEncoder_close begin end");
+    ALOGV("VideoEditorAudioEncoder_close begin end");
     return err;
 }
 
@@ -350,7 +350,7 @@
     int32_t iNbChannel = 0;
     uint32_t codecFlags = 0;
 
-    LOGV("VideoEditorAudioEncoder_open begin");
+    ALOGV("VideoEditorAudioEncoder_open begin");
 
     VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER);
     VIDEOEDITOR_CHECK(M4OSA_NULL != pParams,  M4ERR_PARAMETER);
@@ -366,7 +366,7 @@
     pEncoderContext->mLastOutputCts  = -1;
 
     // Allocate & initialize the encoding parameters
-    LOGV("VideoEditorAudioEncoder_open : params F=%d CN=%d BR=%d F=%d",
+    ALOGV("VideoEditorAudioEncoder_open : params F=%d CN=%d BR=%d F=%d",
         pParams->Frequency, pParams->ChannelNum, pParams->Bitrate,
         pParams->Format);
     SAFE_MALLOC(pEncoderContext->mCodecParams, M4ENCODER_AudioParams, 1,
@@ -443,9 +443,16 @@
 #ifdef VIDEOEDITOR_FORCECODEC
     codecFlags |= OMXCodec::VIDEOEDITOR_FORCECODEC;
 #endif /* VIDEOEDITOR_FORCECODEC */
+    // FIXME:
+    // We are moving away to use software AACEncoder and instead use OMX-based
+    // software AAC audio encoder. We want to use AACEncoder for now. After we
+    // fix the interface issue with the OMX-based AAC audio encoder, we should
+    // then set the component name back to NULL to allow the system to pick up
+    // the right AAC audio encoder.
     pEncoderContext->mEncoder = OMXCodec::Create(
-        pEncoderContext->mClient.interface(), encoderMetadata, true,
-        pEncoderContext->mEncoderSource, NULL, codecFlags);
+            pEncoderContext->mClient.interface(), encoderMetadata, true,
+            pEncoderContext->mEncoderSource, "AACEncoder" /* component name */,
+            codecFlags);
     VIDEOEDITOR_CHECK(NULL != pEncoderContext->mEncoder.get(), M4ERR_STATE);
 
     // Start the graph
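
The FIXME above pins the encoder to the "AACEncoder" component by name as a stopgap; once the interface issue with the OMX-based software AAC encoder is resolved, the comment says the component name goes back to NULL so that OMXCodec can pick a suitable encoder itself. A minimal sketch of that intended end state, reusing the same OMXCodec::Create arguments as the hunk above (the wrapper name is illustrative):

#include <stdint.h>
#include <media/stagefright/MediaSource.h>
#include <media/stagefright/MetaData.h>
#include <media/stagefright/OMXCodec.h>

using namespace android;

// Same call as above, but with the component name back to NULL so the
// framework selects the AAC encoder, per the FIXME's stated end state.
static sp<MediaSource> createAacEncoderSketch(
        const sp<IOMX> &omx, const sp<MetaData> &encoderMetadata,
        const sp<MediaSource> &source, uint32_t codecFlags) {
    return OMXCodec::Create(
            omx, encoderMetadata, true /* createEncoder */,
            source, NULL /* component name: let OMXCodec choose */,
            codecFlags);
}
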
@@ -475,16 +482,16 @@
         buffer->release();
         *pDSI = pEncoderContext->mDSI;
     }
-    LOGV("VideoEditorAudioEncoder_open : DONE");
+    ALOGV("VideoEditorAudioEncoder_open : DONE");
 
 cleanUp:
     if( M4NO_ERROR == err ) {
-        LOGV("VideoEditorAudioEncoder_open no error");
+        ALOGV("VideoEditorAudioEncoder_open no error");
     } else {
         VideoEditorAudioEncoder_close(pEncoderContext);
-        LOGV("VideoEditorAudioEncoder_open ERROR 0x%X", err);
+        ALOGV("VideoEditorAudioEncoder_open ERROR 0x%X", err);
     }
-    LOGV("VideoEditorAudioEncoder_open end");
+    ALOGV("VideoEditorAudioEncoder_open end");
     return err;
 }
 
@@ -497,7 +504,7 @@
     MediaBuffer* buffer = NULL;
     int32_t nbBuffer = 0;
 
-    LOGV("VideoEditorAudioEncoder_processInputBuffer begin");
+    ALOGV("VideoEditorAudioEncoder_processInputBuffer begin");
     VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER);
 
     pEncoderContext = (VideoEditorAudioEncoder_Context*)pContext;
@@ -512,27 +519,27 @@
                 pInBuffer->pTableBufferSize[0]);
             break;
         default:
-            LOGV("VEAE_processInputBuffer unsupported channel configuration %d",
+            ALOGV("VEAE_processInputBuffer unsupported channel configuration %d",
                 pEncoderContext->mCodecParams->ChannelNum);
             VIDEOEDITOR_CHECK(M4OSA_FALSE, M4ERR_PARAMETER);
             break;
     }
 
-    LOGV("VideoEditorAudioEncoder_processInputBuffer : store %d bytes",
+    ALOGV("VideoEditorAudioEncoder_processInputBuffer : store %d bytes",
         buffer->range_length());
     // Push the buffer to the source
     nbBuffer = pEncoderContext->mEncoderSource->storeBuffer(buffer);
 
 cleanUp:
     if( M4NO_ERROR == err ) {
-        LOGV("VideoEditorAudioEncoder_processInputBuffer no error");
+        ALOGV("VideoEditorAudioEncoder_processInputBuffer no error");
     } else {
         if( NULL != buffer ) {
             buffer->release();
         }
-        LOGV("VideoEditorAudioEncoder_processInputBuffer ERROR 0x%X", err);
+        ALOGV("VideoEditorAudioEncoder_processInputBuffer ERROR 0x%X", err);
     }
-    LOGV("VideoEditorAudioEncoder_processInputBuffer end");
+    ALOGV("VideoEditorAudioEncoder_processInputBuffer end");
     return err;
 }
 
@@ -546,7 +553,7 @@
     int64_t i64Tmp = 0;
     status_t result = OK;
 
-    LOGV("VideoEditorAudioEncoder_processOutputBuffer begin");
+    ALOGV("VideoEditorAudioEncoder_processOutputBuffer begin");
     VIDEOEDITOR_CHECK(M4OSA_NULL != pContext,   M4ERR_PARAMETER);
     VIDEOEDITOR_CHECK(M4OSA_NULL != buffer,     M4ERR_PARAMETER);
     VIDEOEDITOR_CHECK(M4OSA_NULL != pOutBuffer, M4ERR_PARAMETER);
@@ -556,7 +563,7 @@
     // Process the returned AU
     if( 0 == buffer->range_length() ) {
         // Encoder has no data yet, nothing unusual
-        LOGV("VideoEditorAudioEncoder_processOutputBuffer : buffer is empty");
+        ALOGV("VideoEditorAudioEncoder_processOutputBuffer : buffer is empty");
         pOutBuffer->pTableBufferSize[0] = 0;
         goto cleanUp;
     }
@@ -587,11 +594,11 @@
     // Release the buffer
     buffer->release();
     if( M4NO_ERROR == err ) {
-        LOGV("VideoEditorAudioEncoder_processOutputBuffer no error");
+        ALOGV("VideoEditorAudioEncoder_processOutputBuffer no error");
     } else {
-        LOGV("VideoEditorAudioEncoder_processOutputBuffer ERROR 0x%X", err);
+        ALOGV("VideoEditorAudioEncoder_processOutputBuffer ERROR 0x%X", err);
     }
-    LOGV("VideoEditorAudioEncoder_processOutputBuffer end");
+    ALOGV("VideoEditorAudioEncoder_processOutputBuffer end");
     return err;
 }
 
@@ -602,7 +609,7 @@
     status_t result = OK;
     MediaBuffer* buffer = NULL;
 
-    LOGV("VideoEditorAudioEncoder_step begin");
+    ALOGV("VideoEditorAudioEncoder_step begin");
 
     VIDEOEDITOR_CHECK(M4OSA_NULL != pContext,   M4ERR_PARAMETER);
     VIDEOEDITOR_CHECK(M4OSA_NULL != pInBuffer,  M4ERR_PARAMETER);
@@ -626,11 +633,11 @@
 
 cleanUp:
     if( M4NO_ERROR == err ) {
-        LOGV("VideoEditorAudioEncoder_step no error");
+        ALOGV("VideoEditorAudioEncoder_step no error");
     } else {
-        LOGV("VideoEditorAudioEncoder_step ERROR 0x%X", err);
+        ALOGV("VideoEditorAudioEncoder_step ERROR 0x%X", err);
     }
-    LOGV("VideoEditorAudioEncoder_step end");
+    ALOGV("VideoEditorAudioEncoder_step end");
     return err;
 }
 
@@ -639,14 +646,14 @@
     M4OSA_ERR err = M4NO_ERROR;
     VideoEditorAudioEncoder_Context* pEncoderContext = M4OSA_NULL;
 
-    LOGV("VideoEditorAudioEncoder_getOption begin optionID 0x%X", optionID);
+    ALOGV("VideoEditorAudioEncoder_getOption begin optionID 0x%X", optionID);
     VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER);
 
     pEncoderContext = (VideoEditorAudioEncoder_Context*)pContext;
 
     switch( optionID ) {
         default:
-            LOGV("VideoEditorAudioEncoder_getOption: unsupported optionId 0x%X",
+            ALOGV("VideoEditorAudioEncoder_getOption: unsupported optionId 0x%X",
                 optionID);
             VIDEOEDITOR_CHECK(M4OSA_FALSE, M4ERR_BAD_OPTION_ID);
             break;
@@ -654,11 +661,11 @@
 
 cleanUp:
     if( M4NO_ERROR == err ) {
-        LOGV("VideoEditorAudioEncoder_getOption no error");
+        ALOGV("VideoEditorAudioEncoder_getOption no error");
     } else {
-        LOGV("VideoEditorAudioEncoder_getOption ERROR 0x%X", err);
+        ALOGV("VideoEditorAudioEncoder_getOption ERROR 0x%X", err);
     }
-    LOGV("VideoEditorAudioEncoder_getOption end");
+    ALOGV("VideoEditorAudioEncoder_getOption end");
     return err;
 }
 
@@ -671,7 +678,7 @@
     VIDEOEDITOR_CHECK(M4OSA_NULL != pFormat,           M4ERR_PARAMETER);
     VIDEOEDITOR_CHECK(M4OSA_NULL != pEncoderInterface, M4ERR_PARAMETER);
 
-    LOGV("VideoEditorAudioEncoder_getInterface 0x%x 0x%x",pFormat,
+    ALOGV("VideoEditorAudioEncoder_getInterface 0x%x 0x%x",pFormat,
         pEncoderInterface);
     SAFE_MALLOC(*pEncoderInterface, M4ENCODER_AudioGlobalInterface, 1,
         "AudioEncoder");
@@ -696,7 +703,7 @@
         }
         default:
         {
-            LOGV("VideoEditorAudioEncoder_getInterface: unsupported format %d",
+            ALOGV("VideoEditorAudioEncoder_getInterface: unsupported format %d",
                 format);
             VIDEOEDITOR_CHECK(M4OSA_FALSE, M4ERR_PARAMETER);
         break;
@@ -710,10 +717,10 @@
 
 cleanUp:
     if( M4NO_ERROR == err ) {
-        LOGV("VideoEditorAudioEncoder_getInterface no error");
+        ALOGV("VideoEditorAudioEncoder_getInterface no error");
     } else {
         *pEncoderInterface = M4OSA_NULL;
-        LOGV("VideoEditorAudioEncoder_getInterface ERROR 0x%X", err);
+        ALOGV("VideoEditorAudioEncoder_getInterface ERROR 0x%X", err);
     }
     return err;
 }
@@ -737,7 +744,7 @@
 M4OSA_ERR VideoEditorAudioEncoder_getInterface_MP3(
         M4ENCODER_AudioFormat* pFormat,
         M4ENCODER_AudioGlobalInterface** pEncoderInterface) {
-    LOGV("VideoEditorAudioEncoder_getInterface_MP3 no error");
+    ALOGV("VideoEditorAudioEncoder_getInterface_MP3 no error");
 
     return VideoEditorAudioEncoder_getInterface(
         M4ENCODER_kMP3, pFormat, pEncoderInterface);
diff --git a/libvideoeditor/vss/stagefrightshells/src/VideoEditorBuffer.c b/libvideoeditor/vss/stagefrightshells/src/VideoEditorBuffer.c
index edd1e7c..98919d2 100755
--- a/libvideoeditor/vss/stagefrightshells/src/VideoEditorBuffer.c
+++ b/libvideoeditor/vss/stagefrightshells/src/VideoEditorBuffer.c
@@ -54,7 +54,7 @@
     M4OSA_ERR lerr = M4NO_ERROR;
     VIDEOEDITOR_BUFFER_Pool* pool;
 
-    LOGV("VIDEOEDITOR_BUFFER_allocatePool : ppool = 0x%x nbBuffers = %d ",
+    ALOGV("VIDEOEDITOR_BUFFER_allocatePool : ppool = 0x%x nbBuffers = %d ",
         ppool, nbBuffers);
 
     pool = M4OSA_NULL;
@@ -67,7 +67,7 @@
         goto VIDEOEDITOR_BUFFER_allocatePool_Cleanup;
     }
 
-    LOGV("VIDEOEDITOR_BUFFER_allocatePool : Allocating Pool buffers");
+    ALOGV("VIDEOEDITOR_BUFFER_allocatePool : Allocating Pool buffers");
     pool->pNXPBuffer = M4OSA_NULL;
     pool->pNXPBuffer = (VIDEOEDITOR_BUFFER_Buffer*)M4OSA_32bitAlignedMalloc(
                             sizeof(VIDEOEDITOR_BUFFER_Buffer)*nbBuffers,
@@ -79,7 +79,7 @@
         goto VIDEOEDITOR_BUFFER_allocatePool_Cleanup;
     }
 
-    LOGV("VIDEOEDITOR_BUFFER_allocatePool : Allocating Pool name buffer");
+    ALOGV("VIDEOEDITOR_BUFFER_allocatePool : Allocating Pool name buffer");
     pool->poolName = M4OSA_NULL;
     pool->poolName = (M4OSA_Char*)M4OSA_32bitAlignedMalloc(
         VIDEOEDITOR_BUFFEPOOL_MAX_NAME_SIZE,VIDEOEDITOR_BUFFER_EXTERNAL,
@@ -90,7 +90,7 @@
         goto VIDEOEDITOR_BUFFER_allocatePool_Cleanup;
     }
 
-    LOGV("VIDEOEDITOR_BUFFER_allocatePool : Assigning Pool name buffer");
+    ALOGV("VIDEOEDITOR_BUFFER_allocatePool : Assigning Pool name buffer");
 
     memset((void *)pool->poolName, 0,VIDEOEDITOR_BUFFEPOOL_MAX_NAME_SIZE);
     memcpy((void *)pool->poolName, (void *)poolName,
@@ -106,7 +106,7 @@
         VIDEOEDITOR_SAFE_FREE(pool);
     }
     *ppool = pool;
-    LOGV("VIDEOEDITOR_BUFFER_allocatePool END");
+    ALOGV("VIDEOEDITOR_BUFFER_allocatePool END");
 
     return lerr;
 }
@@ -125,7 +125,7 @@
     M4OSA_ERR err;
     M4OSA_UInt32  j = 0;
 
-    LOGV("VIDEOEDITOR_BUFFER_freePool : ppool = 0x%x", ppool);
+    ALOGV("VIDEOEDITOR_BUFFER_freePool : ppool = 0x%x", ppool);
 
     err = M4NO_ERROR;
 
@@ -168,7 +168,7 @@
     M4OSA_Bool bFound = M4OSA_FALSE;
     M4OSA_UInt32 i, ibuf;
 
-    LOGV("VIDEOEDITOR_BUFFER_getBuffer from %s in state=%d",
+    ALOGV("VIDEOEDITOR_BUFFER_getBuffer from %s in state=%d",
         ppool->poolName, desiredState);
 
     ibuf = 0;
@@ -185,7 +185,7 @@
 
     if(!bFound)
     {
-        LOGV("VIDEOEDITOR_BUFFER_getBuffer No buffer available in state %d",
+        ALOGV("VIDEOEDITOR_BUFFER_getBuffer No buffer available in state %d",
             desiredState);
         *pNXPBuffer = M4OSA_NULL;
         return M4ERR_NO_BUFFER_AVAILABLE;
@@ -194,7 +194,7 @@
     /* case where a buffer has been found */
     *pNXPBuffer = &(ppool->pNXPBuffer[ibuf]);
 
-    LOGV("VIDEOEDITOR_BUFFER_getBuffer: idx = %d", ibuf);
+    ALOGV("VIDEOEDITOR_BUFFER_getBuffer: idx = %d", ibuf);
 
     return(err);
 }
@@ -258,7 +258,7 @@
     }
     if(M4OSA_FALSE == bFound)
     {
-        LOGV("VIDEOEDITOR_BUFFER_getOldestBuffer WARNING no buffer available");
+        ALOGV("VIDEOEDITOR_BUFFER_getOldestBuffer WARNING no buffer available");
         err = M4ERR_NO_BUFFER_AVAILABLE;
     }
     return err;
diff --git a/libvideoeditor/vss/stagefrightshells/src/VideoEditorMp3Reader.cpp b/libvideoeditor/vss/stagefrightshells/src/VideoEditorMp3Reader.cpp
index e9e23a3..af53c54 100755
--- a/libvideoeditor/vss/stagefrightshells/src/VideoEditorMp3Reader.cpp
+++ b/libvideoeditor/vss/stagefrightshells/src/VideoEditorMp3Reader.cpp
@@ -32,13 +32,13 @@
 #include "VideoEditorUtils.h"
 
 #include "utils/Log.h"
+#include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/MediaBufferGroup.h>
 #include <media/stagefright/DataSource.h>
 #include <media/stagefright/FileSource.h>
 #include <media/stagefright/MediaBuffer.h>
 #include <media/stagefright/MediaDefs.h>
 #include <media/stagefright/MediaExtractor.h>
-#include <media/stagefright/MediaDebug.h>
 #include <media/stagefright/MediaSource.h>
 #include <media/stagefright/MetaData.h>
 
@@ -88,7 +88,7 @@
 
     VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER);
 
-    LOGV("VideoEditorMp3Reader_create begin");
+    ALOGV("VideoEditorMp3Reader_create begin");
 
     /* Context allocation & initialization */
     SAFE_MALLOC(pReaderContext, VideoEditorMp3Reader_Context, 1,
@@ -101,11 +101,11 @@
 
 cleanUp:
     if (M4NO_ERROR == err) {
-        LOGV("VideoEditorMp3Reader_create no error");
+        ALOGV("VideoEditorMp3Reader_create no error");
     } else {
-        LOGV("VideoEditorMp3Reader_create ERROR 0x%X", err);
+        ALOGV("VideoEditorMp3Reader_create ERROR 0x%X", err);
     }
-    LOGV("VideoEditorMp3Reader_create end");
+    ALOGV("VideoEditorMp3Reader_create end");
     return err;
 }
 
@@ -124,16 +124,16 @@
         (VideoEditorMp3Reader_Context*)pContext;
 
     VIDEOEDITOR_CHECK(M4OSA_NULL != pReaderContext, M4ERR_PARAMETER);
-    LOGV("VideoEditorMp3Reader_destroy begin");
+    ALOGV("VideoEditorMp3Reader_destroy begin");
 
     SAFE_FREE(pReaderContext);
 cleanUp:
     if (M4NO_ERROR == err) {
-        LOGV("VideoEditorMp3Reader_destroy no error");
+        ALOGV("VideoEditorMp3Reader_destroy no error");
     } else {
-        LOGV("VideoEditorMp3Reader_destroy ERROR 0x%X", err);
+        ALOGV("VideoEditorMp3Reader_destroy ERROR 0x%X", err);
     }
-    LOGV("VideoEditorMp3Reader_destroy end");
+    ALOGV("VideoEditorMp3Reader_destroy end");
     return err;
 }
 /**
@@ -156,30 +156,30 @@
     (VideoEditorMp3Reader_Context*)context;
     M4OSA_ERR err = M4NO_ERROR;
 
-    LOGV("VideoEditorMp3Reader_open begin");
+    ALOGV("VideoEditorMp3Reader_open begin");
     /* Check function parameters*/
     M4OSA_DEBUG_IF1((M4OSA_NULL == pReaderContext),  M4ERR_PARAMETER,
         "VideoEditorMp3Reader_open: invalid context pointer");
     M4OSA_DEBUG_IF1((M4OSA_NULL == pFileDescriptor), M4ERR_PARAMETER,
         "VideoEditorMp3Reader_open: invalid pointer pFileDescriptor");
 
-    LOGV("VideoEditorMp3Reader_open Datasource start %s",
+    ALOGV("VideoEditorMp3Reader_open Datasource start %s",
         (char*)pFileDescriptor);
     pReaderContext->mDataSource = new FileSource ((char*)pFileDescriptor);
-    LOGV("VideoEditorMp3Reader_open Datasource end");
+    ALOGV("VideoEditorMp3Reader_open Datasource end");
 
     if (pReaderContext->mDataSource == NULL) {
-        LOGV("VideoEditorMp3Reader_open Datasource error");
+        ALOGV("VideoEditorMp3Reader_open Datasource error");
         return UNKNOWN_ERROR;
     }
 
-    LOGV("VideoEditorMp3Reader_open extractor start");
+    ALOGV("VideoEditorMp3Reader_open extractor start");
     pReaderContext->mExtractor = MediaExtractor::Create(
         pReaderContext->mDataSource,MEDIA_MIMETYPE_AUDIO_MPEG);
-    LOGV("VideoEditorMp3Reader_open extractor end");
+    ALOGV("VideoEditorMp3Reader_open extractor end");
 
     if (pReaderContext->mExtractor == NULL)    {
-        LOGV("VideoEditorMp3Reader_open extractor error");
+        ALOGV("VideoEditorMp3Reader_open extractor error");
         return UNKNOWN_ERROR;
     }
     pReaderContext->mStreamNumber = 0;
@@ -188,11 +188,11 @@
     sp<MetaData> meta = pReaderContext->mExtractor->getMetaData();
     meta->findInt32(kKeyIsDRM, &isDRMProtected);
     if (isDRMProtected) {
-        LOGV("VideoEditorMp3Reader_open error - DRM Protected");
+        ALOGV("VideoEditorMp3Reader_open error - DRM Protected");
         return M4ERR_UNSUPPORTED_MEDIA_TYPE;
     }
 
-    LOGV("VideoEditorMp3Reader_open end");
+    ALOGV("VideoEditorMp3Reader_open end");
     return err;
 }
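
The open path above reduces to three steps: wrap the file path in a FileSource, hand it to MediaExtractor::Create with the MP3 MIME type, and reject DRM-protected content based on the kKeyIsDRM metadata key. A self-contained sketch of that sequence, using only the Stagefright calls visible in this hunk (the function name and the generic error mapping are illustrative):

#include <utils/Errors.h>
#include <media/stagefright/DataSource.h>
#include <media/stagefright/FileSource.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MediaExtractor.h>
#include <media/stagefright/MetaData.h>

using namespace android;

// Mirrors VideoEditorMp3Reader_open: data source, extractor, DRM check.
static status_t openMp3Sketch(const char *path,
        sp<DataSource> *outSource, sp<MediaExtractor> *outExtractor) {
    sp<DataSource> source = new FileSource(path);
    if (source == NULL) {
        return UNKNOWN_ERROR;
    }

    sp<MediaExtractor> extractor =
            MediaExtractor::Create(source, MEDIA_MIMETYPE_AUDIO_MPEG);
    if (extractor == NULL) {
        return UNKNOWN_ERROR;
    }

    int32_t isDRMProtected = 0;
    extractor->getMetaData()->findInt32(kKeyIsDRM, &isDRMProtected);
    if (isDRMProtected) {
        return UNKNOWN_ERROR;  // the reader reports M4ERR_UNSUPPORTED_MEDIA_TYPE here
    }

    *outSource = source;
    *outExtractor = extractor;
    return OK;
}
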
 /**
@@ -209,7 +209,7 @@
         (VideoEditorMp3Reader_Context*)context;
     M4OSA_ERR err = M4NO_ERROR;
 
-    LOGV("VideoEditorMp3Reader_close begin");
+    ALOGV("VideoEditorMp3Reader_close begin");
     /* Check function parameters */
     M4OSA_DEBUG_IF1((M4OSA_NULL == pReaderContext), M4ERR_PARAMETER,
             "VideoEditorMp3Reader_close: invalid context pointer");
@@ -240,7 +240,7 @@
     pReaderContext->mExtractor.clear();
     pReaderContext->mDataSource.clear();
 
-    LOGV("VideoEditorMp3Reader_close end ");
+    ALOGV("VideoEditorMp3Reader_close end ");
     return err;
 }
 /**
@@ -265,7 +265,7 @@
         (VideoEditorMp3Reader_Context*)context;
     M4OSA_ERR err = M4NO_ERROR;
 
-    LOGV("VideoEditorMp3Reader_getOption begin: optionId= %d ",(int)optionId);
+    ALOGV("VideoEditorMp3Reader_getOption begin: optionId= %d ",(int)optionId);
 
     M4OSA_DEBUG_IF1((M4OSA_NULL == pReaderContext), M4ERR_PARAMETER,
         "invalid value pointer");
@@ -275,7 +275,7 @@
     switch(optionId) {
     case M4READER_kOptionID_Duration:
         {
-            LOGV("Mp3Reader duration=%ld",pReaderContext->mMaxDuration);
+            ALOGV("Mp3Reader duration=%ld",pReaderContext->mMaxDuration);
             *(M4OSA_Time*)pValue = pReaderContext->mMaxDuration;
         }
         break;
@@ -304,11 +304,11 @@
 
     default :
         {
-            LOGV("VideoEditorMp3Reader_getOption:  M4ERR_BAD_OPTION_ID");
+            ALOGV("VideoEditorMp3Reader_getOption:  M4ERR_BAD_OPTION_ID");
             err = M4ERR_BAD_OPTION_ID;
         }
     }
-    LOGV("VideoEditorMp3Reader_getOption end ");
+    ALOGV("VideoEditorMp3Reader_getOption end ");
     return err;
 }
 /**
@@ -334,7 +334,7 @@
         (VideoEditorMp3Reader_Context*)context;
     M4OSA_ERR err = M4NO_ERROR;
 
-    LOGV("VideoEditorMp3Reader_Context begin: optionId: %d Value: %d ",
+    ALOGV("VideoEditorMp3Reader_Context begin: optionId: %d Value: %d ",
         (int)optionId,(int)pValue);
 
     M4OSA_DEBUG_IF1((M4OSA_NULL == pReaderContext), M4ERR_PARAMETER,
@@ -349,7 +349,7 @@
             err = M4NO_ERROR;
         }
     }
-    LOGV("VideoEditorMp3Reader_Context end ");
+    ALOGV("VideoEditorMp3Reader_Context end ");
     return err;
 }
 /**
@@ -373,7 +373,7 @@
     M4SYS_AccessUnit* pAu;
     M4OSA_Time time64 = (M4OSA_Time)*pTime;
 
-    LOGV("VideoEditorMp3Reader_jump begin");
+    ALOGV("VideoEditorMp3Reader_jump begin");
     M4OSA_DEBUG_IF1((pReaderContext == 0), M4ERR_PARAMETER,
         "VideoEditorMp3Reader_jump: invalid context");
     M4OSA_DEBUG_IF1((pStreamHandler == 0), M4ERR_PARAMETER,
@@ -385,27 +385,27 @@
         mAudioStreamHandler){
         pAu = &pReaderContext->mAudioAu;
     } else {
-        LOGV("VideoEditorMp3Reader_jump: passed StreamHandler is not known");
+        ALOGV("VideoEditorMp3Reader_jump: passed StreamHandler is not known");
         return M4ERR_PARAMETER;
     }
 
     streamIdArray[0] = pStreamHandler->m_streamId;
     streamIdArray[1] = 0;
 
-    LOGV("VideoEditorMp3Reader_jump time ms %ld ", time64);
+    ALOGV("VideoEditorMp3Reader_jump time ms %ld ", time64);
 
     pAu->CTS = time64;
     pAu->DTS = time64;
 
     time64 = time64 * 1000; /* Convert the time into micro sec */
-    LOGV("VideoEditorMp3Reader_jump time us %ld ", time64);
+    ALOGV("VideoEditorMp3Reader_jump time us %ld ", time64);
 
     pReaderContext->mSeeking = M4OSA_TRUE;
     pReaderContext->mSeekTime = time64;
 
     time64 = time64 / 1000; /* Convert the time into milli sec */
     *pTime = (M4OSA_Int32)time64;
-    LOGV("VideoEditorMp3Reader_jump end ");
+    ALOGV("VideoEditorMp3Reader_jump end ");
     return err;
 }
 /**
@@ -439,7 +439,7 @@
     sp<MetaData> meta = NULL;
     int64_t Duration;
 
-    LOGV("VideoEditorMp3Reader_getNextStream begin");
+    ALOGV("VideoEditorMp3Reader_getNextStream begin");
     M4OSA_DEBUG_IF1((pReaderContext == 0),      M4ERR_PARAMETER,
         "VideoEditorMp3Reader_getNextStream: invalid context");
     M4OSA_DEBUG_IF1((pMediaFamily == 0),        M4ERR_PARAMETER,
@@ -447,14 +447,14 @@
     M4OSA_DEBUG_IF1((pStreamHandlerParam == 0), M4ERR_PARAMETER,
         "VideoEditorMp3Reader_getNextStream: invalid pointer to StreamHandler");
 
-    LOGV("VideoEditorMp3Reader_getNextStream stream number = %d",
+    ALOGV("VideoEditorMp3Reader_getNextStream stream number = %d",
         pReaderContext->mStreamNumber);
     if (pReaderContext->mStreamNumber >= 1) {
-        LOGV("VideoEditorMp3Reader_getNextStream max number of stream reached");
+        ALOGV("VideoEditorMp3Reader_getNextStream max number of stream reached");
         return M4WAR_NO_MORE_STREAM;
     }
     pReaderContext->mStreamNumber = pReaderContext->mStreamNumber + 1;
-    LOGV("VideoEditorMp3Reader_getNextStream number of Tracks%d",
+    ALOGV("VideoEditorMp3Reader_getNextStream number of Tracks%d",
         pReaderContext->mExtractor->countTracks());
     for (temp = 0; temp < pReaderContext->mExtractor->countTracks(); temp++) {
         meta = pReaderContext->mExtractor->getTrackMetaData(temp);
@@ -474,7 +474,7 @@
     }
 
     if (!haveAudio) {
-        LOGV("VideoEditorMp3Reader_getNextStream no more stream ");
+        ALOGV("VideoEditorMp3Reader_getNextStream no more stream ");
         pReaderContext->mDataSource.clear();
         return M4WAR_NO_MORE_STREAM;
     }
@@ -487,7 +487,7 @@
 
     meta->findInt32(kKeyBitRate, (int32_t*)&streamDesc.averageBitrate);
     meta->findInt32(kKeySampleRate, (int32_t*)&streamDesc.timeScale);
-    LOGV("Bitrate = %d, SampleRate = %d duration = %lld",
+    ALOGV("Bitrate = %d, SampleRate = %d duration = %lld",
         streamDesc.averageBitrate,streamDesc.timeScale,Duration/1000);
 
     streamDesc.streamType = M4SYS_kMP3;
@@ -503,7 +503,7 @@
         (M4OSA_Char*)"M4_AudioStreamHandler");
 
     if (pAudioStreamHandler == M4OSA_NULL) {
-        LOGV("VideoEditorMp3Reader_getNextStream malloc failed");
+        ALOGV("VideoEditorMp3Reader_getNextStream malloc failed");
         pReaderContext->mMediaSource->stop();
         pReaderContext->mMediaSource.clear();
         pReaderContext->mDataSource.clear();
@@ -517,7 +517,7 @@
     pAudioStreamHandler->m_structSize = sizeof(M4_AudioStreamHandler);
 
     if (meta == NULL) {
-        LOGV("VideoEditorMp3Reader_getNextStream meta is NULL");
+        ALOGV("VideoEditorMp3Reader_getNextStream meta is NULL");
     }
 
     pAudioStreamHandler->m_samplingFrequency = streamDesc.timeScale;
@@ -540,7 +540,7 @@
     pStreamHandler->m_maxAUSize = 0;
     pStreamHandler->m_streamType = M4DA_StreamTypeAudioMp3;
 
-    LOGV("VideoEditorMp3Reader_getNextStream end ");
+    ALOGV("VideoEditorMp3Reader_getNextStream end ");
     return err;
 }
 
@@ -569,12 +569,12 @@
     M4OSA_DEBUG_IF1((pAccessUnit == 0),    M4ERR_PARAMETER,
         "VideoEditorMp3Reader_fillAuStruct: invalid pointer to M4_AccessUnit");
 
-    LOGV("VideoEditorMp3Reader_fillAuStruct start ");
+    ALOGV("VideoEditorMp3Reader_fillAuStruct start ");
     if(pStreamHandler == (M4_StreamHandler*)pReaderContext->\
         mAudioStreamHandler){
         pAu = &pReaderContext->mAudioAu;
     } else {
-        LOGV("VideoEditorMp3Reader_fillAuStruct StreamHandler is not known");
+        ALOGV("VideoEditorMp3Reader_fillAuStruct StreamHandler is not known");
         return M4ERR_PARAMETER;
     }
 
@@ -596,7 +596,7 @@
     pAccessUnit->m_streamID     = pStreamHandler->m_streamId;
     pAccessUnit->m_structSize   = sizeof(M4_AccessUnit);
 
-    LOGV("VideoEditorMp3Reader_fillAuStruct end");
+    ALOGV("VideoEditorMp3Reader_fillAuStruct end");
     return M4NO_ERROR;
 }
 
@@ -620,7 +620,7 @@
     M4SYS_AccessUnit* pAu;
     M4OSA_Time time64 = 0;
 
-    LOGV("VideoEditorMp3Reader_reset start");
+    ALOGV("VideoEditorMp3Reader_reset start");
     M4OSA_DEBUG_IF1((pReaderContext == 0), M4ERR_PARAMETER,
         "VideoEditorMp3Reader_reset: invalid context");
     M4OSA_DEBUG_IF1((pStreamHandler == 0), M4ERR_PARAMETER,
@@ -630,7 +630,7 @@
         mAudioStreamHandler) {
         pAu = &pReaderContext->mAudioAu;
     } else {
-        LOGV("VideoEditorMp3Reader_reset StreamHandler is not known");
+        ALOGV("VideoEditorMp3Reader_reset StreamHandler is not known");
         return M4ERR_PARAMETER;
     }
     streamIdArray[0] = pStreamHandler->m_streamId;
@@ -641,7 +641,7 @@
     pReaderContext->mSeeking = M4OSA_TRUE;
     pReaderContext->mSeekTime = time64;
 
-    LOGV("VideoEditorMp3Reader_reset end");
+    ALOGV("VideoEditorMp3Reader_reset end");
     return err;
 }
 /**
@@ -667,7 +667,7 @@
     MediaBuffer *mAudioBuffer;
     MediaSource::ReadOptions options;
 
-    LOGV("VideoEditorMp3Reader_getNextAu start");
+    ALOGV("VideoEditorMp3Reader_getNextAu start");
     M4OSA_DEBUG_IF1((pReaderContext == 0), M4ERR_PARAMETER,
         "VideoEditorMp3Reader_getNextAu: invalid context");
     M4OSA_DEBUG_IF1((pStreamHandler == 0), M4ERR_PARAMETER,
@@ -679,7 +679,7 @@
         mAudioStreamHandler) {
         pAu = &pReaderContext->mAudioAu;
     } else {
-        LOGV("VideoEditorMp3Reader_getNextAu: StreamHandler is not known\n");
+        ALOGV("VideoEditorMp3Reader_getNextAu: StreamHandler is not known\n");
         return M4ERR_PARAMETER;
     }
 
@@ -701,7 +701,7 @@
                 M4READER_MP3, (M4OSA_Char*)"pAccessUnit->m_dataAddress" );
 
             if (pAu->dataAddress == NULL) {
-                LOGV("VideoEditorMp3Reader_getNextAu malloc failed");
+                ALOGV("VideoEditorMp3Reader_getNextAu malloc failed");
                 pReaderContext->mMediaSource->stop();
                 pReaderContext->mMediaSource.clear();
                 pReaderContext->mDataSource.clear();
@@ -722,7 +722,7 @@
         pAu->attribute = M4SYS_kFragAttrOk;
         mAudioBuffer->release();
 
-        LOGV("VideoEditorMp3Reader_getNextAu AU CTS = %ld",pAu->CTS);
+        ALOGV("VideoEditorMp3Reader_getNextAu AU CTS = %ld",pAu->CTS);
 
         pAccessUnit->m_dataAddress = (M4OSA_Int8*) pAu->dataAddress;
         pAccessUnit->m_size = pAu->size;
@@ -730,7 +730,7 @@
         pAccessUnit->m_DTS = pAu->DTS;
         pAccessUnit->m_attribute = pAu->attribute;
     } else {
-        LOGV("VideoEditorMp3Reader_getNextAu EOS reached.");
+        ALOGV("VideoEditorMp3Reader_getNextAu EOS reached.");
         pAccessUnit->m_size=0;
         err = M4WAR_NO_MORE_AU;
     }
@@ -739,7 +739,7 @@
     options.clearSeekTo();
     pReaderContext->mSeeking = M4OSA_FALSE;
     mAudioBuffer = NULL;
-    LOGV("VideoEditorMp3Reader_getNextAu end");
+    ALOGV("VideoEditorMp3Reader_getNextAu end");
 
     return err;
 }
@@ -752,7 +752,7 @@
         M4READER_DataInterface **pRdrDataInterface) {
     M4OSA_ERR err = M4NO_ERROR;
 
-    LOGV("VideoEditorMp3Reader_getInterface: begin");
+    ALOGV("VideoEditorMp3Reader_getInterface: begin");
     /* Input parameters check */
     VIDEOEDITOR_CHECK(M4OSA_NULL != pMediaType,      M4ERR_PARAMETER);
     VIDEOEDITOR_CHECK(M4OSA_NULL != pRdrGlobalInterface, M4ERR_PARAMETER);
@@ -787,16 +787,16 @@
 cleanUp:
     if( M4NO_ERROR == err )
     {
-        LOGV("VideoEditorMp3Reader_getInterface no error");
+        ALOGV("VideoEditorMp3Reader_getInterface no error");
     }
     else
     {
         SAFE_FREE(*pRdrGlobalInterface);
         SAFE_FREE(*pRdrDataInterface);
 
-        LOGV("VideoEditorMp3Reader_getInterface ERROR 0x%X", err);
+        ALOGV("VideoEditorMp3Reader_getInterface ERROR 0x%X", err);
     }
-    LOGV("VideoEditorMp3Reader_getInterface: end");
+    ALOGV("VideoEditorMp3Reader_getInterface: end");
     return err;
 }
 }  /* extern "C" */
diff --git a/libvideoeditor/vss/stagefrightshells/src/VideoEditorUtils.cpp b/libvideoeditor/vss/stagefrightshells/src/VideoEditorUtils.cpp
index 70d864b..5309bd4 100755
--- a/libvideoeditor/vss/stagefrightshells/src/VideoEditorUtils.cpp
+++ b/libvideoeditor/vss/stagefrightshells/src/VideoEditorUtils.cpp
@@ -26,8 +26,8 @@
 
 #include "VideoEditorUtils.h"
 
+#include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/MediaErrors.h>
-#include <media/stagefright/MediaDebug.h>
 #include <media/stagefright/MediaExtractor.h>
 #include <media/stagefright/MediaBuffer.h>
 #include <media/stagefright/MetaData.h>
@@ -40,9 +40,9 @@
 /*---------------------*/
 /*  DEBUG LEVEL SETUP  */
 /*---------------------*/
-#define LOG1 LOGE    /*ERRORS Logging*/
-#define LOG2 LOGI    /*WARNING Logging*/
-#define LOG3 //LOGV  /*COMMENTS Logging*/
+#define LOG1 ALOGE    /*ERRORS Logging*/
+#define LOG2 ALOGI    /*WARNING Logging*/
+#define LOG3 //ALOGV  /*COMMENTS Logging*/
 
 namespace android {
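
The three LOG1/LOG2/LOG3 macros above map this file's debug levels onto the renamed log macros: LOG1 for errors (ALOGE), LOG2 for warnings (ALOGI), and LOG3 for verbose comments, left expanding to nothing because its replacement text is commented out. A minimal sketch of the same setup with LOG3 actually wired to ALOGV, which is an assumption made here for illustration, as are LOG_TAG and the helper name:

#define LOG_TAG "VideoEditorUtilsSketch"  // must precede utils/Log.h
#include <utils/Log.h>

#define LOG1 ALOGE   /* ERRORS logging */
#define LOG2 ALOGI   /* WARNING logging */
#define LOG3 ALOGV   /* COMMENTS logging; ALOGV is a no-op unless LOG_NDEBUG is 0 */

static void sketchLogLevels(int err) {
    LOG1("unrecoverable error 0x%x", err);  // always compiled in
    LOG2("recoverable warning 0x%x", err);  // always compiled in
    LOG3("verbose trace 0x%x", err);        // only in LOG_NDEBUG=0 builds
}
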
 
@@ -232,14 +232,14 @@
     const uint8_t *nextStartCode = &data[length - bytesLeft];
     *paramSetLen = nextStartCode - data;
     if (*paramSetLen == 0) {
-        LOGE("Param set is malformed, since its length is 0");
+        ALOGE("Param set is malformed, since its length is 0");
         return NULL;
     }
 
     AVCParamSet paramSet(*paramSetLen, data);
     if (type == kNalUnitTypeSeqParamSet) {
         if (*paramSetLen < 4) {
-            LOGE("Seq parameter set malformed");
+            ALOGE("Seq parameter set malformed");
             return NULL;
         }
         if (pC->mSeqParamSets.empty()) {
@@ -250,7 +250,7 @@
             if (pC->mProfileIdc != data[1] ||
                 pC->mProfileCompatible != data[2] ||
                 pC->mLevelIdc != data[3]) {
-                LOGV("Inconsistent profile/level found in seq parameter sets");
+                ALOGV("Inconsistent profile/level found in seq parameter sets");
                 return NULL;
             }
         }
@@ -264,20 +264,20 @@
 status_t buildAVCCodecSpecificData(uint8_t **pOutputData, size_t *pOutputSize,
         const uint8_t *data, size_t size, MetaData *param)
 {
-    //LOGV("buildAVCCodecSpecificData");
+    //ALOGV("buildAVCCodecSpecificData");
 
     if ( (pOutputData == NULL) || (pOutputSize == NULL) ) {
-        LOGE("output is invalid");
+        ALOGE("output is invalid");
         return ERROR_MALFORMED;
     }
 
     if (*pOutputData != NULL) {
-        LOGE("Already have codec specific data");
+        ALOGE("Already have codec specific data");
         return ERROR_MALFORMED;
     }
 
     if (size < 4) {
-        LOGE("Codec specific data length too short: %d", size);
+        ALOGE("Codec specific data length too short: %d", size);
         return ERROR_MALFORMED;
     }
 
@@ -286,7 +286,7 @@
         // 2 bytes for each of the parameter set length field
         // plus the 7 bytes for the header
         if (size < 4 + 7) {
-            LOGE("Codec specific data length too short: %d", size);
+            ALOGE("Codec specific data length too short: %d", size);
             return ERROR_MALFORMED;
         }
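
The 4 + 7 bound above is the smallest block this check accepts: 7 fixed bytes of avcC header (including the SPS and PPS count bytes) plus one 2-byte length field each for a single SPS and a single PPS, before any parameter-set payload. A small sizing sketch under that reading of the comment (the helper name and example values are illustrative):

#include <stddef.h>
#include <vector>

// Expected avcC size: 7 fixed header bytes, then a 2-byte length field plus
// the NAL payload for every SPS and PPS (assumes the layout described above).
static size_t avccSizeSketch(const std::vector<size_t> &spsLengths,
                             const std::vector<size_t> &ppsLengths) {
    size_t total = 7;  // 5 leading header bytes + SPS count byte + PPS count byte
    for (size_t i = 0; i < spsLengths.size(); ++i) total += 2 + spsLengths[i];
    for (size_t i = 0; i < ppsLengths.size(); ++i) total += 2 + ppsLengths[i];
    return total;
}
// Example: one 9-byte SPS and one 4-byte PPS gives 7 + (2 + 9) + (2 + 4) = 24.
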
 
@@ -313,7 +313,7 @@
         type = (*(tmp + 4)) & 0x1F;
         if (type == kNalUnitTypeSeqParamSet) {
             if (gotPps) {
-                LOGE("SPS must come before PPS");
+                ALOGE("SPS must come before PPS");
                 return ERROR_MALFORMED;
             }
             if (!gotSps) {
@@ -323,7 +323,7 @@
                 &paramSetLen);
         } else if (type == kNalUnitTypePicParamSet) {
             if (!gotSps) {
-                LOGE("SPS must come before PPS");
+                ALOGE("SPS must come before PPS");
                 return ERROR_MALFORMED;
             }
             if (!gotPps) {
@@ -332,7 +332,7 @@
             nextStartCode = parseParamSet(&ctx, tmp + 4, bytesLeft - 4, type,
                 &paramSetLen);
         } else {
-            LOGE("Only SPS and PPS Nal units are expected");
+            ALOGE("Only SPS and PPS Nal units are expected");
             return ERROR_MALFORMED;
         }
 
@@ -350,12 +350,12 @@
         // Check on the number of seq parameter sets
         size_t nSeqParamSets = ctx.mSeqParamSets.size();
         if (nSeqParamSets == 0) {
-            LOGE("Cound not find sequence parameter set");
+            ALOGE("Cound not find sequence parameter set");
             return ERROR_MALFORMED;
         }
 
         if (nSeqParamSets > 0x1F) {
-            LOGE("Too many seq parameter sets (%d) found", nSeqParamSets);
+            ALOGE("Too many seq parameter sets (%d) found", nSeqParamSets);
             return ERROR_MALFORMED;
         }
     }
@@ -364,11 +364,11 @@
         // Check on the number of pic parameter sets
         size_t nPicParamSets = ctx.mPicParamSets.size();
         if (nPicParamSets == 0) {
-            LOGE("Cound not find picture parameter set");
+            ALOGE("Cound not find picture parameter set");
             return ERROR_MALFORMED;
         }
         if (nPicParamSets > 0xFF) {
-            LOGE("Too many pic parameter sets (%d) found", nPicParamSets);
+            ALOGE("Too many pic parameter sets (%d) found", nPicParamSets);
             return ERROR_MALFORMED;
         }
     }
@@ -402,7 +402,7 @@
         uint16_t seqParamSetLength = it->mLength;
         header[0] = seqParamSetLength >> 8;
         header[1] = seqParamSetLength & 0xff;
-        //LOGE("### SPS %d %d %d", seqParamSetLength, header[0], header[1]);
+        //ALOGE("### SPS %d %d %d", seqParamSetLength, header[0], header[1]);
 
         // SPS NAL unit (sequence parameter length bytes)
         memcpy(&header[2], it->mData, seqParamSetLength);
@@ -419,7 +419,7 @@
         uint16_t picParamSetLength = it->mLength;
         header[0] = picParamSetLength >> 8;
         header[1] = picParamSetLength & 0xff;
-//LOGE("### PPS %d %d %d", picParamSetLength, header[0], header[1]);
+//ALOGE("### PPS %d %d %d", picParamSetLength, header[0], header[1]);
 
         // PPS Nal unit (picture parameter set length bytes)
         memcpy(&header[2], it->mData, picParamSetLength);
diff --git a/libvideoeditor/vss/stagefrightshells/src/VideoEditorVideoDecoder.cpp b/libvideoeditor/vss/stagefrightshells/src/VideoEditorVideoDecoder.cpp
index 82e149b..aa26252 100755
--- a/libvideoeditor/vss/stagefrightshells/src/VideoEditorVideoDecoder.cpp
+++ b/libvideoeditor/vss/stagefrightshells/src/VideoEditorVideoDecoder.cpp
@@ -29,9 +29,9 @@
 #include "VideoEditorUtils.h"
 #include "M4VD_Tools.h"
 
+#include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/MetaData.h>
 #include <media/stagefright/MediaDefs.h>
-#include <media/stagefright/MediaDebug.h>
 /********************
  *   DEFINITIONS    *
  ********************/
@@ -100,13 +100,13 @@
 
     if (!mStarted) {
         if (mFormat->findInt32(kKeyMaxInputSize, &mMaxAUSize) == false) {
-            LOGE("Could not find kKeyMaxInputSize");
+            ALOGE("Could not find kKeyMaxInputSize");
             return ERROR_MALFORMED;
         }
 
         mGroup = new MediaBufferGroup;
         if (mGroup == NULL) {
-            LOGE("FATAL: memory limitation ! ");
+            ALOGE("FATAL: memory limitation ! ");
             return NO_MEMORY;
         }
 
@@ -124,7 +124,7 @@
             // FIXME:
             // Why do we need to check on the ref count?
             int ref_count = mBuffer->refcount();
-            LOGV("MediaBuffer refcount is %d",ref_count);
+            ALOGV("MediaBuffer refcount is %d",ref_count);
             for (int i = 0; i < ref_count; ++i) {
                 mBuffer->release();
             }
@@ -153,7 +153,7 @@
         MediaSource::ReadOptions::SeekMode mode;
         options->getSeekTo(&time_us, &mode);
         if (mode != MediaSource::ReadOptions::SEEK_PREVIOUS_SYNC) {
-            LOGE("Unexpected read options");
+            ALOGE("Unexpected read options");
             return BAD_VALUE;
         }
 
@@ -171,7 +171,7 @@
             rapTime -= 40000;
             if(rapTime < 0) rapTime = 0;
         } else if (err != OK) {
-            LOGE("get rap time error = 0x%x\n", (uint32_t)err);
+            ALOGE("get rap time error = 0x%x\n", (uint32_t)err);
             return UNKNOWN_ERROR;
         }
 
@@ -181,7 +181,7 @@
                    &rapTime);
 
         if (err != OK) {
-            LOGE("jump err = 0x%x\n", (uint32_t)err);
+            ALOGE("jump err = 0x%x\n", (uint32_t)err);
             return BAD_VALUE;
         }
     }
@@ -642,13 +642,13 @@
     VideoProfileLevel *pProfileLevel = NULL;
     pDecoder = decoders->decoder;
     for (size_t i = 0; i< decoders->decoderNumber; i++) {
-        LOGV("Supported Codec[%d] :%d", i, pDecoder->codec);
+        ALOGV("Supported Codec[%d] :%d", i, pDecoder->codec);
         pOmxComponents = pDecoder->component;
         for(size_t j = 0; j <  pDecoder->componentNumber; j++) {
            pProfileLevel = pOmxComponents->profileLevel;
-           LOGV("-->component %d", j);
+           ALOGV("-->component %d", j);
            for(size_t k = 0; k < pOmxComponents->profileNumber; k++) {
-               LOGV("-->profile:%ld maxLevel:%ld", pProfileLevel->mProfile,
+               ALOGV("-->profile:%ld maxLevel:%ld", pProfileLevel->mProfile,
                    pProfileLevel->mLevel);
                pProfileLevel++;
            }
@@ -691,17 +691,17 @@
             if (results.size()) {
                 SAFE_MALLOC(pOmxComponents, VideoComponentCapabilities,
                     results.size(), "VideoComponentCapabilities");
-                LOGV("K=%d",k);
+                ALOGV("K=%d",k);
                 pDecoder->component = pOmxComponents;
                 pDecoder->componentNumber = results.size();
             }
 
             for (size_t i = 0; i < results.size(); ++i) {
-                LOGV("  decoder '%s' supports ",
+                ALOGV("  decoder '%s' supports ",
                        results[i].mComponentName.string());
 
                 if (results[i].mProfileLevels.size() == 0) {
-                    LOGV("NOTHING.\n");
+                    ALOGV("NOTHING.\n");
                     continue;
                 }
 
@@ -710,7 +710,7 @@
                 // We should ignore the software codecs and make IsSoftwareCodec()
                 // part of public API from OMXCodec.cpp
                 if (IsSoftwareCodec(results[i].mComponentName.string())) {
-                    LOGV("Ignore software codec %s", results[i].mComponentName.string());
+                    ALOGV("Ignore software codec %s", results[i].mComponentName.string());
                     continue;
                 }
 #endif
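
The guarded block above drops software decoder components from the capability scan, leaning on OMXCodec's IsSoftwareCodec(), which the comment notes is not yet exported as public API. A minimal sketch of that filtering shape over an already-populated result list; the name-prefix rule in the stand-in predicate and the counting helper are assumptions, not the framework's actual implementation:

#include <string.h>
#include <media/stagefright/OMXCodec.h>
#include <utils/Vector.h>

namespace android {

// Stand-in for OMXCodec.cpp's IsSoftwareCodec(), which is not exported yet;
// the "OMX.google." prefix convention here is an assumption for illustration.
static bool isSoftwareCodecSketch(const char *componentName) {
    if (!strncmp(componentName, "OMX.google.", 11)) return true;
    return strncmp(componentName, "OMX.", 4) != 0;
}

// Counts hardware decoder components, skipping software ones as in the
// guarded block above.
static size_t countHardwareDecoders(const Vector<CodecCapabilities> &results) {
    size_t count = 0;
    for (size_t i = 0; i < results.size(); ++i) {
        if (isSoftwareCodecSketch(results[i].mComponentName.string())) {
            continue;
        }
        ++count;
    }
    return count;
}

}  // namespace android
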
@@ -744,7 +744,7 @@
                         maxLevel = profileLevel.mLevel;
                         pProfileLevel->mProfile = profile;
                         pProfileLevel->mLevel = maxLevel;
-                        LOGV("%d profile: %ld, max level: %ld",
+                        ALOGV("%d profile: %ld, max level: %ld",
                             __LINE__, pProfileLevel->mProfile, pProfileLevel->mLevel);
                     }
                     if (profileLevel.mProfile != profile) {
@@ -754,12 +754,12 @@
                         pProfileLevel++;
                         pProfileLevel->mProfile = profile;
                         pProfileLevel->mLevel = maxLevel;
-                        LOGV("%d profile: %ld, max level: %ld",
+                        ALOGV("%d profile: %ld, max level: %ld",
                             __LINE__, pProfileLevel->mProfile, pProfileLevel->mLevel);
                     } else if (profileLevel.mLevel > maxLevel) {
                         maxLevel = profileLevel.mLevel;
                         pProfileLevel->mLevel = maxLevel;
-                        LOGV("%d profile: %ld, max level: %ld",
+                        ALOGV("%d profile: %ld, max level: %ld",
                             __LINE__, pProfileLevel->mProfile, pProfileLevel->mLevel);
                     }
 
@@ -798,7 +798,7 @@
     VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER);
     VIDEOEDITOR_CHECK(M4OSA_NULL != meta,     M4ERR_PARAMETER);
 
-    LOGV("VideoEditorVideoDecoder_configureFromMetadata begin");
+    ALOGV("VideoEditorVideoDecoder_configureFromMetadata begin");
 
     pDecShellContext = (VideoEditorVideoDecoder_Context*)pContext;
 
@@ -807,7 +807,7 @@
     success = meta->findInt32(kKeyHeight, &vHeight);
     VIDEOEDITOR_CHECK(TRUE == success, M4ERR_PARAMETER);
 
-    LOGV("vWidth = %d, vHeight = %d", vWidth, vHeight);
+    ALOGV("vWidth = %d, vHeight = %d", vWidth, vHeight);
 
     pDecShellContext->mGivenWidth = vWidth;
     pDecShellContext->mGivenHeight = vHeight;
@@ -819,9 +819,9 @@
         cropRight = vWidth - 1;
         cropBottom = vHeight - 1;
 
-        LOGV("got dimensions only %d x %d", width, height);
+        ALOGV("got dimensions only %d x %d", width, height);
     } else {
-        LOGV("got crop rect %d, %d, %d, %d",
+        ALOGV("got crop rect %d, %d, %d, %d",
              cropLeft, cropTop, cropRight, cropBottom);
     }
 
@@ -833,7 +833,7 @@
     width = cropRight - cropLeft + 1;
     height = cropBottom - cropTop + 1;
 
-    LOGV("VideoDecoder_configureFromMetadata : W=%d H=%d", width, height);
+    ALOGV("VideoDecoder_configureFromMetadata : W=%d H=%d", width, height);
     VIDEOEDITOR_CHECK((0 != width) && (0 != height), M4ERR_PARAMETER);
 
     if( (M4OSA_NULL != pDecShellContext->m_pDecBufferPool) &&
@@ -844,7 +844,7 @@
         // No need to reconfigure
         goto cleanUp;
     }
-    LOGV("VideoDecoder_configureFromMetadata  reset: W=%d H=%d", width, height);
+    ALOGV("VideoDecoder_configureFromMetadata  reset: W=%d H=%d", width, height);
     // Update the stream handler parameters
     pDecShellContext->m_pVideoStreamhandler->m_videoWidth  = width;
     pDecShellContext->m_pVideoStreamhandler->m_videoHeight = height;
@@ -852,7 +852,7 @@
 
     // Configure the buffer pool
     if( M4OSA_NULL != pDecShellContext->m_pDecBufferPool ) {
-        LOGV("VideoDecoder_configureFromMetadata : reset the buffer pool");
+        ALOGV("VideoDecoder_configureFromMetadata : reset the buffer pool");
         VIDEOEDITOR_BUFFER_freePool(pDecShellContext->m_pDecBufferPool);
         pDecShellContext->m_pDecBufferPool = M4OSA_NULL;
     }
@@ -866,15 +866,15 @@
 
 cleanUp:
     if( M4NO_ERROR == err ) {
-        LOGV("VideoEditorVideoDecoder_configureFromMetadata no error");
+        ALOGV("VideoEditorVideoDecoder_configureFromMetadata no error");
     } else {
         if( M4OSA_NULL != pDecShellContext->m_pDecBufferPool ) {
             VIDEOEDITOR_BUFFER_freePool(pDecShellContext->m_pDecBufferPool);
             pDecShellContext->m_pDecBufferPool = M4OSA_NULL;
         }
-        LOGV("VideoEditorVideoDecoder_configureFromMetadata ERROR 0x%X", err);
+        ALOGV("VideoEditorVideoDecoder_configureFromMetadata ERROR 0x%X", err);
     }
-    LOGV("VideoEditorVideoDecoder_configureFromMetadata end");
+    ALOGV("VideoEditorVideoDecoder_configureFromMetadata end");
     return err;
 }
 
@@ -884,7 +884,7 @@
         (VideoEditorVideoDecoder_Context*)pContext;
 
     // Input parameters check
-    LOGV("VideoEditorVideoDecoder_destroy begin");
+    ALOGV("VideoEditorVideoDecoder_destroy begin");
     VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER);
 
     // Release the color converter
@@ -892,7 +892,7 @@
 
     // Destroy the graph
     if( pDecShellContext->mVideoDecoder != NULL ) {
-        LOGV("### VideoEditorVideoDecoder_destroy : releasing decoder");
+        ALOGV("### VideoEditorVideoDecoder_destroy : releasing decoder");
         pDecShellContext->mVideoDecoder->stop();
         pDecShellContext->mVideoDecoder.clear();
     }
@@ -909,11 +909,11 @@
 
 cleanUp:
     if( M4NO_ERROR == err ) {
-        LOGV("VideoEditorVideoDecoder_destroy no error");
+        ALOGV("VideoEditorVideoDecoder_destroy no error");
     } else {
-        LOGV("VideoEditorVideoDecoder_destroy ERROR 0x%X", err);
+        ALOGV("VideoEditorVideoDecoder_destroy ERROR 0x%X", err);
     }
-    LOGV("VideoEditorVideoDecoder_destroy end");
+    ALOGV("VideoEditorVideoDecoder_destroy end");
     return err;
 }
 
@@ -931,7 +931,7 @@
     sp<MetaData> decoderMetadata = NULL;
     int decoderOutput = OMX_COLOR_FormatYUV420Planar;
 
-    LOGV("VideoEditorVideoDecoder_create begin");
+    ALOGV("VideoEditorVideoDecoder_create begin");
     // Input parameters check
     VIDEOEDITOR_CHECK(M4OSA_NULL != pContext,             M4ERR_PARAMETER);
     VIDEOEDITOR_CHECK(M4OSA_NULL != pStreamHandler,       M4ERR_PARAMETER);
@@ -1055,7 +1055,7 @@
         pDecShellContext->mI420ColorConverter = NULL;
     }
 
-    LOGI("decoder output format = 0x%X\n", decoderOutput);
+    ALOGI("decoder output format = 0x%X\n", decoderOutput);
 
     // Configure the buffer pool from the metadata
     err = VideoEditorVideoDecoder_configureFromMetadata(pDecShellContext,
@@ -1070,13 +1070,13 @@
 
 cleanUp:
     if( M4NO_ERROR == err ) {
-        LOGV("VideoEditorVideoDecoder_create no error");
+        ALOGV("VideoEditorVideoDecoder_create no error");
     } else {
         VideoEditorVideoDecoder_destroy(pDecShellContext);
         *pContext = M4OSA_NULL;
-        LOGV("VideoEditorVideoDecoder_create ERROR 0x%X", err);
+        ALOGV("VideoEditorVideoDecoder_create ERROR 0x%X", err);
     }
-    LOGV("VideoEditorVideoDecoder_create : DONE");
+    ALOGV("VideoEditorVideoDecoder_create : DONE");
     return err;
 }
 
@@ -1093,7 +1093,7 @@
     M4OSA_UInt32 size = 0;
     sp<MetaData> decoderMetadata = NULL;
 
-    LOGV("VideoEditorVideoDecoder_create begin");
+    ALOGV("VideoEditorVideoDecoder_create begin");
     // Input parameters check
     VIDEOEDITOR_CHECK(M4OSA_NULL != pContext,             M4ERR_PARAMETER);
     VIDEOEDITOR_CHECK(M4OSA_NULL != pStreamHandler,       M4ERR_PARAMETER);
@@ -1187,7 +1187,7 @@
     status = pDecShellContext->mClient.connect();
     VIDEOEDITOR_CHECK(OK == status, M4ERR_SF_DECODER_RSRC_FAIL);
 
-     LOGI("Using software codecs only");
+     ALOGI("Using software codecs only");
     // Create the decoder
     pDecShellContext->mVideoDecoder = OMXCodec::Create(
         pDecShellContext->mClient.interface(),
@@ -1219,13 +1219,13 @@
 
 cleanUp:
     if( M4NO_ERROR == err ) {
-        LOGV("VideoEditorVideoDecoder_create no error");
+        ALOGV("VideoEditorVideoDecoder_create no error");
     } else {
         VideoEditorVideoDecoder_destroy(pDecShellContext);
         *pContext = M4OSA_NULL;
-        LOGV("VideoEditorVideoDecoder_create ERROR 0x%X", err);
+        ALOGV("VideoEditorVideoDecoder_create ERROR 0x%X", err);
     }
-    LOGV("VideoEditorVideoDecoder_create : DONE");
+    ALOGV("VideoEditorVideoDecoder_create : DONE");
     return err;
 }
 
@@ -1242,7 +1242,7 @@
     M4DECODER_AVCProfileLevel* profile;
     M4DECODER_MPEG4_DecoderConfigInfo* pDecConfInfo;
 
-    LOGV("VideoEditorVideoDecoder_getOption begin");
+    ALOGV("VideoEditorVideoDecoder_getOption begin");
 
     switch (optionId) {
         case M4DECODER_kOptionID_AVCLastDecodedFrameCTS:
@@ -1266,7 +1266,7 @@
                 (int32_t*)(&pVideoSize->m_uiWidth));
             pDecShellContext->mVideoDecoder->getFormat()->findInt32(kKeyHeight,
                 (int32_t*)(&pVideoSize->m_uiHeight));
-            LOGV("VideoEditorVideoDecoder_getOption : W=%d H=%d",
+            ALOGV("VideoEditorVideoDecoder_getOption : W=%d H=%d",
                 pVideoSize->m_uiWidth, pVideoSize->m_uiHeight);
             break;
 
@@ -1289,7 +1289,7 @@
 
     }
 
-    LOGV("VideoEditorVideoDecoder_getOption: end with err = 0x%x", lerr);
+    ALOGV("VideoEditorVideoDecoder_getOption: end with err = 0x%x", lerr);
     return lerr;
 }
 
@@ -1299,7 +1299,7 @@
     VideoEditorVideoDecoder_Context *pDecShellContext =
         (VideoEditorVideoDecoder_Context*) context;
 
-    LOGV("VideoEditorVideoDecoder_setOption begin");
+    ALOGV("VideoEditorVideoDecoder_setOption begin");
 
     switch (optionId) {
         case M4DECODER_kOptionID_OutputFilter: {
@@ -1319,7 +1319,7 @@
             break;
     }
 
-    LOGV("VideoEditorVideoDecoder_setOption: end with err = 0x%x", lerr);
+    ALOGV("VideoEditorVideoDecoder_setOption: end with err = 0x%x", lerr);
     return lerr;
 }
 
@@ -1334,21 +1334,21 @@
     status_t errStatus;
     bool needSeek = bJump;
 
-    LOGV("VideoEditorVideoDecoder_decode begin");
+    ALOGV("VideoEditorVideoDecoder_decode begin");
 
     if( M4OSA_TRUE == pDecShellContext->mReachedEOS ) {
         // Do not call read(), it could lead to a freeze
-        LOGV("VideoEditorVideoDecoder_decode : EOS already reached");
+        ALOGV("VideoEditorVideoDecoder_decode : EOS already reached");
         lerr = M4WAR_NO_MORE_AU;
         goto VIDEOEDITOR_VideoDecode_cleanUP;
     }
     if(pDecShellContext->m_lastDecodedCTS >= *pTime) {
-        LOGV("VideoDecoder_decode: Already decoded up to this time CTS = %lf.",
+        ALOGV("VideoDecoder_decode: Already decoded up to this time CTS = %lf.",
             pDecShellContext->m_lastDecodedCTS);
         goto VIDEOEDITOR_VideoDecode_cleanUP;
     }
     if(M4OSA_TRUE == bJump) {
-        LOGV("VideoEditorVideoDecoder_decode: Jump called");
+        ALOGV("VideoEditorVideoDecoder_decode: Jump called");
         pDecShellContext->m_lastDecodedCTS = -1;
         pDecShellContext->m_lastRenderCts = -1;
     }
@@ -1360,7 +1360,7 @@
     pDecShellContext->mLastInputCts = *pTime;
 
     while (pDecoderBuffer == NULL || pDecShellContext->m_lastDecodedCTS + tolerance < *pTime) {
-        LOGV("VideoEditorVideoDecoder_decode, frameCTS = %lf, DecodeUpTo = %lf",
+        ALOGV("VideoEditorVideoDecoder_decode, frameCTS = %lf, DecodeUpTo = %lf",
             pDecShellContext->m_lastDecodedCTS, *pTime);
 
         // Read the buffer from the stagefright decoder
@@ -1376,7 +1376,7 @@
 
         // Handle EOS and format change
         if (errStatus == ERROR_END_OF_STREAM) {
-            LOGV("End of stream reached, returning M4WAR_NO_MORE_AU ");
+            ALOGV("End of stream reached, returning M4WAR_NO_MORE_AU ");
             pDecShellContext->mReachedEOS = M4OSA_TRUE;
             lerr = M4WAR_NO_MORE_AU;
             // If we decoded a buffer before EOS, we still need to put it
@@ -1386,18 +1386,18 @@
             }
             goto VIDEOEDITOR_VideoDecode_cleanUP;
         } else if (INFO_FORMAT_CHANGED == errStatus) {
-            LOGV("VideoDecoder_decode : source returns INFO_FORMAT_CHANGED");
+            ALOGV("VideoDecoder_decode : source returns INFO_FORMAT_CHANGED");
             lerr = VideoEditorVideoDecoder_configureFromMetadata(
                 pDecShellContext,
                 pDecShellContext->mVideoDecoder->getFormat().get());
             if( M4NO_ERROR != lerr ) {
-                LOGV("!!! VideoEditorVideoDecoder_decode ERROR : "
+                ALOGV("!!! VideoEditorVideoDecoder_decode ERROR : "
                     "VideoDecoder_configureFromMetadata returns 0x%X", lerr);
                 break;
             }
             continue;
         } else if (errStatus != OK) {
-            LOGE("VideoEditorVideoDecoder_decode ERROR:0x%x(%d)",
+            ALOGE("VideoEditorVideoDecoder_decode ERROR:0x%x(%d)",
                 errStatus,errStatus);
             lerr = errStatus;
             goto VIDEOEDITOR_VideoDecode_cleanUP;
@@ -1420,7 +1420,7 @@
         // Record the timestamp of last decoded buffer
         pDecoderBuffer->meta_data()->findInt64(kKeyTime, &lFrameTime);
         pDecShellContext->m_lastDecodedCTS = (M4_MediaTime)(lFrameTime/1000);
-        LOGV("VideoEditorVideoDecoder_decode,decoded frametime = %lf,size = %d",
+        ALOGV("VideoEditorVideoDecoder_decode,decoded frametime = %lf,size = %d",
             (M4_MediaTime)lFrameTime, pDecoderBuffer->size() );
 
         // If bJump is false, we need to save every decoded buffer
@@ -1453,7 +1453,7 @@
         pDecoderBuffer = NULL;
     }
 
-    LOGV("VideoEditorVideoDecoder_decode: end with 0x%x", lerr);
+    ALOGV("VideoEditorVideoDecoder_decode: end with 0x%x", lerr);
     return lerr;
 }
 
@@ -1485,7 +1485,7 @@
             pDecShellContext->mGivenHeight,  // decoderHeight
             pDecShellContext->mCropRect,  // decoderRect
             tmpDecBuffer->pData /* dstBits */) < 0) {
-            LOGE("convertDecoderOutputToI420 failed");
+            ALOGE("convertDecoderOutputToI420 failed");
             lerr = M4ERR_NOT_IMPLEMENTED;
         }
     } else if (pDecShellContext->decOuputColorFormat == OMX_COLOR_FormatYUV420Planar) {
@@ -1540,7 +1540,7 @@
             }
         }
     } else {
-        LOGE("VideoDecoder_decode: unexpected color format 0x%X",
+        ALOGE("VideoDecoder_decode: unexpected color format 0x%X",
             pDecShellContext->decOuputColorFormat);
         lerr = M4ERR_PARAMETER;
     }
@@ -1566,7 +1566,7 @@
     M4_MediaTime candidateTimeStamp = -1;
     M4OSA_Bool bFound = M4OSA_FALSE;
 
-    LOGV("VideoEditorVideoDecoder_render begin");
+    ALOGV("VideoEditorVideoDecoder_render begin");
     // Input parameters check
     VIDEOEDITOR_CHECK(M4OSA_NULL != context, M4ERR_PARAMETER);
     VIDEOEDITOR_CHECK(M4OSA_NULL != pTime, M4ERR_PARAMETER);
@@ -1575,11 +1575,11 @@
     // The output buffer is already allocated, just copy the data
     if ( (*pTime <= pDecShellContext->m_lastRenderCts) &&
             (M4OSA_FALSE == bForceRender) ) {
-        LOGV("VIDEOEDITOR_VIDEO_render Frame in the past");
+        ALOGV("VIDEOEDITOR_VIDEO_render Frame in the past");
         err = M4WAR_VIDEORENDERER_NO_NEW_FRAME;
         goto cleanUp;
     }
-    LOGV("VideoDecoder_render: lastRendered time = %lf,requested render time = "
+    ALOGV("VideoDecoder_render: lastRendered time = %lf,requested render time = "
         "%lf", pDecShellContext->m_lastRenderCts, *pTime);
 
     /**
@@ -1602,7 +1602,7 @@
                 bFound = M4OSA_TRUE;
                 pRenderVIDEOEDITORBuffer = pTmpVIDEOEDITORBuffer;
                 candidateTimeStamp = pTmpVIDEOEDITORBuffer->buffCTS;
-                LOGV("VideoDecoder_render: found a buffer with timestamp = %lf",
+                ALOGV("VideoDecoder_render: found a buffer with timestamp = %lf",
                     candidateTimeStamp);
             }
         }
@@ -1612,7 +1612,7 @@
         goto cleanUp;
     }
 
-    LOGV("VideoEditorVideoDecoder_render 3 ouput %d %d %d %d",
+    ALOGV("VideoEditorVideoDecoder_render 3 ouput %d %d %d %d",
         pOutputPlane[0].u_width, pOutputPlane[0].u_height,
         pOutputPlane[0].u_topleft, pOutputPlane[0].u_stride);
 
@@ -1642,7 +1642,7 @@
         tmpPlane[2].pac_data  = tmpPlane[1].pac_data +
             (tmpPlane[1].u_stride * tmpPlane[1].u_height);
 
-        LOGV("VideoEditorVideoDecoder_render w = %d H = %d",
+        ALOGV("VideoEditorVideoDecoder_render w = %d H = %d",
             tmpPlane[0].u_width,tmpPlane[0].u_height);
         pDecShellContext->m_pFilter(M4OSA_NULL, &tmpPlane[0], pOutputPlane);
     } else {
@@ -1673,11 +1673,11 @@
 cleanUp:
     if( M4NO_ERROR == err ) {
         *pTime = pDecShellContext->m_lastRenderCts;
-        LOGV("VideoEditorVideoDecoder_render no error");
+        ALOGV("VideoEditorVideoDecoder_render no error");
     } else {
-        LOGV("VideoEditorVideoDecoder_render ERROR 0x%X", err);
+        ALOGV("VideoEditorVideoDecoder_render ERROR 0x%X", err);
     }
-    LOGV("VideoEditorVideoDecoder_render end");
+    ALOGV("VideoEditorVideoDecoder_render end");
     return err;
 }
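
The decoder-shell hunks above are dominated by the mechanical rename of the LOGV/LOGI/LOGE/LOGW logging macros to ALOGV/ALOGI/ALOGE/ALOGW. As a hypothetical, standalone illustration (not part of the patch), this is how the renamed calls are typically written, assuming the standard Android "utils/Log.h" macros and a per-file LOG_TAG; ALOGV is normally compiled out of release builds unless LOG_NDEBUG is set to 0:

    // Hypothetical example only; it mirrors the call sites seen in VideoEditorVideoDecoder.cpp.
    #define LOG_NDEBUG 0                        // keep ALOGV active for this sketch
    #define LOG_TAG "VideoEditorVideoDecoder"   // tag reported by logcat for every macro below
    #include "utils/Log.h"

    static void logCropRect(int left, int top, int right, int bottom) {
        ALOGV("got crop rect %d, %d, %d, %d", left, top, right, bottom);   // verbose diagnostics
        if (right < left || bottom < top) {
            ALOGE("invalid crop rect %d, %d, %d, %d", left, top, right, bottom);  // always logged
        }
    }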
 
diff --git a/libvideoeditor/vss/stagefrightshells/src/VideoEditorVideoEncoder.cpp b/libvideoeditor/vss/stagefrightshells/src/VideoEditorVideoEncoder.cpp
index f47c921..4787680 100755
--- a/libvideoeditor/vss/stagefrightshells/src/VideoEditorVideoEncoder.cpp
+++ b/libvideoeditor/vss/stagefrightshells/src/VideoEditorVideoEncoder.cpp
@@ -29,12 +29,14 @@
 #include "M4SYS_AccessUnit.h"
 #include "VideoEditorVideoEncoder.h"
 #include "VideoEditorUtils.h"
+#include "MediaBufferPuller.h"
 #include <I420ColorConverter.h>
 
+#include <unistd.h>
 #include "utils/Log.h"
 #include "utils/Vector.h"
+#include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/MediaSource.h>
-#include <media/stagefright/MediaDebug.h>
 #include <media/stagefright/MediaDefs.h>
 #include <media/stagefright/MetaData.h>
 #include <media/stagefright/OMXClient.h>
@@ -117,7 +119,7 @@
         mIsEOS(false),
         mState(CREATED),
         mEncFormat(format) {
-    LOGV("VideoEditorVideoEncoderSource::VideoEditorVideoEncoderSource");
+    ALOGV("VideoEditorVideoEncoderSource::VideoEditorVideoEncoderSource");
 }
 
 VideoEditorVideoEncoderSource::~VideoEditorVideoEncoderSource() {
@@ -131,25 +133,25 @@
 status_t VideoEditorVideoEncoderSource::start(MetaData *meta) {
     status_t err = OK;
 
-    LOGV("VideoEditorVideoEncoderSource::start() begin");
+    ALOGV("VideoEditorVideoEncoderSource::start() begin");
 
     if( CREATED != mState ) {
-        LOGV("VideoEditorVideoEncoderSource::start: invalid state %d", mState);
+        ALOGV("VideoEditorVideoEncoderSource::start: invalid state %d", mState);
         return UNKNOWN_ERROR;
     }
     mState = STARTED;
 
-    LOGV("VideoEditorVideoEncoderSource::start() END (0x%x)", err);
+    ALOGV("VideoEditorVideoEncoderSource::start() END (0x%x)", err);
     return err;
 }
 
 status_t VideoEditorVideoEncoderSource::stop() {
     status_t err = OK;
 
-    LOGV("VideoEditorVideoEncoderSource::stop() begin");
+    ALOGV("VideoEditorVideoEncoderSource::stop() begin");
 
     if( STARTED != mState ) {
-        LOGV("VideoEditorVideoEncoderSource::stop: invalid state %d", mState);
+        ALOGV("VideoEditorVideoEncoderSource::stop: invalid state %d", mState);
         return UNKNOWN_ERROR;
     }
 
@@ -162,19 +164,19 @@
         mFirstBufferLink = mFirstBufferLink->nextLink;
         delete tmpLink;
     }
-    LOGV("VideoEditorVideoEncoderSource::stop : %d buffer remained", i);
+    ALOGV("VideoEditorVideoEncoderSource::stop : %d buffer remained", i);
     mFirstBufferLink = NULL;
     mLastBufferLink = NULL;
 
     mState = CREATED;
 
-    LOGV("VideoEditorVideoEncoderSource::stop() END (0x%x)", err);
+    ALOGV("VideoEditorVideoEncoderSource::stop() END (0x%x)", err);
     return err;
 }
 
 sp<MetaData> VideoEditorVideoEncoderSource::getFormat() {
 
-    LOGV("VideoEditorVideoEncoderSource::getFormat");
+    ALOGV("VideoEditorVideoEncoderSource::getFormat");
     return mEncFormat;
 }
 
@@ -185,10 +187,10 @@
     status_t err = OK;
     MediaBufferChain* tmpLink = NULL;
 
-    LOGV("VideoEditorVideoEncoderSource::read() begin");
+    ALOGV("VideoEditorVideoEncoderSource::read() begin");
 
     if ( STARTED != mState ) {
-        LOGV("VideoEditorVideoEncoderSource::read: invalid state %d", mState);
+        ALOGV("VideoEditorVideoEncoderSource::read: invalid state %d", mState);
         return UNKNOWN_ERROR;
     }
 
@@ -199,7 +201,7 @@
     // End of stream?
     if (mFirstBufferLink == NULL) {
         *buffer = NULL;
-        LOGV("VideoEditorVideoEncoderSource::read : EOS");
+        ALOGV("VideoEditorVideoEncoderSource::read : EOS");
         return ERROR_END_OF_STREAM;
     }
 
@@ -214,7 +216,7 @@
     delete tmpLink;
     mNbBuffer--;
 
-    LOGV("VideoEditorVideoEncoderSource::read() END (0x%x)", err);
+    ALOGV("VideoEditorVideoEncoderSource::read() END (0x%x)", err);
     return err;
 }
 
@@ -222,10 +224,10 @@
     Mutex::Autolock autolock(mLock);
     status_t err = OK;
 
-    LOGV("VideoEditorVideoEncoderSource::storeBuffer() begin");
+    ALOGV("VideoEditorVideoEncoderSource::storeBuffer() begin");
 
     if( NULL == buffer ) {
-        LOGV("VideoEditorVideoEncoderSource::storeBuffer : reached EOS");
+        ALOGV("VideoEditorVideoEncoderSource::storeBuffer : reached EOS");
         mIsEOS = true;
     } else {
         MediaBufferChain* newLink = new MediaBufferChain;
@@ -240,7 +242,7 @@
         mNbBuffer++;
     }
     mBufferCond.signal();
-    LOGV("VideoEditorVideoEncoderSource::storeBuffer() end");
+    ALOGV("VideoEditorVideoEncoderSource::storeBuffer() end");
     return mNbBuffer;
 }
 
@@ -248,194 +250,6 @@
     Mutex::Autolock autolock(mLock);
     return mNbBuffer;
 }
-/********************
- *      PULLER      *
- ********************/
-
-// Pulls media buffers from a MediaSource repeatedly.
-// The user can then get the buffers from that list.
-class VideoEditorVideoEncoderPuller {
-public:
-    VideoEditorVideoEncoderPuller(sp<MediaSource> source);
-    ~VideoEditorVideoEncoderPuller();
-    void start();
-    void stop();
-    MediaBuffer* getBufferBlocking();
-    MediaBuffer* getBufferNonBlocking();
-    void putBuffer(MediaBuffer* buffer);
-    bool hasMediaSourceReturnedError();
-private:
-    static int acquireThreadStart(void* arg);
-    void acquireThreadFunc();
-
-    static int releaseThreadStart(void* arg);
-    void releaseThreadFunc();
-
-    sp<MediaSource> mSource;
-    Vector<MediaBuffer*> mBuffers;
-    Vector<MediaBuffer*> mReleaseBuffers;
-
-    Mutex mLock;
-    Condition mUserCond;     // for the user of this class
-    Condition mAcquireCond;  // for the acquire thread
-    Condition mReleaseCond;  // for the release thread
-
-    bool mAskToStart;      // Asks the threads to start
-    bool mAskToStop;       // Asks the threads to stop
-    bool mAcquireStopped;  // The acquire thread has stopped
-    bool mReleaseStopped;  // The release thread has stopped
-    status_t mSourceError; // Error returned by MediaSource read
-};
-
-VideoEditorVideoEncoderPuller::VideoEditorVideoEncoderPuller(
-    sp<MediaSource> source) {
-    mSource = source;
-    mAskToStart = false;
-    mAskToStop = false;
-    mAcquireStopped = false;
-    mReleaseStopped = false;
-    mSourceError = OK;
-    androidCreateThread(acquireThreadStart, this);
-    androidCreateThread(releaseThreadStart, this);
-}
-
-VideoEditorVideoEncoderPuller::~VideoEditorVideoEncoderPuller() {
-    stop();
-}
-
-bool VideoEditorVideoEncoderPuller::hasMediaSourceReturnedError() {
-    Mutex::Autolock autolock(mLock);
-    return ((mSourceError != OK) ? true : false);
-}
-void VideoEditorVideoEncoderPuller::start() {
-    Mutex::Autolock autolock(mLock);
-    mAskToStart = true;
-    mAcquireCond.signal();
-    mReleaseCond.signal();
-}
-
-void VideoEditorVideoEncoderPuller::stop() {
-    Mutex::Autolock autolock(mLock);
-    mAskToStop = true;
-    mAcquireCond.signal();
-    mReleaseCond.signal();
-    while (!mAcquireStopped || !mReleaseStopped) {
-        mUserCond.wait(mLock);
-    }
-
-    // Release remaining buffers
-    for (size_t i = 0; i < mBuffers.size(); i++) {
-        mBuffers.itemAt(i)->release();
-    }
-
-    for (size_t i = 0; i < mReleaseBuffers.size(); i++) {
-        mReleaseBuffers.itemAt(i)->release();
-    }
-
-    mBuffers.clear();
-    mReleaseBuffers.clear();
-}
-
-MediaBuffer* VideoEditorVideoEncoderPuller::getBufferNonBlocking() {
-    Mutex::Autolock autolock(mLock);
-    if (mBuffers.empty()) {
-        return NULL;
-    } else {
-        MediaBuffer* b = mBuffers.itemAt(0);
-        mBuffers.removeAt(0);
-        return b;
-    }
-}
-
-MediaBuffer* VideoEditorVideoEncoderPuller::getBufferBlocking() {
-    Mutex::Autolock autolock(mLock);
-    while (mBuffers.empty() && !mAcquireStopped) {
-        mUserCond.wait(mLock);
-    }
-
-    if (mBuffers.empty()) {
-        return NULL;
-    } else {
-        MediaBuffer* b = mBuffers.itemAt(0);
-        mBuffers.removeAt(0);
-        return b;
-    }
-}
-
-void VideoEditorVideoEncoderPuller::putBuffer(MediaBuffer* buffer) {
-    Mutex::Autolock autolock(mLock);
-    mReleaseBuffers.push(buffer);
-    mReleaseCond.signal();
-}
-
-int VideoEditorVideoEncoderPuller::acquireThreadStart(void* arg) {
-    VideoEditorVideoEncoderPuller* self = (VideoEditorVideoEncoderPuller*)arg;
-    self->acquireThreadFunc();
-    return 0;
-}
-
-int VideoEditorVideoEncoderPuller::releaseThreadStart(void* arg) {
-    VideoEditorVideoEncoderPuller* self = (VideoEditorVideoEncoderPuller*)arg;
-    self->releaseThreadFunc();
-    return 0;
-}
-
-void VideoEditorVideoEncoderPuller::acquireThreadFunc() {
-    mLock.lock();
-
-    // Wait for the start signal
-    while (!mAskToStart && !mAskToStop) {
-        mAcquireCond.wait(mLock);
-    }
-
-    // Loop until we are asked to stop, or there is nothing more to read
-    while (!mAskToStop) {
-        MediaBuffer* pBuffer;
-        mLock.unlock();
-        status_t result = mSource->read(&pBuffer, NULL);
-        mLock.lock();
-        mSourceError = result;
-        if (result != OK) {
-            break;
-        }
-        mBuffers.push(pBuffer);
-        mUserCond.signal();
-    }
-
-    mAcquireStopped = true;
-    mUserCond.signal();
-    mLock.unlock();
-}
-
-void VideoEditorVideoEncoderPuller::releaseThreadFunc() {
-    mLock.lock();
-
-    // Wait for the start signal
-    while (!mAskToStart && !mAskToStop) {
-        mReleaseCond.wait(mLock);
-    }
-
-    // Loop until we are asked to stop
-    while (1) {
-        if (mReleaseBuffers.empty()) {
-            if (mAskToStop) {
-                break;
-            } else {
-                mReleaseCond.wait(mLock);
-                continue;
-            }
-        }
-        MediaBuffer* pBuffer = mReleaseBuffers.itemAt(0);
-        mReleaseBuffers.removeAt(0);
-        mLock.unlock();
-        pBuffer->release();
-        mLock.lock();
-    }
-
-    mReleaseStopped = true;
-    mUserCond.signal();
-    mLock.unlock();
-}
 
 /**
  ******************************************************************************
@@ -468,7 +282,7 @@
     OMXClient                         mClient;
     sp<MediaSource>                   mEncoder;
     OMX_COLOR_FORMATTYPE              mEncoderColorFormat;
-    VideoEditorVideoEncoderPuller*    mPuller;
+    MediaBufferPuller*                mPuller;
     I420ColorConverter*               mI420ColorConverter;
 
     uint32_t                          mNbInputFrames;
@@ -504,7 +318,7 @@
     sp<MediaSource> encoder = NULL;;
     OMXClient client;
 
-    LOGV("VideoEditorVideoEncoder_getDSI begin");
+    ALOGV("VideoEditorVideoEncoder_getDSI begin");
     // Input parameters check
     VIDEOEDITOR_CHECK(M4OSA_NULL != pContext,       M4ERR_PARAMETER);
     VIDEOEDITOR_CHECK(M4OSA_NULL != metaData.get(), M4ERR_PARAMETER);
@@ -579,11 +393,11 @@
     client.disconnect();
     if ( encoderSource != NULL ) { encoderSource.clear(); }
     if ( M4NO_ERROR == err ) {
-        LOGV("VideoEditorVideoEncoder_getDSI no error");
+        ALOGV("VideoEditorVideoEncoder_getDSI no error");
     } else {
-        LOGV("VideoEditorVideoEncoder_getDSI ERROR 0x%X", err);
+        ALOGV("VideoEditorVideoEncoder_getDSI ERROR 0x%X", err);
     }
-    LOGV("VideoEditorVideoEncoder_getDSI end");
+    ALOGV("VideoEditorVideoEncoder_getDSI end");
     return err;
 }
 /********************
@@ -594,7 +408,7 @@
     M4OSA_ERR err = M4NO_ERROR;
     VideoEditorVideoEncoder_Context* pEncoderContext = M4OSA_NULL;
 
-    LOGV("VideoEditorVideoEncoder_cleanup begin");
+    ALOGV("VideoEditorVideoEncoder_cleanup begin");
     // Input parameters check
     VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER);
 
@@ -608,11 +422,11 @@
 
 cleanUp:
     if ( M4NO_ERROR == err ) {
-        LOGV("VideoEditorVideoEncoder_cleanup no error");
+        ALOGV("VideoEditorVideoEncoder_cleanup no error");
     } else {
-        LOGV("VideoEditorVideoEncoder_cleanup ERROR 0x%X", err);
+        ALOGV("VideoEditorVideoEncoder_cleanup ERROR 0x%X", err);
     }
-    LOGV("VideoEditorVideoEncoder_cleanup end");
+    ALOGV("VideoEditorVideoEncoder_cleanup end");
     return err;
 }
 
@@ -626,7 +440,7 @@
     VideoEditorVideoEncoder_Context* pEncoderContext = M4OSA_NULL;
     int encoderInput = OMX_COLOR_FormatYUV420Planar;
 
-    LOGV("VideoEditorVideoEncoder_init begin: format  %d", format);
+    ALOGV("VideoEditorVideoEncoder_init begin: format  %d", format);
     // Input parameters check
     VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER);
     VIDEOEDITOR_CHECK(M4OSA_NULL != pWriterDataInterface, M4ERR_PARAMETER);
@@ -653,19 +467,19 @@
         pEncoderContext->mI420ColorConverter = NULL;
     }
     pEncoderContext->mEncoderColorFormat = (OMX_COLOR_FORMATTYPE)encoderInput;
-    LOGI("encoder input format = 0x%X\n", encoderInput);
+    ALOGI("encoder input format = 0x%X\n", encoderInput);
 
     *pContext = pEncoderContext;
 
 cleanUp:
     if ( M4NO_ERROR == err ) {
-        LOGV("VideoEditorVideoEncoder_init no error");
+        ALOGV("VideoEditorVideoEncoder_init no error");
     } else {
         VideoEditorVideoEncoder_cleanup(pEncoderContext);
         *pContext = M4OSA_NULL;
-        LOGV("VideoEditorVideoEncoder_init ERROR 0x%X", err);
+        ALOGV("VideoEditorVideoEncoder_init ERROR 0x%X", err);
     }
-    LOGV("VideoEditorVideoEncoder_init end");
+    ALOGV("VideoEditorVideoEncoder_init end");
     return err;
 }
 
@@ -702,7 +516,7 @@
     M4OSA_ERR err = M4NO_ERROR;
     VideoEditorVideoEncoder_Context* pEncoderContext = M4OSA_NULL;
 
-    LOGV("VideoEditorVideoEncoder_close begin");
+    ALOGV("VideoEditorVideoEncoder_close begin");
     // Input parameters check
     VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER);
 
@@ -728,11 +542,11 @@
 
 cleanUp:
     if( M4NO_ERROR == err ) {
-        LOGV("VideoEditorVideoEncoder_close no error");
+        ALOGV("VideoEditorVideoEncoder_close no error");
     } else {
-        LOGV("VideoEditorVideoEncoder_close ERROR 0x%X", err);
+        ALOGV("VideoEditorVideoEncoder_close ERROR 0x%X", err);
     }
-    LOGV("VideoEditorVideoEncoder_close end");
+    ALOGV("VideoEditorVideoEncoder_close end");
     return err;
 }
 
@@ -751,7 +565,7 @@
     int32_t iFrameRate = 0;
     uint32_t codecFlags = 0;
 
-    LOGV(">>> VideoEditorVideoEncoder_open begin");
+    ALOGV(">>> VideoEditorVideoEncoder_open begin");
     // Input parameters check
     VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER);
     VIDEOEDITOR_CHECK(M4OSA_NULL != pAU,      M4ERR_PARAMETER);
@@ -818,9 +632,9 @@
     }
     iProfile = pEncoderContext->mCodecParams->videoProfile;
     iLevel = pEncoderContext->mCodecParams->videoLevel;
-    LOGV("Encoder mime %s profile %d, level %d",
+    ALOGV("Encoder mime %s profile %d, level %d",
         mime,iProfile, iLevel);
-    LOGV("Encoder w %d, h %d, bitrate %d, fps %d",
+    ALOGV("Encoder w %d, h %d, bitrate %d, fps %d",
         pEncoderContext->mCodecParams->FrameWidth,
         pEncoderContext->mCodecParams->FrameHeight,
         pEncoderContext->mCodecParams->Bitrate,
@@ -857,11 +671,11 @@
         case M4ENCODER_k30_FPS:   iFrameRate = 30; break;
         case M4ENCODER_kVARIABLE_FPS:
             iFrameRate = 30;
-            LOGI("Frame rate set to M4ENCODER_kVARIABLE_FPS: set to 30");
+            ALOGI("Frame rate set to M4ENCODER_kVARIABLE_FPS: set to 30");
           break;
         case M4ENCODER_kUSE_TIMESCALE:
             iFrameRate = 30;
-            LOGI("Frame rate set to M4ENCODER_kUSE_TIMESCALE:  set to 30");
+            ALOGI("Frame rate set to M4ENCODER_kUSE_TIMESCALE:  set to 30");
             break;
 
         default:
@@ -901,8 +715,8 @@
         pEncoderContext->mClient.interface(), encoderMetadata, true,
         pEncoderContext->mEncoderSource, NULL, codecFlags);
     VIDEOEDITOR_CHECK(NULL != pEncoderContext->mEncoder.get(), M4ERR_STATE);
-    LOGV("VideoEditorVideoEncoder_open : DONE");
-    pEncoderContext->mPuller = new VideoEditorVideoEncoderPuller(
+    ALOGV("VideoEditorVideoEncoder_open : DONE");
+    pEncoderContext->mPuller = new MediaBufferPuller(
         pEncoderContext->mEncoder);
 
     // Set the new state
@@ -910,12 +724,12 @@
 
 cleanUp:
     if( M4NO_ERROR == err ) {
-        LOGV("VideoEditorVideoEncoder_open no error");
+        ALOGV("VideoEditorVideoEncoder_open no error");
     } else {
         VideoEditorVideoEncoder_close(pEncoderContext);
-        LOGV("VideoEditorVideoEncoder_open ERROR 0x%X", err);
+        ALOGV("VideoEditorVideoEncoder_open ERROR 0x%X", err);
     }
-    LOGV("VideoEditorVideoEncoder_open end");
+    ALOGV("VideoEditorVideoEncoder_open end");
     return err;
 }
 
@@ -928,7 +742,7 @@
     MediaBuffer* buffer = NULL;
     int32_t nbBuffer = 0;
 
-    LOGV("VideoEditorVideoEncoder_processInputBuffer begin: cts  %f", Cts);
+    ALOGV("VideoEditorVideoEncoder_processInputBuffer begin: cts  %f", Cts);
     // Input parameters check
     VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER);
 
@@ -992,7 +806,7 @@
                     encoderWidth, encoderHeight,
                     encoderRect,
                     (uint8_t*)newBuffer->data() + newBuffer->range_offset()) < 0) {
-                    LOGE("convertI420ToEncoderInput failed");
+                    ALOGE("convertI420ToEncoderInput failed");
                 }
 
                 // switch to new buffer
@@ -1010,14 +824,14 @@
 
 cleanUp:
     if ( M4NO_ERROR == err ) {
-        LOGV("VideoEditorVideoEncoder_processInputBuffer error 0x%X", err);
+        ALOGV("VideoEditorVideoEncoder_processInputBuffer error 0x%X", err);
     } else {
         if( NULL != buffer ) {
             buffer->release();
         }
-        LOGV("VideoEditorVideoEncoder_processInputBuffer ERROR 0x%X", err);
+        ALOGV("VideoEditorVideoEncoder_processInputBuffer ERROR 0x%X", err);
     }
-    LOGV("VideoEditorVideoEncoder_processInputBuffer end");
+    ALOGV("VideoEditorVideoEncoder_processInputBuffer end");
     return err;
 }
 
@@ -1030,7 +844,7 @@
     int64_t i64Tmp = 0;
     status_t result = OK;
 
-    LOGV("VideoEditorVideoEncoder_processOutputBuffer begin");
+    ALOGV("VideoEditorVideoEncoder_processOutputBuffer begin");
     // Input parameters check
     VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER);
     VIDEOEDITOR_CHECK(M4OSA_NULL != buffer,   M4ERR_PARAMETER);
@@ -1040,18 +854,18 @@
     // Process the returned AU
     if ( 0 == buffer->range_length() ) {
         // Encoder has no data yet, nothing unusual
-        LOGV("VideoEditorVideoEncoder_processOutputBuffer : buffer is empty");
+        ALOGV("VideoEditorVideoEncoder_processOutputBuffer : buffer is empty");
         goto cleanUp;
     }
     VIDEOEDITOR_CHECK(0 == ((M4OSA_UInt32)buffer->data())%4, M4ERR_PARAMETER);
     VIDEOEDITOR_CHECK(buffer->meta_data().get(), M4ERR_PARAMETER);
     if ( buffer->meta_data()->findInt32(kKeyIsCodecConfig, &i32Tmp) && i32Tmp ){
         {   // Display the DSI
-            LOGV("VideoEditorVideoEncoder_processOutputBuffer DSI %d",
+            ALOGV("VideoEditorVideoEncoder_processOutputBuffer DSI %d",
                 buffer->range_length());
             uint8_t* tmp = (uint8_t*)(buffer->data());
             for( uint32_t i=0; i<buffer->range_length(); i++ ) {
-                LOGV("DSI [%d] %.2X", i, tmp[i]);
+                ALOGV("DSI [%d] %.2X", i, tmp[i]);
             }
         }
     } else {
@@ -1066,15 +880,15 @@
         pEncoderContext->mLastOutputCts = i64Tmp;
 
         Cts = (M4OSA_Int32)(i64Tmp/1000);
-        LOGV("[TS_CHECK] VI/ENC WRITE frame %d @ %lld -> %d (last %d)",
+        ALOGV("[TS_CHECK] VI/ENC WRITE frame %d @ %lld -> %d (last %d)",
             pEncoderContext->mNbOutputFrames, i64Tmp, Cts,
             pEncoderContext->mLastCTS);
         if ( Cts < pEncoderContext->mLastCTS ) {
-            LOGV("VideoEncoder_processOutputBuffer WARNING : Cts is going "
+            ALOGV("VideoEncoder_processOutputBuffer WARNING : Cts is going "
             "backwards %d < %d", Cts, pEncoderContext->mLastCTS);
             goto cleanUp;
         }
-        LOGV("VideoEditorVideoEncoder_processOutputBuffer : %d %d",
+        ALOGV("VideoEditorVideoEncoder_processOutputBuffer : %d %d",
             Cts, pEncoderContext->mLastCTS);
 
         // Retrieve the AU container
@@ -1127,7 +941,7 @@
         pEncoderContext->mAccessUnit->CTS = Cts;
         pEncoderContext->mAccessUnit->DTS = Cts;
 
-        LOGV("VideoEditorVideoEncoder_processOutputBuffer: AU @ 0x%X 0x%X %d %d",
+        ALOGV("VideoEditorVideoEncoder_processOutputBuffer: AU @ 0x%X 0x%X %d %d",
             pEncoderContext->mAccessUnit->dataAddress,
             *pEncoderContext->mAccessUnit->dataAddress,
             pEncoderContext->mAccessUnit->size,
@@ -1143,13 +957,13 @@
 
 cleanUp:
     if( M4NO_ERROR == err ) {
-        LOGV("VideoEditorVideoEncoder_processOutputBuffer no error");
+        ALOGV("VideoEditorVideoEncoder_processOutputBuffer no error");
     } else {
         SAFE_FREE(pEncoderContext->mHeader.pBuf);
         pEncoderContext->mHeader.Size = 0;
-        LOGV("VideoEditorVideoEncoder_processOutputBuffer ERROR 0x%X", err);
+        ALOGV("VideoEditorVideoEncoder_processOutputBuffer ERROR 0x%X", err);
     }
-    LOGV("VideoEditorVideoEncoder_processOutputBuffer end");
+    ALOGV("VideoEditorVideoEncoder_processOutputBuffer end");
     return err;
 }
 
@@ -1161,7 +975,7 @@
     status_t result = OK;
     MediaBuffer* outputBuffer = NULL;
 
-    LOGV("VideoEditorVideoEncoder_encode 0x%X %f %d", pInPlane, Cts, FrameMode);
+    ALOGV("VideoEditorVideoEncoder_encode 0x%X %f %d", pInPlane, Cts, FrameMode);
     VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER);
 
     pEncoderContext = (VideoEditorVideoEncoder_Context*)pContext;
@@ -1177,7 +991,7 @@
     }
     pEncoderContext->mLastInputCts = Cts;
 
-    LOGV("VideoEditorVideoEncoder_encode 0x%X %d %f (%d)", pInPlane, FrameMode,
+    ALOGV("VideoEditorVideoEncoder_encode 0x%X %d %f (%d)", pInPlane, FrameMode,
         Cts, pEncoderContext->mLastCTS);
 
     // Push the input buffer to the encoder source
@@ -1224,11 +1038,11 @@
 
 cleanUp:
     if( M4NO_ERROR == err ) {
-        LOGV("VideoEditorVideoEncoder_encode no error");
+        ALOGV("VideoEditorVideoEncoder_encode no error");
     } else {
-        LOGV("VideoEditorVideoEncoder_encode ERROR 0x%X", err);
+        ALOGV("VideoEditorVideoEncoder_encode ERROR 0x%X", err);
     }
-    LOGV("VideoEditorVideoEncoder_encode end");
+    ALOGV("VideoEditorVideoEncoder_encode end");
     return err;
 }
 
@@ -1237,7 +1051,7 @@
     VideoEditorVideoEncoder_Context* pEncoderContext = M4OSA_NULL;
     status_t                   result          = OK;
 
-    LOGV("VideoEditorVideoEncoder_start begin");
+    ALOGV("VideoEditorVideoEncoder_start begin");
     // Input parameters check
     VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER);
 
@@ -1261,11 +1075,11 @@
 
 cleanUp:
     if ( M4NO_ERROR == err ) {
-        LOGV("VideoEditorVideoEncoder_start no error");
+        ALOGV("VideoEditorVideoEncoder_start no error");
     } else {
-        LOGV("VideoEditorVideoEncoder_start ERROR 0x%X", err);
+        ALOGV("VideoEditorVideoEncoder_start ERROR 0x%X", err);
     }
-    LOGV("VideoEditorVideoEncoder_start end");
+    ALOGV("VideoEditorVideoEncoder_start end");
     return err;
 }
 
@@ -1275,7 +1089,7 @@
     MediaBuffer* outputBuffer = NULL;
     status_t result = OK;
 
-    LOGV("VideoEditorVideoEncoder_stop begin");
+    ALOGV("VideoEditorVideoEncoder_stop begin");
     // Input parameters check
     VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER);
     pEncoderContext = (VideoEditorVideoEncoder_Context*)pContext;
@@ -1311,22 +1125,22 @@
     }
 
     if (pEncoderContext->mNbInputFrames != pEncoderContext->mNbOutputFrames) {
-        LOGW("Some frames were not encoded: input(%d) != output(%d)",
+        ALOGW("Some frames were not encoded: input(%d) != output(%d)",
             pEncoderContext->mNbInputFrames, pEncoderContext->mNbOutputFrames);
     }
 
 cleanUp:
     if ( M4NO_ERROR == err ) {
-        LOGV("VideoEditorVideoEncoder_stop no error");
+        ALOGV("VideoEditorVideoEncoder_stop no error");
     } else {
-        LOGV("VideoEditorVideoEncoder_stop ERROR 0x%X", err);
+        ALOGV("VideoEditorVideoEncoder_stop ERROR 0x%X", err);
     }
-    LOGV("VideoEditorVideoEncoder_stop end");
+    ALOGV("VideoEditorVideoEncoder_stop end");
     return err;
 }
 
 M4OSA_ERR VideoEditorVideoEncoder_regulBitRate(M4ENCODER_Context pContext) {
-    LOGW("regulBitRate is not implemented");
+    ALOGW("regulBitRate is not implemented");
     return M4NO_ERROR;
 }
 
@@ -1335,7 +1149,7 @@
     M4OSA_ERR err = M4NO_ERROR;
     VideoEditorVideoEncoder_Context* pEncoderContext = M4OSA_NULL;
 
-    LOGV("VideoEditorVideoEncoder_setOption start optionID 0x%X", optionID);
+    ALOGV("VideoEditorVideoEncoder_setOption start optionID 0x%X", optionID);
     // Input parameters check
     VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER);
 
@@ -1351,7 +1165,7 @@
                 (M4OSA_Context)optionValue;
             break;
         default:
-            LOGV("VideoEditorVideoEncoder_setOption: unsupported optionId 0x%X",
+            ALOGV("VideoEditorVideoEncoder_setOption: unsupported optionId 0x%X",
                 optionID);
             VIDEOEDITOR_CHECK(M4OSA_FALSE, M4ERR_BAD_OPTION_ID);
             break;
@@ -1359,11 +1173,11 @@
 
 cleanUp:
     if ( M4NO_ERROR == err ) {
-        LOGV("VideoEditorVideoEncoder_setOption no error");
+        ALOGV("VideoEditorVideoEncoder_setOption no error");
     } else {
-        LOGV("VideoEditorVideoEncoder_setOption ERROR 0x%X", err);
+        ALOGV("VideoEditorVideoEncoder_setOption ERROR 0x%X", err);
     }
-    LOGV("VideoEditorVideoEncoder_setOption end");
+    ALOGV("VideoEditorVideoEncoder_setOption end");
     return err;
 }
 
@@ -1372,7 +1186,7 @@
     M4OSA_ERR err = M4NO_ERROR;
     VideoEditorVideoEncoder_Context* pEncoderContext = M4OSA_NULL;
 
-    LOGV("VideoEditorVideoEncoder_getOption begin optinId 0x%X", optionID);
+    ALOGV("VideoEditorVideoEncoder_getOption begin optinId 0x%X", optionID);
     // Input parameters check
     VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER);
     pEncoderContext = (VideoEditorVideoEncoder_Context*)pContext;
@@ -1384,7 +1198,7 @@
             *(M4ENCODER_Header**)optionValue = &(pEncoderContext->mHeader);
             break;
         default:
-            LOGV("VideoEditorVideoEncoder_getOption: unsupported optionId 0x%X",
+            ALOGV("VideoEditorVideoEncoder_getOption: unsupported optionId 0x%X",
                 optionID);
             VIDEOEDITOR_CHECK(M4OSA_FALSE, M4ERR_BAD_OPTION_ID);
             break;
@@ -1392,9 +1206,9 @@
 
 cleanUp:
     if ( M4NO_ERROR == err ) {
-        LOGV("VideoEditorVideoEncoder_getOption no error");
+        ALOGV("VideoEditorVideoEncoder_getOption no error");
     } else {
-        LOGV("VideoEditorVideoEncoder_getOption ERROR 0x%X", err);
+        ALOGV("VideoEditorVideoEncoder_getOption ERROR 0x%X", err);
     }
     return err;
 }
@@ -1408,7 +1222,7 @@
     VIDEOEDITOR_CHECK(M4OSA_NULL != pFormat,           M4ERR_PARAMETER);
     VIDEOEDITOR_CHECK(M4OSA_NULL != pEncoderInterface, M4ERR_PARAMETER);
 
-    LOGV("VideoEditorVideoEncoder_getInterface begin 0x%x 0x%x %d", pFormat,
+    ALOGV("VideoEditorVideoEncoder_getInterface begin 0x%x 0x%x %d", pFormat,
         pEncoderInterface, mode);
 
     SAFE_MALLOC(*pEncoderInterface, M4ENCODER_GlobalInterface, 1,
@@ -1436,7 +1250,7 @@
                 break;
             }
         default:
-            LOGV("VideoEditorVideoEncoder_getInterface : unsupported format %d",
+            ALOGV("VideoEditorVideoEncoder_getInterface : unsupported format %d",
                 format);
             VIDEOEDITOR_CHECK(M4OSA_FALSE, M4ERR_PARAMETER);
         break;
@@ -1456,10 +1270,10 @@
 
 cleanUp:
     if( M4NO_ERROR == err ) {
-        LOGV("VideoEditorVideoEncoder_getInterface no error");
+        ALOGV("VideoEditorVideoEncoder_getInterface no error");
     } else {
         *pEncoderInterface = M4OSA_NULL;
-        LOGV("VideoEditorVideoEncoder_getInterface ERROR 0x%X", err);
+        ALOGV("VideoEditorVideoEncoder_getInterface ERROR 0x%X", err);
     }
     return err;
 }
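
Beyond the logging rename, the encoder-shell change above deletes the file-local VideoEditorVideoEncoderPuller and points mPuller at the shared MediaBufferPuller helper (hence the new #include "MediaBufferPuller.h"). For readers comparing the two, here is a hedged sketch of the pull pattern the deleted class implemented; the method names follow the removed code, and the shared MediaBufferPuller's exact signatures may differ:

    // Sketch only; assumes an interface equivalent to the deleted puller class.
    #include "MediaBufferPuller.h"
    #include <media/stagefright/MediaBuffer.h>
    #include <media/stagefright/MediaSource.h>

    using namespace android;

    static void pullOneEncodedBuffer(const sp<MediaSource>& encoder) {
        MediaBufferPuller* puller = new MediaBufferPuller(encoder);
        puller->start();                                   // worker threads begin read()ing from the source
        MediaBuffer* buffer = puller->getBufferBlocking(); // returns NULL once the source stops returning OK
        if (buffer != NULL) {
            // ... consume the encoded access unit ...
            puller->putBuffer(buffer);                     // hand the buffer back for asynchronous release
        }
        puller->stop();                                    // drains and releases any buffers still queued
        delete puller;
    }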