merge in KFS78N (no-op)
diff --git a/camera/ICameraService.cpp b/camera/ICameraService.cpp
index 3debe22..5fc89fb 100644
--- a/camera/ICameraService.cpp
+++ b/camera/ICameraService.cpp
@@ -33,6 +33,7 @@
 #include <camera/ICameraClient.h>
 #include <camera/camera2/ICameraDeviceUser.h>
 #include <camera/camera2/ICameraDeviceCallbacks.h>
+#include <camera/CameraMetadata.h>
 
 namespace android {
 
@@ -119,6 +120,29 @@
         return result;
     }
 
+    // get camera characteristics (static metadata)
+    virtual status_t getCameraCharacteristics(int cameraId,
+                                              CameraMetadata* cameraInfo) {
+        Parcel data, reply;
+        data.writeInterfaceToken(ICameraService::getInterfaceDescriptor());
+        data.writeInt32(cameraId);
+        remote()->transact(BnCameraService::GET_CAMERA_CHARACTERISTICS, data, &reply);
+
+        if (readExceptionCode(reply)) return -EPROTO;
+        status_t result = reply.readInt32();
+
+        CameraMetadata out;
+        if (reply.readInt32() != 0) {
+            out.readFromParcel(&reply);
+        }
+
+        if (cameraInfo != NULL) {
+            cameraInfo->swap(out);
+        }
+
+        return result;
+    }
+
     // connect to camera service (android.hardware.Camera)
     virtual status_t connect(const sp<ICameraClient>& cameraClient, int cameraId,
                              const String16 &clientPackageName, int clientUid,
@@ -239,6 +263,18 @@
             reply->writeInt32(cameraInfo.orientation);
             return NO_ERROR;
         } break;
+        case GET_CAMERA_CHARACTERISTICS: {
+            CHECK_INTERFACE(ICameraService, data, reply);
+            CameraMetadata info;
+            status_t result = getCameraCharacteristics(data.readInt32(), &info);
+            reply->writeNoException();
+            reply->writeInt32(result);
+
+            // out-variables are after exception and return value
+            reply->writeInt32(1); // means the parcelable is included
+            info.writeToParcel(reply);
+            return NO_ERROR;
+        } break;
         case CONNECT: {
             CHECK_INTERFACE(ICameraService, data, reply);
             sp<ICameraClient> cameraClient =
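
For context, a minimal caller-side sketch of the new getCameraCharacteristics() entry point might look like the following; the service-name lookup, the cameraId value, and the error handling are illustrative assumptions, not part of this change:

    // assumes <binder/IServiceManager.h> and <camera/ICameraService.h> are included
    sp<ICameraService> cs = interface_cast<ICameraService>(
            defaultServiceManager()->getService(String16("media.camera")));
    CameraMetadata info;
    status_t res = cs->getCameraCharacteristics(0 /* cameraId, illustrative */, &info);
    if (res != NO_ERROR) {
        ALOGE("getCameraCharacteristics failed: %d", res);
    }
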
diff --git a/camera/IProCameraCallbacks.cpp b/camera/IProCameraCallbacks.cpp
index 0fdb85a..bd3d420 100644
--- a/camera/IProCameraCallbacks.cpp
+++ b/camera/IProCameraCallbacks.cpp
@@ -67,11 +67,11 @@
                            IBinder::FLAG_ONEWAY);
     }
 
-    void onResultReceived(int32_t frameId, camera_metadata* result) {
+    void onResultReceived(int32_t requestId, camera_metadata* result) {
         ALOGV("onResultReceived");
         Parcel data, reply;
         data.writeInterfaceToken(IProCameraCallbacks::getInterfaceDescriptor());
-        data.writeInt32(frameId);
+        data.writeInt32(requestId);
         CameraMetadata::writeToParcel(data, result);
         remote()->transact(RESULT_RECEIVED, data, &reply, IBinder::FLAG_ONEWAY);
     }
@@ -107,10 +107,10 @@
         case RESULT_RECEIVED: {
             ALOGV("RESULT_RECEIVED");
             CHECK_INTERFACE(IProCameraCallbacks, data, reply);
-            int32_t frameId = data.readInt32();
+            int32_t requestId = data.readInt32();
             camera_metadata_t *result = NULL;
             CameraMetadata::readFromParcel(data, &result);
-            onResultReceived(frameId, result);
+            onResultReceived(requestId, result);
             return NO_ERROR;
             break;
         }
diff --git a/camera/ProCamera.cpp b/camera/ProCamera.cpp
index 577c760..ba5a48c 100644
--- a/camera/ProCamera.cpp
+++ b/camera/ProCamera.cpp
@@ -90,8 +90,8 @@
     }
 }
 
-void ProCamera::onResultReceived(int32_t frameId, camera_metadata* result) {
-    ALOGV("%s: frameId = %d, result = %p", __FUNCTION__, frameId, result);
+void ProCamera::onResultReceived(int32_t requestId, camera_metadata* result) {
+    ALOGV("%s: requestId = %d, result = %p", __FUNCTION__, requestId, result);
 
     sp<ProCameraListener> listener;
     {
@@ -112,7 +112,7 @@
     result = tmp.release();
 
     if (listener != NULL) {
-        listener->onResultReceived(frameId, result);
+        listener->onResultReceived(requestId, result);
     } else {
         free_camera_metadata(result);
     }
diff --git a/camera/camera2/ICameraDeviceCallbacks.cpp b/camera/camera2/ICameraDeviceCallbacks.cpp
index 3cec1f4..613358a 100644
--- a/camera/camera2/ICameraDeviceCallbacks.cpp
+++ b/camera/camera2/ICameraDeviceCallbacks.cpp
@@ -32,7 +32,9 @@
 namespace android {
 
 enum {
-    NOTIFY_CALLBACK = IBinder::FIRST_CALL_TRANSACTION,
+    CAMERA_ERROR = IBinder::FIRST_CALL_TRANSACTION,
+    CAMERA_IDLE,
+    CAPTURE_STARTED,
     RESULT_RECEIVED,
 };
 
@@ -44,19 +46,37 @@
     {
     }
 
-    // generic callback from camera service to app
-    void notifyCallback(int32_t msgType, int32_t ext1, int32_t ext2)
+    void onDeviceError(CameraErrorCode errorCode)
     {
-        ALOGV("notifyCallback");
+        ALOGV("onDeviceError");
         Parcel data, reply;
         data.writeInterfaceToken(ICameraDeviceCallbacks::getInterfaceDescriptor());
-        data.writeInt32(msgType);
-        data.writeInt32(ext1);
-        data.writeInt32(ext2);
-        remote()->transact(NOTIFY_CALLBACK, data, &reply, IBinder::FLAG_ONEWAY);
+        data.writeInt32(static_cast<int32_t>(errorCode));
+        remote()->transact(CAMERA_ERROR, data, &reply, IBinder::FLAG_ONEWAY);
         data.writeNoException();
     }
 
+    void onDeviceIdle()
+    {
+        ALOGV("onDeviceIdle");
+        Parcel data, reply;
+        data.writeInterfaceToken(ICameraDeviceCallbacks::getInterfaceDescriptor());
+        remote()->transact(CAMERA_IDLE, data, &reply, IBinder::FLAG_ONEWAY);
+        data.writeNoException();
+    }
+
+    void onCaptureStarted(int32_t requestId, int64_t timestamp)
+    {
+        ALOGV("onCaptureStarted");
+        Parcel data, reply;
+        data.writeInterfaceToken(ICameraDeviceCallbacks::getInterfaceDescriptor());
+        data.writeInt32(requestId);
+        data.writeInt64(timestamp);
+        remote()->transact(CAPTURE_STARTED, data, &reply, IBinder::FLAG_ONEWAY);
+        data.writeNoException();
+    }
+
+
     void onResultReceived(int32_t requestId, const CameraMetadata& result) {
         ALOGV("onResultReceived");
         Parcel data, reply;
@@ -79,18 +99,33 @@
 {
     ALOGV("onTransact - code = %d", code);
     switch(code) {
-        case NOTIFY_CALLBACK: {
-            ALOGV("NOTIFY_CALLBACK");
+        case CAMERA_ERROR: {
+            ALOGV("onDeviceError");
             CHECK_INTERFACE(ICameraDeviceCallbacks, data, reply);
-            int32_t msgType = data.readInt32();
-            int32_t ext1 = data.readInt32();
-            int32_t ext2 = data.readInt32();
-            notifyCallback(msgType, ext1, ext2);
+            CameraErrorCode errorCode =
+                    static_cast<CameraErrorCode>(data.readInt32());
+            onDeviceError(errorCode);
+            data.readExceptionCode();
+            return NO_ERROR;
+        } break;
+        case CAMERA_IDLE: {
+            ALOGV("onDeviceIdle");
+            CHECK_INTERFACE(ICameraDeviceCallbacks, data, reply);
+            onDeviceIdle();
+            data.readExceptionCode();
+            return NO_ERROR;
+        } break;
+        case CAPTURE_STARTED: {
+            ALOGV("onCaptureStarted");
+            CHECK_INTERFACE(ICameraDeviceCallbacks, data, reply);
+            int32_t requestId = data.readInt32();
+            int64_t timestamp = data.readInt64();
+            onCaptureStarted(requestId, timestamp);
             data.readExceptionCode();
             return NO_ERROR;
         } break;
         case RESULT_RECEIVED: {
-            ALOGV("RESULT_RECEIVED");
+            ALOGV("onResultReceived");
             CHECK_INTERFACE(ICameraDeviceCallbacks, data, reply);
             int32_t requestId = data.readInt32();
             CameraMetadata result;
@@ -102,8 +137,7 @@
             onResultReceived(requestId, result);
             data.readExceptionCode();
             return NO_ERROR;
-            break;
-        }
+        } break;
         default:
             return BBinder::onTransact(code, data, reply, flags);
     }
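
For orientation, a rough client-side receiver for the reshaped callback interface could look like the sketch below; the class name and logging are hypothetical and simply mirror the virtuals declared in ICameraDeviceCallbacks.h later in this patch:

    // hypothetical receiver, assumes BnCameraDeviceCallbacks from ICameraDeviceCallbacks.h
    class MyDeviceCallbacks : public BnCameraDeviceCallbacks {
    public:
        virtual void onDeviceError(CameraErrorCode errorCode) {
            ALOGE("device error %d", errorCode);
        }
        virtual void onDeviceIdle() {
            ALOGV("device is idle");
        }
        virtual void onCaptureStarted(int32_t requestId, int64_t timestamp) {
            ALOGV("capture %d started at %lld ns", requestId, (long long)timestamp);
        }
        virtual void onResultReceived(int32_t requestId, const CameraMetadata& result) {
            // consume the metadata associated with requestId
        }
    };
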
diff --git a/camera/tests/ProCameraTests.cpp b/camera/tests/ProCameraTests.cpp
index e9aa99d..1f5867a 100644
--- a/camera/tests/ProCameraTests.cpp
+++ b/camera/tests/ProCameraTests.cpp
@@ -284,9 +284,9 @@
         }
     }
 
-    virtual void onResultReceived(int32_t frameId,
+    virtual void onResultReceived(int32_t requestId,
                                   camera_metadata* request) {
-        dout << "Result received frameId = " << frameId
+        dout << "Result received requestId = " << requestId
              << ", requestPtr = " << (void*)request << std::endl;
         QueueEvent(RESULT_RECEIVED);
         free_camera_metadata(request);
@@ -1276,4 +1276,3 @@
 }
 }
 }
-
diff --git a/cmds/screenrecord/screenrecord.cpp b/cmds/screenrecord/screenrecord.cpp
index 68289a5..49999b5 100644
--- a/cmds/screenrecord/screenrecord.cpp
+++ b/cmds/screenrecord/screenrecord.cpp
@@ -464,12 +464,16 @@
     err = prepareEncoder(mainDpyInfo.fps, &encoder, &bufferProducer);
 
     if (err != NO_ERROR && !gSizeSpecified) {
-        if (gVideoWidth != kFallbackWidth && gVideoHeight != kFallbackHeight) {
+        // fallback is defined for landscape; swap if we're in portrait
+        bool needSwap = gVideoWidth < gVideoHeight;
+        uint32_t newWidth = needSwap ? kFallbackHeight : kFallbackWidth;
+        uint32_t newHeight = needSwap ? kFallbackWidth : kFallbackHeight;
+        if (gVideoWidth != newWidth && gVideoHeight != newHeight) {
             ALOGV("Retrying with 720p");
-            fprintf(stderr, "WARNING: failed at %dx%d, retrying at 720p\n",
-                    gVideoWidth, gVideoHeight);
-            gVideoWidth = kFallbackWidth;
-            gVideoHeight = kFallbackHeight;
+            fprintf(stderr, "WARNING: failed at %dx%d, retrying at %dx%d\n",
+                    gVideoWidth, gVideoHeight, newWidth, newHeight);
+            gVideoWidth = newWidth;
+            gVideoHeight = newHeight;
             err = prepareEncoder(mainDpyInfo.fps, &encoder, &bufferProducer);
         }
     }
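
Concretely, assuming the fallback constants are 1280x720 (as the "720p" message suggests), a portrait capture that fails at 1080x1920 now retries at 720x1280, while a landscape 1920x1080 capture still retries at 1280x720 as before.
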
diff --git a/include/camera/ICameraService.h b/include/camera/ICameraService.h
index 0e10699..f342122 100644
--- a/include/camera/ICameraService.h
+++ b/include/camera/ICameraService.h
@@ -30,6 +30,7 @@
 class ICameraServiceListener;
 class ICameraDeviceUser;
 class ICameraDeviceCallbacks;
+class CameraMetadata;
 
 class ICameraService : public IInterface
 {
@@ -45,6 +46,7 @@
         CONNECT_DEVICE,
         ADD_LISTENER,
         REMOVE_LISTENER,
+        GET_CAMERA_CHARACTERISTICS,
     };
 
     enum {
@@ -58,6 +60,9 @@
     virtual status_t getCameraInfo(int cameraId,
                                           struct CameraInfo* cameraInfo) = 0;
 
+    virtual status_t getCameraCharacteristics(int cameraId,
+                                              CameraMetadata* cameraInfo) = 0;
+
     // Returns 'OK' if operation succeeded
     // - Errors: ALREADY_EXISTS if the listener was already added
     virtual status_t addListener(const sp<ICameraServiceListener>& listener)
diff --git a/include/camera/IProCameraCallbacks.h b/include/camera/IProCameraCallbacks.h
index c774698..e8abb89 100644
--- a/include/camera/IProCameraCallbacks.h
+++ b/include/camera/IProCameraCallbacks.h
@@ -51,7 +51,7 @@
     /** Missing by design: implementation is client-side in ProCamera.cpp **/
     // virtual void onBufferReceived(int streamId,
     //                               const CpuConsumer::LockedBufer& buf);
-    virtual void            onResultReceived(int32_t frameId,
+    virtual void            onResultReceived(int32_t requestId,
                                              camera_metadata* result) = 0;
 };
 
diff --git a/include/camera/ProCamera.h b/include/camera/ProCamera.h
index d9ee662..83a3028 100644
--- a/include/camera/ProCamera.h
+++ b/include/camera/ProCamera.h
@@ -252,7 +252,7 @@
     virtual void        onLockStatusChanged(
                                 IProCameraCallbacks::LockStatus newLockStatus);
 
-    virtual void        onResultReceived(int32_t frameId,
+    virtual void        onResultReceived(int32_t requestId,
                                          camera_metadata* result);
 private:
     ProCamera(int cameraId);
diff --git a/include/camera/camera2/ICameraDeviceCallbacks.h b/include/camera/camera2/ICameraDeviceCallbacks.h
index 041fa65..8dac4f2 100644
--- a/include/camera/camera2/ICameraDeviceCallbacks.h
+++ b/include/camera/camera2/ICameraDeviceCallbacks.h
@@ -35,13 +35,27 @@
 public:
     DECLARE_META_INTERFACE(CameraDeviceCallbacks);
 
-    // One way
-    virtual void            notifyCallback(int32_t msgType,
-                                           int32_t ext1,
-                                           int32_t ext2) = 0;
+    /**
+     * Error codes for CAMERA_MSG_ERROR
+     */
+    enum CameraErrorCode {
+        ERROR_CAMERA_DISCONNECTED = 0,
+        ERROR_CAMERA_DEVICE = 1,
+        ERROR_CAMERA_SERVICE = 2
+    };
 
     // One way
-    virtual void            onResultReceived(int32_t frameId,
+    virtual void            onDeviceError(CameraErrorCode errorCode) = 0;
+
+    // One way
+    virtual void            onDeviceIdle() = 0;
+
+    // One way
+    virtual void            onCaptureStarted(int32_t requestId,
+                                             int64_t timestamp) = 0;
+
+    // One way
+    virtual void            onResultReceived(int32_t requestId,
                                              const CameraMetadata& result) = 0;
 };
 
diff --git a/include/media/AudioRecord.h b/include/media/AudioRecord.h
index 62f0c64..052064d 100644
--- a/include/media/AudioRecord.h
+++ b/include/media/AudioRecord.h
@@ -398,18 +398,20 @@
 
                 void        pause();    // suspend thread from execution at next loop boundary
                 void        resume();   // allow thread to execute, if not requested to exit
-                void        pauseConditional();
-                                        // like pause(), but only if prior resume() wasn't latched
 
     private:
+                void        pauseInternal(nsecs_t ns = 0LL);
+                                        // like pause(), but only used internally within thread
+
         friend class AudioRecord;
         virtual bool        threadLoop();
         AudioRecord&        mReceiver;
         virtual ~AudioRecordThread();
         Mutex               mMyLock;    // Thread::mLock is private
         Condition           mMyCond;    // Thread::mThreadExitedCondition is private
-        bool                mPaused;    // whether thread is currently paused
-        bool                mResumeLatch;   // whether next pauseConditional() will be a nop
+        bool                mPaused;    // whether thread is requested to pause at next loop entry
+        bool                mPausedInt; // whether thread internally requests pause
+        nsecs_t             mPausedNs;  // if mPausedInt then associated timeout, otherwise ignored
     };
 
             // body of AudioRecordThread::threadLoop()
diff --git a/include/media/AudioTrack.h b/include/media/AudioTrack.h
index 453c106..22ad57e 100644
--- a/include/media/AudioTrack.h
+++ b/include/media/AudioTrack.h
@@ -598,18 +598,20 @@
 
                 void        pause();    // suspend thread from execution at next loop boundary
                 void        resume();   // allow thread to execute, if not requested to exit
-                void        pauseConditional();
-                                        // like pause(), but only if prior resume() wasn't latched
 
     private:
+                void        pauseInternal(nsecs_t ns = 0LL);
+                                        // like pause(), but only used internally within thread
+
         friend class AudioTrack;
         virtual bool        threadLoop();
         AudioTrack&         mReceiver;
         virtual ~AudioTrackThread();
         Mutex               mMyLock;    // Thread::mLock is private
         Condition           mMyCond;    // Thread::mThreadExitedCondition is private
-        bool                mPaused;    // whether thread is currently paused
-        bool                mResumeLatch;   // whether next pauseConditional() will be a nop
+        bool                mPaused;    // whether thread is requested to pause at next loop entry
+        bool                mPausedInt; // whether thread internally requests pause
+        nsecs_t             mPausedNs;  // if mPausedInt then associated timeout, otherwise ignored
     };
 
             // body of AudioTrackThread::threadLoop()
diff --git a/include/media/IAudioFlinger.h b/include/media/IAudioFlinger.h
index 49f921b..eaf7780 100644
--- a/include/media/IAudioFlinger.h
+++ b/include/media/IAudioFlinger.h
@@ -53,6 +53,9 @@
     };
     typedef uint32_t track_flags_t;
 
+    // invariant on exit for all APIs that return an sp<>:
+    //   (return value != 0) == (*status == NO_ERROR)
+
     /* create an audio track and registers it with AudioFlinger.
      * return null if the track cannot be created.
      */
diff --git a/include/media/IAudioTrack.h b/include/media/IAudioTrack.h
index afac4ae..5c8a484 100644
--- a/include/media/IAudioTrack.h
+++ b/include/media/IAudioTrack.h
@@ -90,6 +90,9 @@
 
     /* Return NO_ERROR if timestamp is valid */
     virtual status_t    getTimestamp(AudioTimestamp& timestamp) = 0;
+
+    /* Signal the playback thread for a change in control block */
+    virtual void        signal() = 0;
 };
 
 // ----------------------------------------------------------------------------
diff --git a/include/media/IMediaPlayerService.h b/include/media/IMediaPlayerService.h
index fef7af2..2998b37 100644
--- a/include/media/IMediaPlayerService.h
+++ b/include/media/IMediaPlayerService.h
@@ -49,8 +49,12 @@
     virtual sp<IMediaMetadataRetriever> createMetadataRetriever() = 0;
     virtual sp<IMediaPlayer> create(const sp<IMediaPlayerClient>& client, int audioSessionId = 0) = 0;
 
-    virtual sp<IMemory>         decode(const char* url, uint32_t *pSampleRate, int* pNumChannels, audio_format_t* pFormat) = 0;
-    virtual sp<IMemory>         decode(int fd, int64_t offset, int64_t length, uint32_t *pSampleRate, int* pNumChannels, audio_format_t* pFormat) = 0;
+    virtual status_t         decode(const char* url, uint32_t *pSampleRate, int* pNumChannels,
+                                    audio_format_t* pFormat,
+                                    const sp<IMemoryHeap>& heap, size_t *pSize) = 0;
+    virtual status_t         decode(int fd, int64_t offset, int64_t length, uint32_t *pSampleRate,
+                                    int* pNumChannels, audio_format_t* pFormat,
+                                    const sp<IMemoryHeap>& heap, size_t *pSize) = 0;
     virtual sp<IOMX>            getOMX() = 0;
     virtual sp<ICrypto>         makeCrypto() = 0;
     virtual sp<IDrm>            makeDrm() = 0;
diff --git a/include/media/IOMX.h b/include/media/IOMX.h
index db9093a..9c8451c 100644
--- a/include/media/IOMX.h
+++ b/include/media/IOMX.h
@@ -83,6 +83,10 @@
     virtual status_t storeMetaDataInBuffers(
             node_id node, OMX_U32 port_index, OMX_BOOL enable) = 0;
 
+    virtual status_t prepareForAdaptivePlayback(
+            node_id node, OMX_U32 portIndex, OMX_BOOL enable,
+            OMX_U32 maxFrameWidth, OMX_U32 maxFrameHeight) = 0;
+
     virtual status_t enableGraphicBuffers(
             node_id node, OMX_U32 port_index, OMX_BOOL enable) = 0;
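
As a hedged illustration of the new IOMX entry point, a decoder client might issue something like the following; the node handle, output port index, and maximum dimensions are placeholder values, not mandated by this header:

    // omx is an sp<IOMX>, node a previously allocated node_id (both assumed to exist)
    status_t err = omx->prepareForAdaptivePlayback(
            node, 1 /* output port index, illustrative */, OMX_TRUE, 1920, 1080);
    if (err != OK) {
        ALOGW("adaptive playback unavailable (err=%d); expect port reconfiguration instead", err);
    }
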
 
diff --git a/include/media/SoundPool.h b/include/media/SoundPool.h
index 9e5654f..2dd78cc 100644
--- a/include/media/SoundPool.h
+++ b/include/media/SoundPool.h
@@ -22,6 +22,8 @@
 #include <utils/Vector.h>
 #include <utils/KeyedVector.h>
 #include <media/AudioTrack.h>
+#include <binder/MemoryHeapBase.h>
+#include <binder/MemoryBase.h>
 
 namespace android {
 
@@ -85,6 +87,7 @@
     int64_t             mLength;
     char*               mUrl;
     sp<IMemory>         mData;
+    sp<MemoryHeapBase>  mHeap;
 };
 
 // stores pending events for stolen channels
diff --git a/include/media/Visualizer.h b/include/media/Visualizer.h
index e429263..6167dd6 100644
--- a/include/media/Visualizer.h
+++ b/include/media/Visualizer.h
@@ -114,6 +114,14 @@
     status_t setScalingMode(uint32_t mode);
     uint32_t getScalingMode() { return mScalingMode; }
 
+    // set which measurements are done on the audio buffers processed by the effect.
+    // valid measurements (mask): MEASUREMENT_MODE_PEAK_RMS
+    status_t setMeasurementMode(uint32_t mode);
+    uint32_t getMeasurementMode() { return mMeasurementMode; }
+
+    // return a set of int32_t measurements
+    status_t getIntMeasurements(uint32_t type, uint32_t number, int32_t *measurements);
+
     // return a capture in PCM 8 bit unsigned format. The size of the capture is equal to
     // getCaptureSize()
     status_t getWaveForm(uint8_t *waveform);
@@ -156,6 +164,7 @@
     uint32_t mCaptureSize;
     uint32_t mSampleRate;
     uint32_t mScalingMode;
+    uint32_t mMeasurementMode;
     capture_cbk_t mCaptureCallBack;
     void *mCaptureCbkUser;
     sp<CaptureThread> mCaptureThread;
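
A rough usage sketch for the new measurement API, assuming an already-created and enabled Visualizer instance and assuming the type passed to getIntMeasurements() mirrors the mode constant referenced in the comment above (neither assumption is stated in this header):

    // vis is an sp<Visualizer> that has already been set up (assumed)
    status_t err = vis->setMeasurementMode(Visualizer::MEASUREMENT_MODE_PEAK_RMS);
    if (err == NO_ERROR) {
        int32_t measurements[2]; // e.g. peak and RMS values
        err = vis->getIntMeasurements(Visualizer::MEASUREMENT_MODE_PEAK_RMS,
                                      2, measurements);
    }
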
diff --git a/include/media/mediaplayer.h b/include/media/mediaplayer.h
index 923c8b2..4c05fc3 100644
--- a/include/media/mediaplayer.h
+++ b/include/media/mediaplayer.h
@@ -45,6 +45,7 @@
     MEDIA_STARTED           = 6,
     MEDIA_PAUSED            = 7,
     MEDIA_STOPPED           = 8,
+    MEDIA_SKIPPED           = 9,
     MEDIA_TIMED_TEXT        = 99,
     MEDIA_ERROR             = 100,
     MEDIA_INFO              = 200,
@@ -223,8 +224,12 @@
             bool            isLooping();
             status_t        setVolume(float leftVolume, float rightVolume);
             void            notify(int msg, int ext1, int ext2, const Parcel *obj = NULL);
-    static  sp<IMemory>     decode(const char* url, uint32_t *pSampleRate, int* pNumChannels, audio_format_t* pFormat);
-    static  sp<IMemory>     decode(int fd, int64_t offset, int64_t length, uint32_t *pSampleRate, int* pNumChannels, audio_format_t* pFormat);
+    static  status_t        decode(const char* url, uint32_t *pSampleRate, int* pNumChannels,
+                                   audio_format_t* pFormat,
+                                   const sp<IMemoryHeap>& heap, size_t *pSize);
+    static  status_t        decode(int fd, int64_t offset, int64_t length, uint32_t *pSampleRate,
+                                   int* pNumChannels, audio_format_t* pFormat,
+                                   const sp<IMemoryHeap>& heap, size_t *pSize);
             status_t        invoke(const Parcel& request, Parcel *reply);
             status_t        setMetadataFilter(const Parcel& filter);
             status_t        getMetadata(bool update_only, bool apply_filter, Parcel *metadata);
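
A hedged caller-side sketch of the revised static decode(): the heap size, flags, and URL below are illustrative, and on success the decoded PCM is expected to land in the supplied heap with its length returned in *pSize:

    // assumes <binder/MemoryHeapBase.h> and <media/mediaplayer.h> are included
    sp<MemoryHeapBase> heap = new MemoryHeapBase(1024 * 1024, 0, "decode heap");
    uint32_t sampleRate;
    int numChannels;
    audio_format_t format;
    size_t size;
    status_t status = MediaPlayer::decode("file:///sdcard/click.ogg" /* illustrative URL */,
            &sampleRate, &numChannels, &format, heap, &size);
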
diff --git a/include/private/media/AudioTrackShared.h b/include/private/media/AudioTrackShared.h
index ad7409d..fe258ad 100644
--- a/include/private/media/AudioTrackShared.h
+++ b/include/private/media/AudioTrackShared.h
@@ -383,8 +383,6 @@
 protected:
     size_t      mAvailToClient; // estimated frames available to client prior to releaseBuffer()
     int32_t     mFlush;         // our copy of cblk->u.mStreaming.mFlush, for streaming output only
-private:
-    bool        mDeferWake;     // whether another releaseBuffer() is expected soon
 };
 
 // Proxy used by AudioFlinger for servicing AudioTrack
diff --git a/media/libeffects/data/audio_effects.conf b/media/libeffects/data/audio_effects.conf
index 69a3c53..c3c4b67 100644
--- a/media/libeffects/data/audio_effects.conf
+++ b/media/libeffects/data/audio_effects.conf
@@ -35,6 +35,9 @@
   downmix {
     path /system/lib/soundfx/libdownmix.so
   }
+  loudness_enhancer {
+    path /system/lib/soundfx/libldnhncr.so
+  }
 }
 
 # Default pre-processing library. Add to audio_effect.conf "libraries" section if
@@ -122,6 +125,10 @@
     library downmix
     uuid 93f04452-e4fe-41cc-91f9-e475b6d1d69f
   }
+  loudness_enhancer {
+    library loudness_enhancer
+    uuid fa415329-2034-4bea-b5dc-5b381c8d1e2c
+  }
 }
 
 # Default pre-processing effects. Add to audio_effect.conf "effects" section if
diff --git a/media/libeffects/loudness/Android.mk b/media/libeffects/loudness/Android.mk
new file mode 100644
index 0000000..dcb7b27
--- /dev/null
+++ b/media/libeffects/loudness/Android.mk
@@ -0,0 +1,27 @@
+LOCAL_PATH:= $(call my-dir)
+
+# LoudnessEnhancer library
+include $(CLEAR_VARS)
+
+LOCAL_SRC_FILES:= \
+	EffectLoudnessEnhancer.cpp \
+	dsp/core/dynamic_range_compression.cpp
+
+LOCAL_CFLAGS+= -O2 -fvisibility=hidden
+
+LOCAL_SHARED_LIBRARIES := \
+	libcutils \
+	liblog \
+	libstlport
+
+LOCAL_MODULE_PATH := $(TARGET_OUT_SHARED_LIBRARIES)/soundfx
+LOCAL_MODULE:= libldnhncr
+
+LOCAL_C_INCLUDES := \
+	$(call include-path-for, audio-effects) \
+	bionic \
+	bionic/libstdc++/include \
+	external/stlport/stlport
+
+
+include $(BUILD_SHARED_LIBRARY)
diff --git a/media/libeffects/loudness/EffectLoudnessEnhancer.cpp b/media/libeffects/loudness/EffectLoudnessEnhancer.cpp
new file mode 100644
index 0000000..91ed677
--- /dev/null
+++ b/media/libeffects/loudness/EffectLoudnessEnhancer.cpp
@@ -0,0 +1,466 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "EffectLE"
+//#define LOG_NDEBUG 0
+#include <cutils/log.h>
+#include <assert.h>
+#include <stdlib.h>
+#include <string.h>
+#include <new>
+#include <time.h>
+#include <math.h>
+#include <audio_effects/effect_loudnessenhancer.h>
+#include "dsp/core/dynamic_range_compression.h"
+
+extern "C" {
+
+// effect_handle_t interface implementation for LE effect
+extern const struct effect_interface_s gLEInterface;
+
+// AOSP Loudness Enhancer UUID: fa415329-2034-4bea-b5dc-5b381c8d1e2c
+const effect_descriptor_t gLEDescriptor = {
+        {0xfe3199be, 0xaed0, 0x413f, 0x87bb, {0x11, 0x26, 0x0e, 0xb6, 0x3c, 0xf1}}, // type
+        {0xfa415329, 0x2034, 0x4bea, 0xb5dc, {0x5b, 0x38, 0x1c, 0x8d, 0x1e, 0x2c}}, // uuid
+        EFFECT_CONTROL_API_VERSION,
+        (EFFECT_FLAG_TYPE_INSERT | EFFECT_FLAG_INSERT_FIRST),
+        0, // TODO
+        1,
+        "Loudness Enhancer",
+        "The Android Open Source Project",
+};
+
+enum le_state_e {
+    LOUDNESS_ENHANCER_STATE_UNINITIALIZED,
+    LOUDNESS_ENHANCER_STATE_INITIALIZED,
+    LOUDNESS_ENHANCER_STATE_ACTIVE,
+};
+
+struct LoudnessEnhancerContext {
+    const struct effect_interface_s *mItfe;
+    effect_config_t mConfig;
+    uint8_t mState;
+    int32_t mTargetGainmB;// target gain in mB
+    // in this implementation, there is no coupling between the compression on the left and right
+    // channels
+    le_fx::AdaptiveDynamicRangeCompression* mCompressor;
+};
+
+//
+//--- Local functions (not directly used by effect interface)
+//
+
+void LE_reset(LoudnessEnhancerContext *pContext)
+{
+    ALOGV("  > LE_reset(%p)", pContext);
+
+    if (pContext->mCompressor != NULL) {
+        float targetAmp = pow(10, pContext->mTargetGainmB/2000.0f); // mB to linear amplification
+        ALOGV("LE_reset(): Target gain=%dmB <=> factor=%.2fX", pContext->mTargetGainmB, targetAmp);
+        pContext->mCompressor->Initialize(targetAmp, pContext->mConfig.inputCfg.samplingRate);
+    } else {
+        ALOGE("LE_reset(%p): null compressors, can't apply target gain", pContext);
+    }
+}
+
+static inline int16_t clamp16(int32_t sample)
+{
+    if ((sample>>15) ^ (sample>>31))
+        sample = 0x7FFF ^ (sample>>31);
+    return sample;
+}
+
+//----------------------------------------------------------------------------
+// LE_setConfig()
+//----------------------------------------------------------------------------
+// Purpose: Set input and output audio configuration.
+//
+// Inputs:
+//  pContext:   effect engine context
+//  pConfig:    pointer to effect_config_t structure holding input and output
+//      configuration parameters
+//
+// Outputs:
+//
+//----------------------------------------------------------------------------
+
+int LE_setConfig(LoudnessEnhancerContext *pContext, effect_config_t *pConfig)
+{
+    ALOGV("LE_setConfig(%p)", pContext);
+
+    if (pConfig->inputCfg.samplingRate != pConfig->outputCfg.samplingRate) return -EINVAL;
+    if (pConfig->inputCfg.channels != pConfig->outputCfg.channels) return -EINVAL;
+    if (pConfig->inputCfg.format != pConfig->outputCfg.format) return -EINVAL;
+    if (pConfig->inputCfg.channels != AUDIO_CHANNEL_OUT_STEREO) return -EINVAL;
+    if (pConfig->outputCfg.accessMode != EFFECT_BUFFER_ACCESS_WRITE &&
+            pConfig->outputCfg.accessMode != EFFECT_BUFFER_ACCESS_ACCUMULATE) return -EINVAL;
+    if (pConfig->inputCfg.format != AUDIO_FORMAT_PCM_16_BIT) return -EINVAL;
+
+    pContext->mConfig = *pConfig;
+
+    LE_reset(pContext);
+
+    return 0;
+}
+
+
+//----------------------------------------------------------------------------
+// LE_getConfig()
+//----------------------------------------------------------------------------
+// Purpose: Get input and output audio configuration.
+//
+// Inputs:
+//  pContext:   effect engine context
+//  pConfig:    pointer to effect_config_t structure holding input and output
+//      configuration parameters
+//
+// Outputs:
+//
+//----------------------------------------------------------------------------
+
+void LE_getConfig(LoudnessEnhancerContext *pContext, effect_config_t *pConfig)
+{
+    *pConfig = pContext->mConfig;
+}
+
+
+//----------------------------------------------------------------------------
+// LE_init()
+//----------------------------------------------------------------------------
+// Purpose: Initialize engine with default configuration.
+//
+// Inputs:
+//  pContext:   effect engine context
+//
+// Outputs:
+//
+//----------------------------------------------------------------------------
+
+int LE_init(LoudnessEnhancerContext *pContext)
+{
+    ALOGV("LE_init(%p)", pContext);
+
+    pContext->mConfig.inputCfg.accessMode = EFFECT_BUFFER_ACCESS_READ;
+    pContext->mConfig.inputCfg.channels = AUDIO_CHANNEL_OUT_STEREO;
+    pContext->mConfig.inputCfg.format = AUDIO_FORMAT_PCM_16_BIT;
+    pContext->mConfig.inputCfg.samplingRate = 44100;
+    pContext->mConfig.inputCfg.bufferProvider.getBuffer = NULL;
+    pContext->mConfig.inputCfg.bufferProvider.releaseBuffer = NULL;
+    pContext->mConfig.inputCfg.bufferProvider.cookie = NULL;
+    pContext->mConfig.inputCfg.mask = EFFECT_CONFIG_ALL;
+    pContext->mConfig.outputCfg.accessMode = EFFECT_BUFFER_ACCESS_ACCUMULATE;
+    pContext->mConfig.outputCfg.channels = AUDIO_CHANNEL_OUT_STEREO;
+    pContext->mConfig.outputCfg.format = AUDIO_FORMAT_PCM_16_BIT;
+    pContext->mConfig.outputCfg.samplingRate = 44100;
+    pContext->mConfig.outputCfg.bufferProvider.getBuffer = NULL;
+    pContext->mConfig.outputCfg.bufferProvider.releaseBuffer = NULL;
+    pContext->mConfig.outputCfg.bufferProvider.cookie = NULL;
+    pContext->mConfig.outputCfg.mask = EFFECT_CONFIG_ALL;
+
+    pContext->mTargetGainmB = LOUDNESS_ENHANCER_DEFAULT_TARGET_GAIN_MB;
+    float targetAmp = pow(10, pContext->mTargetGainmB/2000.0f); // mB to linear amplification
+    ALOGV("LE_init(): Target gain=%dmB <=> factor=%.2fX", pContext->mTargetGainmB, targetAmp);
+
+    if (pContext->mCompressor == NULL) {
+        pContext->mCompressor = new le_fx::AdaptiveDynamicRangeCompression();
+        pContext->mCompressor->Initialize(targetAmp, pContext->mConfig.inputCfg.samplingRate);
+    }
+
+    LE_setConfig(pContext, &pContext->mConfig);
+
+    return 0;
+}
+
+//
+//--- Effect Library Interface Implementation
+//
+
+int LELib_Create(const effect_uuid_t *uuid,
+                         int32_t sessionId,
+                         int32_t ioId,
+                         effect_handle_t *pHandle) {
+    ALOGV("LELib_Create()");
+    int ret;
+    int i;
+
+    if (pHandle == NULL || uuid == NULL) {
+        return -EINVAL;
+    }
+
+    if (memcmp(uuid, &gLEDescriptor.uuid, sizeof(effect_uuid_t)) != 0) {
+        return -EINVAL;
+    }
+
+    LoudnessEnhancerContext *pContext = new LoudnessEnhancerContext;
+
+    pContext->mItfe = &gLEInterface;
+    pContext->mState = LOUDNESS_ENHANCER_STATE_UNINITIALIZED;
+
+    pContext->mCompressor = NULL;
+    ret = LE_init(pContext);
+    if (ret < 0) {
+        ALOGW("LELib_Create() init failed");
+        delete pContext;
+        return ret;
+    }
+
+    *pHandle = (effect_handle_t)pContext;
+
+    pContext->mState = LOUDNESS_ENHANCER_STATE_INITIALIZED;
+
+    ALOGV("  LELib_Create context is %p", pContext);
+
+    return 0;
+
+}
+
+int LELib_Release(effect_handle_t handle) {
+    LoudnessEnhancerContext * pContext = (LoudnessEnhancerContext *)handle;
+
+    ALOGV("LELib_Release %p", handle);
+    if (pContext == NULL) {
+        return -EINVAL;
+    }
+    pContext->mState = LOUDNESS_ENHANCER_STATE_UNINITIALIZED;
+    if (pContext->mCompressor != NULL) {
+        delete pContext->mCompressor;
+        pContext->mCompressor = NULL;
+    }
+    delete pContext;
+
+    return 0;
+}
+
+int LELib_GetDescriptor(const effect_uuid_t *uuid,
+                                effect_descriptor_t *pDescriptor) {
+
+    if (pDescriptor == NULL || uuid == NULL){
+        ALOGV("LELib_GetDescriptor() called with NULL pointer");
+        return -EINVAL;
+    }
+
+    if (memcmp(uuid, &gLEDescriptor.uuid, sizeof(effect_uuid_t)) == 0) {
+        *pDescriptor = gLEDescriptor;
+        return 0;
+    }
+
+    return  -EINVAL;
+} /* end LELib_GetDescriptor */
+
+//
+//--- Effect Control Interface Implementation
+//
+int LE_process(
+        effect_handle_t self, audio_buffer_t *inBuffer, audio_buffer_t *outBuffer)
+{
+    LoudnessEnhancerContext * pContext = (LoudnessEnhancerContext *)self;
+
+    if (pContext == NULL) {
+        return -EINVAL;
+    }
+
+    if (inBuffer == NULL || inBuffer->raw == NULL ||
+        outBuffer == NULL || outBuffer->raw == NULL ||
+        inBuffer->frameCount != outBuffer->frameCount ||
+        inBuffer->frameCount == 0) {
+        return -EINVAL;
+    }
+
+    //ALOGV("LE about to process %d samples", inBuffer->frameCount);
+    uint16_t inIdx;
+    float inputAmp = pow(10, pContext->mTargetGainmB/2000.0f);
+    float leftSample, rightSample;
+    for (inIdx = 0 ; inIdx < inBuffer->frameCount ; inIdx++) {
+        // makeup gain is applied on the input of the compressor
+        leftSample  = inputAmp * (float)inBuffer->s16[2*inIdx];
+        rightSample = inputAmp * (float)inBuffer->s16[2*inIdx +1];
+        pContext->mCompressor->Compress(&leftSample, &rightSample);
+        inBuffer->s16[2*inIdx]    = (int16_t) leftSample;
+        inBuffer->s16[2*inIdx +1] = (int16_t) rightSample;
+    }
+
+    if (inBuffer->raw != outBuffer->raw) {
+        if (pContext->mConfig.outputCfg.accessMode == EFFECT_BUFFER_ACCESS_ACCUMULATE) {
+            for (size_t i = 0; i < outBuffer->frameCount*2; i++) {
+                outBuffer->s16[i] = clamp16(outBuffer->s16[i] + inBuffer->s16[i]);
+            }
+        } else {
+            memcpy(outBuffer->raw, inBuffer->raw, outBuffer->frameCount * 2 * sizeof(int16_t));
+        }
+    }
+    if (pContext->mState != LOUDNESS_ENHANCER_STATE_ACTIVE) {
+        return -ENODATA;
+    }
+    return 0;
+}
+
+int LE_command(effect_handle_t self, uint32_t cmdCode, uint32_t cmdSize,
+        void *pCmdData, uint32_t *replySize, void *pReplyData) {
+
+    LoudnessEnhancerContext * pContext = (LoudnessEnhancerContext *)self;
+    int retsize;
+
+    if (pContext == NULL || pContext->mState == LOUDNESS_ENHANCER_STATE_UNINITIALIZED) {
+        return -EINVAL;
+    }
+
+//    ALOGV("LE_command command %d cmdSize %d",cmdCode, cmdSize);
+    switch (cmdCode) {
+    case EFFECT_CMD_INIT:
+        if (pReplyData == NULL || *replySize != sizeof(int)) {
+            return -EINVAL;
+        }
+        *(int *) pReplyData = LE_init(pContext);
+        break;
+    case EFFECT_CMD_SET_CONFIG:
+        if (pCmdData == NULL || cmdSize != sizeof(effect_config_t)
+                || pReplyData == NULL || *replySize != sizeof(int)) {
+            return -EINVAL;
+        }
+        *(int *) pReplyData = LE_setConfig(pContext,
+                (effect_config_t *) pCmdData);
+        break;
+    case EFFECT_CMD_GET_CONFIG:
+        if (pReplyData == NULL ||
+            *replySize != sizeof(effect_config_t)) {
+            return -EINVAL;
+        }
+        LE_getConfig(pContext, (effect_config_t *)pReplyData);
+        break;
+    case EFFECT_CMD_RESET:
+        LE_reset(pContext);
+        break;
+    case EFFECT_CMD_ENABLE:
+        if (pReplyData == NULL || *replySize != sizeof(int)) {
+            return -EINVAL;
+        }
+        if (pContext->mState != LOUDNESS_ENHANCER_STATE_INITIALIZED) {
+            return -ENOSYS;
+        }
+        pContext->mState = LOUDNESS_ENHANCER_STATE_ACTIVE;
+        ALOGV("EFFECT_CMD_ENABLE() OK");
+        *(int *)pReplyData = 0;
+        break;
+    case EFFECT_CMD_DISABLE:
+        if (pReplyData == NULL || *replySize != sizeof(int)) {
+            return -EINVAL;
+        }
+        if (pContext->mState != LOUDNESS_ENHANCER_STATE_ACTIVE) {
+            return -ENOSYS;
+        }
+        pContext->mState = LOUDNESS_ENHANCER_STATE_INITIALIZED;
+        ALOGV("EFFECT_CMD_DISABLE() OK");
+        *(int *)pReplyData = 0;
+        break;
+    case EFFECT_CMD_GET_PARAM: {
+        if (pCmdData == NULL ||
+            cmdSize != (int)(sizeof(effect_param_t) + sizeof(uint32_t)) ||
+            pReplyData == NULL ||
+            *replySize < (int)(sizeof(effect_param_t) + sizeof(uint32_t) + sizeof(uint32_t))) {
+            return -EINVAL;
+        }
+        memcpy(pReplyData, pCmdData, sizeof(effect_param_t) + sizeof(uint32_t));
+        effect_param_t *p = (effect_param_t *)pReplyData;
+        p->status = 0;
+        *replySize = sizeof(effect_param_t) + sizeof(uint32_t);
+        if (p->psize != sizeof(uint32_t)) {
+            p->status = -EINVAL;
+            break;
+        }
+        switch (*(uint32_t *)p->data) {
+        case LOUDNESS_ENHANCER_PARAM_TARGET_GAIN_MB:
+            ALOGV("get target gain(mB) = %d", pContext->mTargetGainmB);
+            *((int32_t *)p->data + 1) = pContext->mTargetGainmB;
+            p->vsize = sizeof(int32_t);
+            *replySize += sizeof(int32_t);
+            break;
+        default:
+            p->status = -EINVAL;
+        }
+        } break;
+    case EFFECT_CMD_SET_PARAM: {
+        if (pCmdData == NULL ||
+            cmdSize != (int)(sizeof(effect_param_t) + sizeof(uint32_t) + sizeof(uint32_t)) ||
+            pReplyData == NULL || *replySize != sizeof(int32_t)) {
+            return -EINVAL;
+        }
+        *(int32_t *)pReplyData = 0;
+        effect_param_t *p = (effect_param_t *)pCmdData;
+        if (p->psize != sizeof(uint32_t) || p->vsize != sizeof(uint32_t)) {
+            *(int32_t *)pReplyData = -EINVAL;
+            break;
+        }
+        switch (*(uint32_t *)p->data) {
+        case LOUDNESS_ENHANCER_PARAM_TARGET_GAIN_MB:
+            pContext->mTargetGainmB = *((int32_t *)p->data + 1);
+            ALOGV("set target gain(mB) = %d", pContext->mTargetGainmB);
+            LE_reset(pContext); // apply parameter update
+            break;
+        default:
+            *(int32_t *)pReplyData = -EINVAL;
+        }
+        } break;
+    case EFFECT_CMD_SET_DEVICE:
+    case EFFECT_CMD_SET_VOLUME:
+    case EFFECT_CMD_SET_AUDIO_MODE:
+        break;
+
+    default:
+        ALOGW("LE_command invalid command %d",cmdCode);
+        return -EINVAL;
+    }
+
+    return 0;
+}
+
+/* Effect Control Interface Implementation: get_descriptor */
+int LE_getDescriptor(effect_handle_t   self,
+                                    effect_descriptor_t *pDescriptor)
+{
+    LoudnessEnhancerContext * pContext = (LoudnessEnhancerContext *) self;
+
+    if (pContext == NULL || pDescriptor == NULL) {
+        ALOGV("LE_getDescriptor() invalid param");
+        return -EINVAL;
+    }
+
+    *pDescriptor = gLEDescriptor;
+
+    return 0;
+}   /* end LE_getDescriptor */
+
+// effect_handle_t interface implementation for DRC effect
+const struct effect_interface_s gLEInterface = {
+        LE_process,
+        LE_command,
+        LE_getDescriptor,
+        NULL,
+};
+
+// This is the only symbol that needs to be exported
+__attribute__ ((visibility ("default")))
+audio_effect_library_t AUDIO_EFFECT_LIBRARY_INFO_SYM = {
+    tag : AUDIO_EFFECT_LIBRARY_TAG,
+    version : EFFECT_LIBRARY_API_VERSION,
+    name : "Loudness Enhancer Library",
+    implementor : "The Android Open Source Project",
+    create_effect : LELib_Create,
+    release_effect : LELib_Release,
+    get_descriptor : LELib_GetDescriptor,
+};
+
+}; // extern "C"
+
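
For reference on the millibel math used in LE_init() and LE_reset() above: the linear factor is 10^(mTargetGainmB / 2000), i.e. mB are converted to dB (divide by 100) and then to amplitude (10^(dB/20)); a target gain of 600 mB (6 dB) therefore yields a factor of roughly 2.0, and 0 mB yields 1.0 (no makeup gain).
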
diff --git a/media/libeffects/loudness/MODULE_LICENSE_APACHE2 b/media/libeffects/loudness/MODULE_LICENSE_APACHE2
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/media/libeffects/loudness/MODULE_LICENSE_APACHE2
diff --git a/media/libeffects/loudness/NOTICE b/media/libeffects/loudness/NOTICE
new file mode 100644
index 0000000..ad6ed94
--- /dev/null
+++ b/media/libeffects/loudness/NOTICE
@@ -0,0 +1,190 @@
+
+   Copyright (c) 2013, The Android Open Source Project
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+
+
+                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity. For the purposes of this definition,
+      "control" means (i) the power, direct or indirect, to cause the
+      direction or management of such entity, whether by contract or
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+      outstanding shares, or (iii) beneficial ownership of such entity.
+
+      "You" (or "Your") shall mean an individual or Legal Entity
+      exercising permissions granted by this License.
+
+      "Source" form shall mean the preferred form for making modifications,
+      including but not limited to software source code, documentation
+      source, and configuration files.
+
+      "Object" form shall mean any form resulting from mechanical
+      transformation or translation of a Source form, including but
+      not limited to compiled object code, generated documentation,
+      and conversions to other media types.
+
+      "Work" shall mean the work of authorship, whether in Source or
+      Object form, made available under the License, as indicated by a
+      copyright notice that is included in or attached to the work
+      (an example is provided in the Appendix below).
+
+      "Derivative Works" shall mean any work, whether in Source or Object
+      form, that is based on (or derived from) the Work and for which the
+      editorial revisions, annotations, elaborations, or other modifications
+      represent, as a whole, an original work of authorship. For the purposes
+      of this License, Derivative Works shall not include works that remain
+      separable from, or merely link (or bind by name) to the interfaces of,
+      the Work and Derivative Works thereof.
+
+      "Contribution" shall mean any work of authorship, including
+      the original version of the Work and any modifications or additions
+      to that Work or Derivative Works thereof, that is intentionally
+      submitted to Licensor for inclusion in the Work by the copyright owner
+      or by an individual or Legal Entity authorized to submit on behalf of
+      the copyright owner. For the purposes of this definition, "submitted"
+      means any form of electronic, verbal, or written communication sent
+      to the Licensor or its representatives, including but not limited to
+      communication on electronic mailing lists, source code control systems,
+      and issue tracking systems that are managed by, or on behalf of, the
+      Licensor for the purpose of discussing and improving the Work, but
+      excluding communication that is conspicuously marked or otherwise
+      designated in writing by the copyright owner as "Not a Contribution."
+
+      "Contributor" shall mean Licensor and any individual or Legal Entity
+      on behalf of whom a Contribution has been received by Licensor and
+      subsequently incorporated within the Work.
+
+   2. Grant of Copyright License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      copyright license to reproduce, prepare Derivative Works of,
+      publicly display, publicly perform, sublicense, and distribute the
+      Work and such Derivative Works in Source or Object form.
+
+   3. Grant of Patent License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      (except as stated in this section) patent license to make, have made,
+      use, offer to sell, sell, import, and otherwise transfer the Work,
+      where such license applies only to those patent claims licensable
+      by such Contributor that are necessarily infringed by their
+      Contribution(s) alone or by combination of their Contribution(s)
+      with the Work to which such Contribution(s) was submitted. If You
+      institute patent litigation against any entity (including a
+      cross-claim or counterclaim in a lawsuit) alleging that the Work
+      or a Contribution incorporated within the Work constitutes direct
+      or contributory patent infringement, then any patent licenses
+      granted to You under this License for that Work shall terminate
+      as of the date such litigation is filed.
+
+   4. Redistribution. You may reproduce and distribute copies of the
+      Work or Derivative Works thereof in any medium, with or without
+      modifications, and in Source or Object form, provided that You
+      meet the following conditions:
+
+      (a) You must give any other recipients of the Work or
+          Derivative Works a copy of this License; and
+
+      (b) You must cause any modified files to carry prominent notices
+          stating that You changed the files; and
+
+      (c) You must retain, in the Source form of any Derivative Works
+          that You distribute, all copyright, patent, trademark, and
+          attribution notices from the Source form of the Work,
+          excluding those notices that do not pertain to any part of
+          the Derivative Works; and
+
+      (d) If the Work includes a "NOTICE" text file as part of its
+          distribution, then any Derivative Works that You distribute must
+          include a readable copy of the attribution notices contained
+          within such NOTICE file, excluding those notices that do not
+          pertain to any part of the Derivative Works, in at least one
+          of the following places: within a NOTICE text file distributed
+          as part of the Derivative Works; within the Source form or
+          documentation, if provided along with the Derivative Works; or,
+          within a display generated by the Derivative Works, if and
+          wherever such third-party notices normally appear. The contents
+          of the NOTICE file are for informational purposes only and
+          do not modify the License. You may add Your own attribution
+          notices within Derivative Works that You distribute, alongside
+          or as an addendum to the NOTICE text from the Work, provided
+          that such additional attribution notices cannot be construed
+          as modifying the License.
+
+      You may add Your own copyright statement to Your modifications and
+      may provide additional or different license terms and conditions
+      for use, reproduction, or distribution of Your modifications, or
+      for any such Derivative Works as a whole, provided Your use,
+      reproduction, and distribution of the Work otherwise complies with
+      the conditions stated in this License.
+
+   5. Submission of Contributions. Unless You explicitly state otherwise,
+      any Contribution intentionally submitted for inclusion in the Work
+      by You to the Licensor shall be under the terms and conditions of
+      this License, without any additional terms or conditions.
+      Notwithstanding the above, nothing herein shall supersede or modify
+      the terms of any separate license agreement you may have executed
+      with Licensor regarding such Contributions.
+
+   6. Trademarks. This License does not grant permission to use the trade
+      names, trademarks, service marks, or product names of the Licensor,
+      except as required for reasonable and customary use in describing the
+      origin of the Work and reproducing the content of the NOTICE file.
+
+   7. Disclaimer of Warranty. Unless required by applicable law or
+      agreed to in writing, Licensor provides the Work (and each
+      Contributor provides its Contributions) on an "AS IS" BASIS,
+      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+      implied, including, without limitation, any warranties or conditions
+      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+      PARTICULAR PURPOSE. You are solely responsible for determining the
+      appropriateness of using or redistributing the Work and assume any
+      risks associated with Your exercise of permissions under this License.
+
+   8. Limitation of Liability. In no event and under no legal theory,
+      whether in tort (including negligence), contract, or otherwise,
+      unless required by applicable law (such as deliberate and grossly
+      negligent acts) or agreed to in writing, shall any Contributor be
+      liable to You for damages, including any direct, indirect, special,
+      incidental, or consequential damages of any character arising as a
+      result of this License or out of the use or inability to use the
+      Work (including but not limited to damages for loss of goodwill,
+      work stoppage, computer failure or malfunction, or any and all
+      other commercial damages or losses), even if such Contributor
+      has been advised of the possibility of such damages.
+
+   9. Accepting Warranty or Additional Liability. While redistributing
+      the Work or Derivative Works thereof, You may choose to offer,
+      and charge a fee for, acceptance of support, warranty, indemnity,
+      or other liability obligations and/or rights consistent with this
+      License. However, in accepting such obligations, You may act only
+      on Your own behalf and on Your sole responsibility, not on behalf
+      of any other Contributor, and only if You agree to indemnify,
+      defend, and hold each Contributor harmless for any liability
+      incurred by, or claims asserted against, such Contributor by reason
+      of your accepting any such warranty or additional liability.
+
+   END OF TERMS AND CONDITIONS
+
diff --git a/media/libeffects/loudness/common/core/basic_types.h b/media/libeffects/loudness/common/core/basic_types.h
new file mode 100644
index 0000000..593e914
--- /dev/null
+++ b/media/libeffects/loudness/common/core/basic_types.h
@@ -0,0 +1,114 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef LE_FX_ENGINE_COMMON_CORE_BASIC_TYPES_H_
+#define LE_FX_ENGINE_COMMON_CORE_BASIC_TYPES_H_
+
+#include <stddef.h>
+#include <stdlib.h>
+#include <string>
+using ::std::string;
+using ::std::basic_string;
+#include <vector>
+using ::std::vector;
+
+#include "common/core/os.h"
+
+// -----------------------------------------------------------------------------
+// Definitions of common basic types:
+// -----------------------------------------------------------------------------
+
+#if !defined(G_COMPILE) && !defined(BASE_INTEGRAL_TYPES_H_)
+
+namespace le_fx {
+
+typedef signed char         schar;
+typedef signed char         int8;
+typedef short               int16;
+typedef int                 int32;
+typedef long long           int64;
+
+typedef unsigned char       uint8;
+typedef unsigned short      uint16;
+typedef unsigned int        uint32;
+typedef unsigned long long  uint64;
+
+}  // namespace le_fx
+
+#endif
+
+namespace le_fx {
+
+struct FloatArray {
+  int length;
+  float *data;
+
+  FloatArray(void) {
+    data = NULL;
+    length = 0;
+  }
+};
+
+struct Int16Array {
+  int length;
+  int16 *data;
+
+  Int16Array(void) {
+    data = NULL;
+    length = 0;
+  }
+};
+
+struct Int32Array {
+  int length;
+  int32 *data;
+
+  Int32Array(void) {
+    data = NULL;
+    length = 0;
+  }
+};
+
+struct Int8Array {
+  int length;
+  uint8 *data;
+
+  Int8Array(void) {
+    data = NULL;
+    length = 0;
+  }
+};
+
+//
+// Simple wrapper for waveform data:
+//
+class WaveData : public vector<int16> {
+ public:
+  WaveData();
+  ~WaveData();
+
+  void Set(int number_samples, int sampling_rate, int16 *data);
+  int sample_rate(void) const;
+  void set_sample_rate(int sample_rate);
+  bool Equals(const WaveData &wave_data, int threshold = 0) const;
+
+ private:
+  int sample_rate_;
+};
+
+}  // namespace le_fx
+
+#endif  // LE_FX_ENGINE_COMMON_CORE_BASIC_TYPES_H_
diff --git a/media/libeffects/loudness/common/core/byte_swapper.h b/media/libeffects/loudness/common/core/byte_swapper.h
new file mode 100644
index 0000000..8f0caf3
--- /dev/null
+++ b/media/libeffects/loudness/common/core/byte_swapper.h
@@ -0,0 +1,151 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef LE_FX_ENGINE_COMMON_CORE_BYTE_SWAPPER_H_
+#define LE_FX_ENGINE_COMMON_CORE_BYTE_SWAPPER_H_
+
+#include <stdio.h>
+#include <string.h>
+
+#include "common/core/basic_types.h"
+#include "common/core/os.h"
+
+namespace le_fx {
+
+namespace arch {
+
+inline bool IsLittleEndian(void) {
+  int16 word = 1;
+  char *cp = reinterpret_cast<char *>(&word);
+  return cp[0] != 0;
+}
+
+inline bool IsBigEndian(void) {
+  return !IsLittleEndian();
+}
+
+template <typename T, unsigned int kValSize>
+struct ByteSwapper {
+  static T Swap(const T &val) {
+    T new_val = val;
+    char *first = reinterpret_cast<char *>(&new_val), *last = first + kValSize - 1, x;
+    for (; first < last; ++first, --last) {
+      x = *last;
+      *last = *first;
+      *first = x;
+    }
+    return new_val;
+  }
+};
+
+template <typename T>
+struct ByteSwapper<T, 1> {
+  static T Swap(const T &val) {
+    return val;
+  }
+};
+
+template <typename T>
+struct ByteSwapper<T, 2> {
+  static T Swap(const T &val) {
+    T new_val;
+    const char *o = (const char *)&val;
+    char *p = reinterpret_cast<char *>(&new_val);
+    p[0] = o[1];
+    p[1] = o[0];
+    return new_val;
+  }
+};
+
+template <typename T>
+struct ByteSwapper<T, 4> {
+  static T Swap(const T &val) {
+    T new_val;
+    const char *o = (const char *)&val;
+    char *p = reinterpret_cast<char *>(&new_val);
+    p[0] = o[3];
+    p[1] = o[2];
+    p[2] = o[1];
+    p[3] = o[0];
+    return new_val;
+  }
+};
+
+template <typename T>
+struct ByteSwapper<T, 8> {
+  static T Swap(const T &val) {
+    T new_val = val;
+    const char *o = (const char *)&val;
+    char *p = reinterpret_cast<char *>(&new_val);
+    p[0] = o[7];
+    p[1] = o[6];
+    p[2] = o[5];
+    p[3] = o[4];
+    p[4] = o[3];
+    p[5] = o[2];
+    p[6] = o[1];
+    p[7] = o[0];
+    return new_val;
+  }
+};
+
+template <typename T>
+T SwapBytes(const T &val, bool force_swap) {
+  if (force_swap) {
+#if !defined(LE_FX_NEED_BYTESWAP)
+    return ByteSwapper<T, sizeof(T)>::Swap(val);
+#else
+    return val;
+#endif  // !LE_FX_NEED_BYTESWAP
+  } else {
+#if !defined(LE_FX_NEED_BYTESWAP)
+    return val;
+#else
+    return ByteSwapper<T, sizeof(T)>::Swap(val);
+#endif  // !LE_FX_NEED_BYTESWAP
+  }
+}
+
+template <typename T>
+const T *SwapBytes(const T *vals, unsigned int num_items, bool force_swap) {
+  if (force_swap) {
+#if !defined(LE_FX_NEED_BYTESWAP)
+    T *writeable_vals = const_cast<T *>(vals);
+    for (unsigned int i = 0; i < num_items; i++) {
+      writeable_vals[i] = ByteSwapper<T, sizeof(T)>::Swap(vals[i]);
+    }
+    return writeable_vals;
+#else
+    return vals;
+#endif  // !LE_FX_NEED_BYTESWAP
+  } else {
+#if !defined(LE_FX_NEED_BYTESWAP)
+    return vals;
+#else
+    T *writeable_vals = const_cast<T *>(vals);
+    for (unsigned int i = 0; i < num_items; i++) {
+      writeable_vals[i] = ByteSwapper<T, sizeof(T)>::Swap(vals[i]);
+    }
+    return writeable_vals;
+#endif  // !LE_FX_NEED_BYTESWAP
+  }
+}
+
+}  // namespace arch
+
+}  // namespace le_fx
+
+#endif  // LE_FX_ENGINE_COMMON_CORE_BYTE_SWAPPER_H_
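[Editor's sketch, not part of the patch: how the SwapBytes() helper above behaves when LE_FX_NEED_BYTESWAP is left undefined (the default in this tree). The function name below is illustrative.]

    #include <cassert>
    #include "common/core/byte_swapper.h"

    void ByteSwapExample() {
      le_fx::uint32 x = 0x11223344u;
      // With LE_FX_NEED_BYTESWAP undefined, force_swap == true routes through
      // ByteSwapper<T, 4>::Swap() and reverses the byte order ...
      assert(le_fx::arch::SwapBytes(x, true) == 0x44332211u);
      // ... while force_swap == false returns the value unchanged.
      assert(le_fx::arch::SwapBytes(x, false) == x);
    }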
diff --git a/media/libeffects/loudness/common/core/math.h b/media/libeffects/loudness/common/core/math.h
new file mode 100644
index 0000000..3f302cc
--- /dev/null
+++ b/media/libeffects/loudness/common/core/math.h
@@ -0,0 +1,89 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef LE_FX_ENGINE_COMMON_CORE_MATH_H_
+#define LE_FX_ENGINE_COMMON_CORE_MATH_H_
+
+#include <math.h>
+#include <cmath>
+#include <algorithm>
+using ::std::min;
+using ::std::max;
+using ::std::fill;
+using ::std::fill_n;
+using ::std::lower_bound;
+//using ::std::fpclassify;
+
+#include "common/core/os.h"
+#include "common/core/types.h"
+
+namespace le_fx {
+namespace math {
+
+// A fast approximation to log2(.)
+inline float fast_log2(float val) {
+  int* const exp_ptr = reinterpret_cast <int *> (&val);
+  int x = *exp_ptr;
+  const int log_2 = ((x >> 23) & 255) - 128;
+  x &= ~(255 << 23);
+  x += 127 << 23;
+  *exp_ptr = x;
+  val = ((-1.0f / 3) * val + 2) * val - 2.0f / 3;
+  return static_cast<float>(val + log_2);
+}
+
+// A fast approximation to log(.)
+inline float fast_log(float val) {
+  return fast_log2(val) *
+      0.693147180559945286226763982995180413126945495605468750f;
+}
+
+// An approximation of the exp(.) function using a 5th-order Taylor expansion.
+// It is quite accurate within +/-0.1 and accurate to roughly 10e-3 within +/-1.
+template <typename T>
+inline T ExpApproximationViaTaylorExpansionOrder5(T x) {
+  const T x2 = x * x;
+  const T x3 = x2 * x;
+  const T x4 = x2 * x2;
+  const T x5 = x3 * x2;
+  return 1.0f + x + 0.5f * x2 +
+      0.16666666666666665741480812812369549646973609924316406250f * x3 +
+      0.0416666666666666643537020320309238741174340248107910156250f * x4 +
+      0.008333333333333333217685101601546193705871701240539550781250f * x5;
+}
+
+}  // namespace math
+}  // namespace le_fx
+
+// Math functions missing in Android NDK:
+#if defined(LE_FX_OS_ANDROID)
+
+namespace std {
+
+//
+// Round to the nearest integer: We need this implementation
+// since std::round is missing on android.
+//
+template <typename T>
+inline T round(const T &x) {
+  return static_cast<T>(std::floor(static_cast<double>(x) + 0.5));
+}
+
+}  // namespace std
+
+#endif  // LE_FX_OS_ANDROID
+
+#endif  // LE_FX_ENGINE_COMMON_CORE_MATH_H_
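[Editor's sketch, not part of the patch: a quick sanity check of the approximations above against the exact libm functions; the function name is illustrative.]

    #include <cmath>
    #include <cstdio>
    #include "common/core/math.h"

    void MathApproximationExample() {
      // fast_log2 extracts the float exponent and refines the mantissa with a
      // quadratic; powers of two come out exact.
      std::printf("fast_log2(8.0) = %f (exact 3)\n", le_fx::math::fast_log2(8.0f));
      // fast_log is fast_log2 scaled by ln(2).
      std::printf("fast_log(10.0) = %f (exact %f)\n",
                  le_fx::math::fast_log(10.0f), std::log(10.0f));
      // 5th-order Taylor expansion of exp(.) around zero.
      std::printf("exp5(0.1) = %f (exact %f)\n",
                  le_fx::math::ExpApproximationViaTaylorExpansionOrder5(0.1f),
                  std::exp(0.1f));
    }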
diff --git a/media/libeffects/loudness/common/core/os.h b/media/libeffects/loudness/common/core/os.h
new file mode 100644
index 0000000..4a8ce82
--- /dev/null
+++ b/media/libeffects/loudness/common/core/os.h
@@ -0,0 +1,29 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef LE_FX_ENGINE_COMMON_CORE_OS_H_
+#define LE_FX_ENGINE_COMMON_CORE_OS_H_
+
+// -----------------------------------------------------------------------------
+// OS Identification:
+// -----------------------------------------------------------------------------
+
+#define LE_FX_OS_UNIX
+#if defined(__ANDROID__)
+#    define LE_FX_OS_ANDROID
+#endif  // Android
+
+#endif // LE_FX_ENGINE_COMMON_CORE_OS_H_
diff --git a/media/libeffects/loudness/common/core/types.h b/media/libeffects/loudness/common/core/types.h
new file mode 100644
index 0000000..d1b6c6a
--- /dev/null
+++ b/media/libeffects/loudness/common/core/types.h
@@ -0,0 +1,31 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef LE_FX_ENGINE_COMMON_CORE_TYPES_H_
+#define LE_FX_ENGINE_COMMON_CORE_TYPES_H_
+
+#include "common/core/os.h"
+
+#include "common/core/basic_types.h"
+
+#ifndef LE_FX_DISALLOW_COPY_AND_ASSIGN
+#define LE_FX_DISALLOW_COPY_AND_ASSIGN(TypeName) \
+  TypeName(const TypeName&); \
+  void operator=(const TypeName&)
+#endif  // LE_FX_DISALLOW_COPY_AND_ASSIGN
+
+
+#endif  // LE_FX_ENGINE_COMMON_CORE_TYPES_H_
diff --git a/media/libeffects/loudness/dsp/core/basic-inl.h b/media/libeffects/loudness/dsp/core/basic-inl.h
new file mode 100644
index 0000000..3f77147
--- /dev/null
+++ b/media/libeffects/loudness/dsp/core/basic-inl.h
@@ -0,0 +1,48 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef LE_FX_ENGINE_DSP_CORE_BASIC_INL_H_
+#define LE_FX_ENGINE_DSP_CORE_BASIC_INL_H_
+
+#include <math.h>
+
+namespace le_fx {
+
+namespace sigmod {
+
+template <typename T>
+int SearchIndex(const T x_data[],
+                T x,
+                int start_index,
+                int end_index) {
+  int start = start_index;
+  int end = end_index;
+  while (end > start + 1) {
+    int i = (end + start) / 2;
+    if (x_data[i] > x) {
+      end = i;
+    } else {
+      start = i;
+    }
+  }
+  return start;
+}
+
+}  // namespace sigmod
+
+}  // namespace le_fx
+
+#endif  // LE_FX_ENGINE_DSP_CORE_BASIC_INL_H_
diff --git a/media/libeffects/loudness/dsp/core/basic.h b/media/libeffects/loudness/dsp/core/basic.h
new file mode 100644
index 0000000..27e0a8d
--- /dev/null
+++ b/media/libeffects/loudness/dsp/core/basic.h
@@ -0,0 +1,48 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef LE_FX_ENGINE_DSP_CORE_BASIC_H_
+#define LE_FX_ENGINE_DSP_CORE_BASIC_H_
+
+#include <limits.h>
+#include "common/core/math.h"
+#include "common/core/types.h"
+
+namespace le_fx {
+
+namespace sigmod {
+
+// Searches for the interval that contains <x> using a divide-and-conquer
+// (binary search) algorithm.
+// X[]: a vector of sorted values (X[i+1] > X[i])
+// x:   a value
+// StartIndex: the minimum searched index
+// EndIndex: the maximum searched index
+// returns: the index <i> that satisfies: X[i] <= x <= X[i+1] &&
+//          StartIndex <= i <= (EndIndex-1)
+template <typename T>
+int SearchIndex(const T x_data[],
+                T x,
+                int start_index,
+                int end_index);
+
+}  // namespace sigmod
+
+}  // namespace le_fx
+
+#include "dsp/core/basic-inl.h"
+
+#endif  // LE_FX_ENGINE_DSP_CORE_BASIC_H_
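[Editor's sketch, not part of the patch: how the SearchIndex() helper declared above behaves on a small sorted grid; values and the function name are illustrative.]

    #include <cassert>
    #include "dsp/core/basic.h"

    void SearchIndexExample() {
      const float x[] = {0.0f, 1.0f, 2.0f, 4.0f, 8.0f};
      // Returns the index i with x[i] <= value < x[i+1], restricted to the
      // given [start_index, end_index] range.
      assert(le_fx::sigmod::SearchIndex(x, 3.0f, 0, 4) == 2);  // 2.0 <= 3.0 < 4.0
      assert(le_fx::sigmod::SearchIndex(x, 0.5f, 0, 4) == 0);  // 0.0 <= 0.5 < 1.0
    }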
diff --git a/media/libeffects/loudness/dsp/core/dynamic_range_compression-inl.h b/media/libeffects/loudness/dsp/core/dynamic_range_compression-inl.h
new file mode 100644
index 0000000..da75ceb
--- /dev/null
+++ b/media/libeffects/loudness/dsp/core/dynamic_range_compression-inl.h
@@ -0,0 +1,45 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#ifndef LE_FX_ENGINE_DSP_CORE_DYNAMIC_RANGE_COMPRESSION_INL_H_
+#define LE_FX_ENGINE_DSP_CORE_DYNAMIC_RANGE_COMPRESSION_INL_H_
+
+//#define LOG_NDEBUG 0
+#include <cutils/log.h>
+
+
+namespace le_fx {
+
+
+inline void AdaptiveDynamicRangeCompression::set_knee_threshold(float decibel) {
+  // Converts the knee from decibels (relative to full scale) to the natural-log
+  // amplitude domain used internally: ln(10) / 20 * decibel + ln(32767)
+  knee_threshold_in_decibel_ = decibel;
+  knee_threshold_ = 0.1151292546497023061569109358970308676362037658691406250f *
+      decibel + 10.39717719035538401328722102334722876548767089843750f;
+}
+
+
+inline void AdaptiveDynamicRangeCompression::set_knee_threshold_via_target_gain(
+    float target_gain) {
+  const float decibel = target_gain_to_knee_threshold_.Interpolate(
+        target_gain);
+  ALOGV("set_knee_threshold_via_target_gain: decibel =%.3fdB", decibel);
+  set_knee_threshold(decibel);
+}
+
+}  // namespace le_fx
+
+
+#endif  // LE_FX_ENGINE_DSP_CORE_DYNAMIC_RANGE_COMPRESSION_INL_H_
diff --git a/media/libeffects/loudness/dsp/core/dynamic_range_compression.cpp b/media/libeffects/loudness/dsp/core/dynamic_range_compression.cpp
new file mode 100644
index 0000000..7bd068e
--- /dev/null
+++ b/media/libeffects/loudness/dsp/core/dynamic_range_compression.cpp
@@ -0,0 +1,141 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <cmath>
+
+#include "common/core/math.h"
+#include "common/core/types.h"
+#include "dsp/core/basic.h"
+#include "dsp/core/interpolation.h"
+#include "dsp/core/dynamic_range_compression.h"
+
+//#define LOG_NDEBUG 0
+#include <cutils/log.h>
+
+
+namespace le_fx {
+
+// Definitions for static const class members declared in
+// dynamic_range_compression.h.
+const float AdaptiveDynamicRangeCompression::kMinAbsValue = 0.000001f;
+const float AdaptiveDynamicRangeCompression::kMinLogAbsValue =
+    0.032766999999999997517097227728299912996590137481689453125f;
+const float AdaptiveDynamicRangeCompression::kFixedPointLimit = 32767.0f;
+const float AdaptiveDynamicRangeCompression::kInverseFixedPointLimit =
+    1.0f / AdaptiveDynamicRangeCompression::kFixedPointLimit;
+const float AdaptiveDynamicRangeCompression::kDefaultKneeThresholdInDecibel =
+    -8.0f;
+const float AdaptiveDynamicRangeCompression::kCompressionRatio = 7.0f;
+const float AdaptiveDynamicRangeCompression::kTauAttack = 0.001f;
+const float AdaptiveDynamicRangeCompression::kTauRelease = 0.015f;
+
+AdaptiveDynamicRangeCompression::AdaptiveDynamicRangeCompression() {
+  static const float kTargetGain[] = {
+      1.0f, 2.0f, 3.0f, 4.0f, 5.0f };
+  static const float kKneeThreshold[] = {
+      -8.0f, -8.0f, -8.5f, -9.0f, -10.0f };
+  target_gain_to_knee_threshold_.Initialize(
+      &kTargetGain[0], &kKneeThreshold[0],
+      sizeof(kTargetGain) / sizeof(kTargetGain[0]));
+}
+
+bool AdaptiveDynamicRangeCompression::Initialize(
+        float target_gain, float sampling_rate) {
+  set_knee_threshold_via_target_gain(target_gain);
+  sampling_rate_ = sampling_rate;
+  state_ = 0.0f;
+  compressor_gain_ = 1.0f;
+  if (kTauAttack > 0.0f) {
+    const float taufs = kTauAttack * sampling_rate_;
+    alpha_attack_ = std::exp(-1.0f / taufs);
+  } else {
+    alpha_attack_ = 0.0f;
+  }
+  if (kTauRelease > 0.0f) {
+    const float taufs = kTauRelease * sampling_rate_;
+    alpha_release_ = std::exp(-1.0f / taufs);
+  } else {
+    alpha_release_ = 0.0f;
+  }
+  // Feed-forward topology
+  slope_ = 1.0f / kCompressionRatio - 1.0f;
+  return true;
+}
+
+float AdaptiveDynamicRangeCompression::Compress(float x) {
+  const float max_abs_x = std::max(std::fabs(x), kMinLogAbsValue);
+  const float max_abs_x_dB = math::fast_log(max_abs_x);
+  // Subtract Threshold from log-encoded input to get the amount of overshoot
+  const float overshoot = max_abs_x_dB - knee_threshold_;
+  // Hard half-wave rectifier
+  const float rect = std::max(overshoot, 0.0f);
+  // Multiply rectified overshoot with slope
+  const float cv = rect * slope_;
+  const float prev_state = state_;
+  if (cv <= state_) {
+    state_ = alpha_attack_ * state_ + (1.0f - alpha_attack_) * cv;
+  } else {
+    state_ = alpha_release_ * state_ + (1.0f - alpha_release_) * cv;
+  }
+  compressor_gain_ *=
+      math::ExpApproximationViaTaylorExpansionOrder5(state_ - prev_state);
+  x *= compressor_gain_;
+  if (x > kFixedPointLimit) {
+    return kFixedPointLimit;
+  }
+  if (x < -kFixedPointLimit) {
+    return -kFixedPointLimit;
+  }
+  return x;
+}
+
+void AdaptiveDynamicRangeCompression::Compress(float *x1, float *x2) {
+  // Taking the maximum amplitude of both channels
+  const float max_abs_x = std::max(std::fabs(*x1),
+    std::max(std::fabs(*x2), kMinLogAbsValue));
+  const float max_abs_x_dB = math::fast_log(max_abs_x);
+  // Subtract Threshold from log-encoded input to get the amount of overshoot
+  const float overshoot = max_abs_x_dB - knee_threshold_;
+  // Hard half-wave rectifier
+  const float rect = std::max(overshoot, 0.0f);
+  // Multiply rectified overshoot with slope
+  const float cv = rect * slope_;
+  const float prev_state = state_;
+  if (cv <= state_) {
+    state_ = alpha_attack_ * state_ + (1.0f - alpha_attack_) * cv;
+  } else {
+    state_ = alpha_release_ * state_ + (1.0f - alpha_release_) * cv;
+  }
+  compressor_gain_ *=
+      math::ExpApproximationViaTaylorExpansionOrder5(state_ - prev_state);
+  *x1 *= compressor_gain_;
+  if (*x1 > kFixedPointLimit) {
+    *x1 = kFixedPointLimit;
+  }
+  if (*x1 < -kFixedPointLimit) {
+    *x1 = -kFixedPointLimit;
+  }
+  *x2 *= compressor_gain_;
+  if (*x2 > kFixedPointLimit) {
+    *x2 = kFixedPointLimit;
+  }
+  if (*x2 < -kFixedPointLimit) {
+    *x2 = -kFixedPointLimit;
+  }
+}
+
+}  // namespace le_fx
+
diff --git a/media/libeffects/loudness/dsp/core/dynamic_range_compression.h b/media/libeffects/loudness/dsp/core/dynamic_range_compression.h
new file mode 100644
index 0000000..2821a78
--- /dev/null
+++ b/media/libeffects/loudness/dsp/core/dynamic_range_compression.h
@@ -0,0 +1,119 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#ifndef LE_FX_ENGINE_DSP_CORE_DYNAMIC_RANGE_COMPRESSION_H_
+#define LE_FX_ENGINE_DSP_CORE_DYNAMIC_RANGE_COMPRESSION_H_
+
+#include "common/core/types.h"
+#include "common/core/math.h"
+#include "dsp/core/basic.h"
+#include "dsp/core/interpolation.h"
+
+//#define LOG_NDEBUG 0
+#include <cutils/log.h>
+
+
+namespace le_fx {
+
+// An adaptive dynamic range compression algorithm. The gain adaptation is made
+// in the logarithmic domain and is based on a branching-smooth compensated
+// digital peak detector with different time constants for attack and release.
+class AdaptiveDynamicRangeCompression {
+ public:
+  AdaptiveDynamicRangeCompression();
+
+  // Initializes the compressor using prior information. It assumes that the
+  // input signal is speech from high-quality recordings that is scaled and then
+  // fed to the compressor. The compressor is tuned according to the target gain
+  // that is expected to be applied.
+  //
+  // Target gain receives values between 0.0 and 10.0. The knee threshold is
+  // reduced as the target gain increases in order to fit the increased range of
+  // values.
+  //
+  // Values between 1.0 and 2.0 will only mildly affect your signal. Higher
+  // values will reduce the dynamic range of the signal to the benefit of
+  // increased loudness.
+  //
+  // If nothing is known regarding the input, a `target_gain` of 1.0f is a
+  // relatively safe choice for many signals.
+  bool Initialize(float target_gain, float sampling_rate);
+
+  // A fast version of the algorithm that uses approximate computations for the
+  // log(.) and exp(.).
+  float Compress(float x);
+
+  // Stereo channel version of the compressor
+  void Compress(float *x1, float *x2);
+
+  // This version is slower than Compress(.) but faster than CompressSlow(.)
+  float CompressNormalSpeed(float x);
+
+  // A slow version of the algorithm that is easier for further development,
+  // tuning and debugging.
+  float CompressSlow(float x);
+
+  // Sets knee threshold (in decibel).
+  void set_knee_threshold(float decibel);
+
+  // Sets knee threshold via the target gain using an experimentally derived
+  // relationship.
+  void set_knee_threshold_via_target_gain(float target_gain);
+
+ private:
+  // The minimum accepted absolute input value and its natural logarithm. This
+  // is to prevent numerical issues when the input is close to zero.
+  static const float kMinAbsValue;
+  static const float kMinLogAbsValue;
+  // Fixed-point arithmetic limits
+  static const float kFixedPointLimit;
+  static const float kInverseFixedPointLimit;
+  // The default knee threshold in decibels. The knee threshold defines the level
+  // at which the compressor actually starts to compress the input samples.
+  static const float kDefaultKneeThresholdInDecibel;
+  // The compression ratio is the reciprocal of the slope of the line segment
+  // above the threshold (in the log-domain). The ratio controls the
+  // effectiveness of the compression.
+  static const float kCompressionRatio;
+  // The attack time of the envelope detector
+  static const float kTauAttack;
+  // The release time of the envelope detector
+  static const float kTauRelease;
+
+  float sampling_rate_;
+  // the internal state of the envelope detector
+  float state_;
+  // the latest gain factor that was applied to the input signal
+  float compressor_gain_;
+  // attack constant for exponential damping
+  float alpha_attack_;
+  // release constant for exponential damping
+  float alpha_release_;
+  float slope_;
+  // The knee threshold
+  float knee_threshold_;
+  float knee_threshold_in_decibel_;
+  // This interpolator provides the function that relates target gain to knee
+  // threshold.
+  sigmod::InterpolatorLinear<float> target_gain_to_knee_threshold_;
+
+  LE_FX_DISALLOW_COPY_AND_ASSIGN(AdaptiveDynamicRangeCompression);
+};
+
+}  // namespace le_fx
+
+#include "dsp/core/dynamic_range_compression-inl.h"
+
+#endif  // LE_FX_ENGINE_DSP_CORE_DYNAMIC_RANGE_COMPRESSION_H_
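[Editor's sketch, not part of the patch: minimal use of the public interface above on a pair of channel buffers; the target gain of 1.0 follows the guidance in the Initialize() comment, and the function name is illustrative.]

    #include "dsp/core/dynamic_range_compression.h"

    void CompressStereoBuffer(float *left, float *right, int frame_count,
                              float sampling_rate) {
      le_fx::AdaptiveDynamicRangeCompression drc;
      drc.Initialize(1.0f, sampling_rate);  // mild setting, per the comment above
      for (int n = 0; n < frame_count; ++n) {
        // The stereo overload derives one gain from the louder of the two
        // samples and applies it to both channels.
        drc.Compress(&left[n], &right[n]);
      }
    }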
diff --git a/media/libeffects/loudness/dsp/core/interpolation.h b/media/libeffects/loudness/dsp/core/interpolation.h
new file mode 100644
index 0000000..23c287c
--- /dev/null
+++ b/media/libeffects/loudness/dsp/core/interpolation.h
@@ -0,0 +1,24 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#ifndef LE_FX_ENGINE_DSP_CORE_INTERPOLATION_H_
+#define LE_FX_ENGINE_DSP_CORE_INTERPOLATION_H_
+
+#include "common/core/math.h"
+#include "dsp/core/interpolator_base.h"
+#include "dsp/core/interpolator_linear.h"
+
+#endif  // LE_FX_ENGINE_DSP_CORE_INTERPOLATION_H_
+
diff --git a/media/libeffects/loudness/dsp/core/interpolator_base-inl.h b/media/libeffects/loudness/dsp/core/interpolator_base-inl.h
new file mode 100644
index 0000000..bd08b65
--- /dev/null
+++ b/media/libeffects/loudness/dsp/core/interpolator_base-inl.h
@@ -0,0 +1,180 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef LE_FX_ENGINE_DSP_CORE_INTERPOLATOR_BASE_INL_H_
+#define LE_FX_ENGINE_DSP_CORE_INTERPOLATOR_BASE_INL_H_
+
+#include "dsp/core/basic.h"
+
+//#define LOG_NDEBUG 0
+#include <cutils/log.h>
+
+
+namespace le_fx {
+
+namespace sigmod {
+
+template <typename T, class Algorithm>
+InterpolatorBase<T, Algorithm>::InterpolatorBase() {
+  status_ = false;
+  cached_index_ = 0;
+  x_data_ = NULL;
+  y_data_ = NULL;
+  data_length_ = 0;
+  own_x_data_ = false;
+  x_start_offset_ = 0.0;
+  last_element_index_ = -1;
+  x_inverse_sampling_interval_ = 0.0;
+  state_ = NULL;
+}
+
+template <typename T, class Algorithm>
+InterpolatorBase<T, Algorithm>::~InterpolatorBase() {
+  delete [] state_;
+  if (own_x_data_) {
+    delete [] x_data_;
+  }
+}
+
+template <typename T, class Algorithm>
+bool InterpolatorBase<T, Algorithm>::Initialize(const vector<T> &x_data,
+                                                const vector<T> &y_data) {
+#ifndef NDEBUG
+  if (x_data.size() != y_data.size()) {
+    LoggerError("InterpolatorBase::Initialize: xData size (%d) != yData size"
+                  " (%d)", x_data.size(), y_data.size());
+  }
+#endif
+  return Initialize(&x_data[0], &y_data[0], x_data.size());
+}
+
+template <typename T, class Algorithm>
+bool InterpolatorBase<T, Algorithm>::Initialize(double x_start_offset,
+                                                double x_sampling_interval,
+                                                const vector<T> &y_data) {
+  return Initialize(x_start_offset,
+                    x_sampling_interval,
+                    &y_data[0],
+                    y_data.size());
+}
+
+template <typename T, class Algorithm>
+bool InterpolatorBase<T, Algorithm>::Initialize(double x_start_offset,
+                                                double x_sampling_interval,
+                                                const T *y_data,
+                                                int data_length) {
+  // Constructs and populates the x-axis data: `x_data_`
+  T *x_data_tmp = new T[data_length];
+  float time_offset = x_start_offset;
+  for (int n = 0; n < data_length; n++) {
+    x_data_tmp[n] = time_offset;
+    time_offset += x_sampling_interval;
+  }
+  Initialize(x_data_tmp, y_data, data_length);
+  // Sets up the regularly-sampled interpolation mode
+  x_start_offset_ = x_start_offset;
+  x_inverse_sampling_interval_ = 1.0 / x_sampling_interval;
+  own_x_data_ = true;
+  return status_;
+}
+
+
+template <typename T, class Algorithm>
+bool InterpolatorBase<T, Algorithm>::Initialize(
+    const T *x_data, const T *y_data, int data_length) {
+  // Default settings
+  cached_index_ = 0;
+  data_length_ = 0;
+  x_start_offset_ = 0;
+  x_inverse_sampling_interval_ = 0;
+  state_ = NULL;
+  // Input data is externally owned
+  own_x_data_ = false;
+  x_data_ = x_data;
+  y_data_ = y_data;
+  data_length_ = data_length;
+  last_element_index_ = data_length - 1;
+  // Check input data sanity
+  for (int n = 0; n < last_element_index_; ++n) {
+    if (x_data_[n + 1] <= x_data_[n]) {
+      ALOGE("InterpolatorBase::Initialize: xData are not ordered or "
+              "contain equal values (X[%d] <= X[%d]) (%.5e <= %.5e)",
+              n + 1, n, x_data_[n + 1], x_data_[n]);
+      status_ = false;
+      return false;
+    }
+  }
+  // Pre-compute internal state by calling the corresponding function of the
+  // derived class.
+  status_ = static_cast<Algorithm*>(this)->SetInternalState();
+  return status_;
+}
+
+template <typename T, class Algorithm>
+T InterpolatorBase<T, Algorithm>::Interpolate(T x) {
+#ifndef NDEBUG
+  if (cached_index_ < 0 || cached_index_ > data_length_ - 2) {
+    LoggerError("InterpolatorBase:Interpolate: CachedIndex_ out of bounds "
+                  "[0, %d, %d]", cached_index_, data_length_ - 2);
+  }
+#endif
+  // Search for the containing interval
+  if (x <= x_data_[cached_index_]) {
+    if (cached_index_ <= 0) {
+      cached_index_ = 0;
+      return y_data_[0];
+    }
+    if (x >= x_data_[cached_index_ - 1]) {
+      cached_index_--;  // Fast descending
+    } else {
+      if (x <= x_data_[0]) {
+        cached_index_ = 0;
+        return y_data_[0];
+      }
+      cached_index_ = SearchIndex(x_data_, x, 0, cached_index_);
+    }
+  } else {
+    if (cached_index_ >= last_element_index_) {
+      cached_index_ = last_element_index_;
+      return y_data_[last_element_index_];
+    }
+    if (x > x_data_[cached_index_ + 1]) {
+      if (cached_index_ + 2 > last_element_index_) {
+        cached_index_ = last_element_index_ - 1;
+        return y_data_[last_element_index_];
+      }
+      if (x <= x_data_[cached_index_ + 2]) {
+        cached_index_++;  // Fast ascending
+      } else {
+        if (x >= x_data_[last_element_index_]) {
+          cached_index_ = last_element_index_ - 1;
+          return y_data_[last_element_index_];
+        }
+        cached_index_ = SearchIndex(
+            x_data_, x, cached_index_, last_element_index_);
+      }
+    }
+  }
+  // Compute interpolated value by calling the corresponding function of the
+  // derived class.
+  return static_cast<Algorithm*>(this)->MethodSpecificInterpolation(x);
+}
+
+}  // namespace sigmod
+
+}  // namespace le_fx
+
+#endif  // LE_FX_ENGINE_DSP_CORE_INTERPOLATOR_BASE_INL_H_
diff --git a/media/libeffects/loudness/dsp/core/interpolator_base.h b/media/libeffects/loudness/dsp/core/interpolator_base.h
new file mode 100644
index 0000000..0cd1a35
--- /dev/null
+++ b/media/libeffects/loudness/dsp/core/interpolator_base.h
@@ -0,0 +1,112 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef LE_FX_ENGINE_DSP_CORE_INTERPOLATOR_BASE_H_
+#define LE_FX_ENGINE_DSP_CORE_INTERPOLATOR_BASE_H_
+
+#include "common/core/types.h"
+
+namespace le_fx {
+
+namespace sigmod {
+
+// Interpolation base-class that provides the interface, while it is the derived
+// class that provides the specific interpolation algorithm. The following list
+// of interpolation algorithms is currently present:
+//
+// InterpolationSine<T>: weighted interpolation between y_data[n] and
+//                       y_data[n+1] using a sin(.) weighting factor from
+//                       0 to pi/4.
+// InterpolationLinear<T>: linear interpolation
+// InterpolationSplines<T>: spline-based interpolation
+//
+// Example (using derived spline-based interpolation class):
+//  InterpolatorSplines<float> interp(x_data, y_data, data_length);
+//  for (int n = 0; n < data_length; n++) Y[n] = interp.Interpolate(X[n]);
+//
+template <typename T, class Algorithm>
+class InterpolatorBase {
+ public:
+  InterpolatorBase();
+  ~InterpolatorBase();
+
+  // Generic random-access interpolation with arbitrary spaced x-axis samples.
+  // Below X[0], the interpolator returns Y[0]. Above X[data_length-1], it
+  // returns Y[data_length-1].
+  T Interpolate(T x);
+
+  bool get_status() const {
+    return status_;
+  }
+
+  // Initializes internal buffers.
+  //  x_data: [(data_length)x1] x-axis coordinates (searching axis)
+  //  y_data: [(data_length)x1] y-axis coordinates (interpolation axis)
+  //  data_length: number of points
+  // returns `true` if everything is ok, `false` otherwise
+  bool Initialize(const T *x_data, const T *y_data, int data_length);
+
+  // Initializes internal buffers.
+  //  x_data: x-axis coordinates (searching axis)
+  //  y_data: y-axis coordinates (interpolating axis)
+  // returns `true` if everything is ok, `false` otherwise
+  bool Initialize(const vector<T> &x_data, const vector<T> &y_data);
+
+  // Initialization for regularly sampled sequences, where:
+  //  x_data[i] = x_start_offset + i * x_sampling_interval
+  bool Initialize(double x_start_offset,
+                  double x_sampling_interval,
+                  const vector<T> &y_data);
+
+  // Initialization for regularly sampled sequences, where:
+  //  x_data[i] = x_start_offset + i * x_sampling_interval
+  bool Initialize(double x_start_offset,
+                  double x_sampling_interval,
+                  const T *y_data,
+                  int data_length);
+
+ protected:
+  // Is set to false if something goes wrong, and to true if everything is ok.
+  bool status_;
+
+  // The start-index of the previously searched interval
+  int cached_index_;
+
+  // Data points
+  const T *x_data_;  // Externally or internally owned, depending on own_x_data_
+  const T *y_data_;  // Externally owned (always)
+  int data_length_;
+  // Index of the last element `data_length_ - 1` kept here for optimization
+  int last_element_index_;
+  bool own_x_data_;
+  // For regularly-sampled sequences, keep only the start offset and the sampling interval
+  T x_start_offset_;
+  float x_inverse_sampling_interval_;
+
+  // Algorithm state (internally owned)
+  double *state_;
+
+ private:
+  LE_FX_DISALLOW_COPY_AND_ASSIGN(InterpolatorBase);
+};
+
+}  // namespace sigmod
+
+}  // namespace le_fx
+
+#include "dsp/core/interpolator_base-inl.h"
+
+#endif  // LE_FX_ENGINE_DSP_CORE_INTERPOLATOR_BASE_H_
diff --git a/media/libeffects/loudness/dsp/core/interpolator_linear.h b/media/libeffects/loudness/dsp/core/interpolator_linear.h
new file mode 100644
index 0000000..434698a
--- /dev/null
+++ b/media/libeffects/loudness/dsp/core/interpolator_linear.h
@@ -0,0 +1,81 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef LE_FX_ENGINE_DSP_CORE_INTERPOLATOR_LINEAR_H_
+#define LE_FX_ENGINE_DSP_CORE_INTERPOLATOR_LINEAR_H_
+
+#include <math.h>
+#include "dsp/core/interpolator_base.h"
+
+namespace le_fx {
+
+namespace sigmod {
+
+// Linear interpolation class.
+//
+// The main functionality of this class is provided by its base-class, so
+// please refer to: InterpolatorBase
+//
+// Example:
+//  InterpolatorLinear<float> interp(x_data, y_data, data_length);
+//  for (int n = 0; n < data_length; n++) Y[n] = interp.Interpolate(X[n]);
+//
+template <typename T>
+class InterpolatorLinear: public InterpolatorBase<T, InterpolatorLinear<T> > {
+ public:
+  InterpolatorLinear() { }
+  ~InterpolatorLinear() { }
+
+ protected:
+  // Provides the main implementation of the linear interpolation algorithm.
+  // Assumes that: X[cached_index_] < x < X[cached_index_ + 1]
+  T MethodSpecificInterpolation(T x);
+
+  // Pre-compute internal state_ parameters.
+  bool SetInternalState();
+
+ private:
+  friend class InterpolatorBase<T, InterpolatorLinear<T> >;
+  typedef InterpolatorBase<T, InterpolatorLinear<T> > BaseClass;
+  using BaseClass::status_;
+  using BaseClass::cached_index_;
+  using BaseClass::x_data_;
+  using BaseClass::y_data_;
+  using BaseClass::data_length_;
+  using BaseClass::state_;
+
+  LE_FX_DISALLOW_COPY_AND_ASSIGN(InterpolatorLinear<T>);
+};
+
+template <typename T>
+inline T InterpolatorLinear<T>::MethodSpecificInterpolation(T x) {
+  T dX = x_data_[cached_index_ + 1] - x_data_[cached_index_];
+  T dY = y_data_[cached_index_ + 1] - y_data_[cached_index_];
+  T dx = x - x_data_[cached_index_];
+  return y_data_[cached_index_] + (dY * dx) / dX;
+}
+
+template <typename T>
+bool InterpolatorLinear<T>::SetInternalState() {
+  state_ = NULL;
+  return true;
+}
+
+}  // namespace sigmod
+
+}  // namespace le_fx
+
+#endif  // LE_FX_ENGINE_DSP_CORE_INTERPOLATOR_LINEAR_H_
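[Editor's sketch, not part of the patch: the regularly-sampled Initialize() overload from InterpolatorBase combined with the linear interpolator above; values and the function name are illustrative.]

    #include <vector>
    #include "dsp/core/interpolator_linear.h"

    void InterpolatorLinearExample() {
      // y sampled at x = 0.0, 0.5, 1.0, 1.5 (start offset 0.0, interval 0.5).
      std::vector<float> y;
      y.push_back(0.0f);
      y.push_back(1.0f);
      y.push_back(4.0f);
      y.push_back(9.0f);
      le_fx::sigmod::InterpolatorLinear<float> interp;
      if (interp.Initialize(0.0, 0.5, y)) {
        const float v = interp.Interpolate(0.25f);  // halfway between y[0] and y[1] -> 0.5
        (void) v;
      }
    }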
diff --git a/media/libeffects/proxy/EffectProxy.cpp b/media/libeffects/proxy/EffectProxy.cpp
index 77c6e89..dd4ad08 100644
--- a/media/libeffects/proxy/EffectProxy.cpp
+++ b/media/libeffects/proxy/EffectProxy.cpp
@@ -30,9 +30,10 @@
 // This is a dummy proxy descriptor just to return to Factory during the initial
 // GetDescriptor call. Later in the factory, it is replaced with the
 // SW sub effect descriptor
+// proxy UUID af8da7e0-2ca1-11e3-b71d-0002a5d5c51b
 const effect_descriptor_t gProxyDescriptor = {
         EFFECT_UUID_INITIALIZER, // type
-        EFFECT_UUID_INITIALIZER, // uuid
+        {0xaf8da7e0, 0x2ca1, 0x11e3, 0xb71d, { 0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b }}, // uuid
         EFFECT_CONTROL_API_VERSION, //version of effect control API
         (EFFECT_FLAG_TYPE_INSERT | EFFECT_FLAG_INSERT_LAST |
          EFFECT_FLAG_VOLUME_CTRL), // effect capability flags
@@ -48,6 +49,7 @@
     &gProxyDescriptor,
 };
 
+
 int EffectProxyCreate(const effect_uuid_t *uuid,
                             int32_t             sessionId,
                             int32_t             ioId,
@@ -65,6 +67,7 @@
     pContext->ioId = ioId;
     pContext->uuid = *uuid;
     pContext->common_itfe = &gEffectInterface;
+
     // The sub effects will be created in effect_command when the first command
     // for the effect is received
     pContext->eHandle[SUB_FX_HOST] = pContext->eHandle[SUB_FX_OFFLOAD] = NULL;
@@ -109,6 +112,10 @@
         uuid_print.node[1], uuid_print.node[2], uuid_print.node[3],
         uuid_print.node[4], uuid_print.node[5]);
 #endif
+
+    pContext->replySize = PROXY_REPLY_SIZE_DEFAULT;
+    pContext->replyData = (char *)malloc(PROXY_REPLY_SIZE_DEFAULT);
+
     *pHandle = (effect_handle_t)pContext;
     ALOGV("EffectCreate end");
     return 0;
@@ -122,6 +129,8 @@
     }
     ALOGV("EffectRelease");
     delete pContext->desc;
+    free(pContext->replyData);
+
     if (pContext->eHandle[SUB_FX_HOST])
        EffectRelease(pContext->eHandle[SUB_FX_HOST]);
     if (pContext->eHandle[SUB_FX_OFFLOAD])
@@ -155,7 +164,6 @@
         int index = pContext->index;
         // if the index refers to HW , do not do anything. Just return.
         if (index == SUB_FX_HOST) {
-            ALOGV("Calling CoreProcess");
             ret = (*pContext->eHandle[index])->process(pContext->eHandle[index],
                                                        inBuffer, outBuffer);
         }
@@ -172,7 +180,7 @@
                               void                *pReplyData) {
 
     EffectContext *pContext = (EffectContext *) self;
-    int status;
+    int status = 0;
     if (pContext == NULL) {
         ALOGV("Effect_command() Proxy context is NULL");
         return -EINVAL;
@@ -237,23 +245,56 @@
         ALOGV("Effect_command: effect index is neither offload nor host");
         return -EINVAL;
     }
-    ALOGV("Effect_command: pContext->eHandle[%d]: %p",
-            index, pContext->eHandle[index]);
-    if (pContext->eHandle[SUB_FX_HOST])
-         (*pContext->eHandle[SUB_FX_HOST])->command(
-                             pContext->eHandle[SUB_FX_HOST], cmdCode, cmdSize,
-                             pCmdData, replySize, pReplyData);
-    if (pContext->eHandle[SUB_FX_OFFLOAD]) {
-        // In case of SET CMD, when the offload stream is unavailable,
-        // we will store the effect param values in the DSP effect wrapper.
-        // When the offload effects get enabled, we send these values to the
-        // DSP during Effect_config.
-        // So,we send the params to DSP wrapper also
-        (*pContext->eHandle[SUB_FX_OFFLOAD])->command(
-                         pContext->eHandle[SUB_FX_OFFLOAD], cmdCode, cmdSize,
-                         pCmdData, replySize, pReplyData);
+
+    // Getter command replies are only taken from the active sub effect.
+    int *subStatus[SUB_FX_COUNT];
+    uint32_t *subReplySize[SUB_FX_COUNT];
+    void *subReplyData[SUB_FX_COUNT];
+    uint32_t tmpSize;
+    int tmpStatus;
+
+    // grow temp reply buffer if needed
+    if (replySize != NULL) {
+        tmpSize = pContext->replySize;
+        while (tmpSize < *replySize && tmpSize < PROXY_REPLY_SIZE_MAX) {
+            tmpSize *= 2;
+        }
+        if (tmpSize > pContext->replySize) {
+            ALOGV("Effect_command grow reply buf to %d", tmpSize);
+            pContext->replyData = (char *)realloc(pContext->replyData, tmpSize);
+            pContext->replySize = tmpSize;
+        }
+        if (tmpSize > *replySize) {
+            tmpSize = *replySize;
+        }
+    } else {
+        tmpSize = 0;
     }
-    return 0;
+    // tmpSize is now the actual reply size for the non-active sub effect
+
+    // Send command to sub effects. The command is sent to all sub effects so that their internal
+    // state is kept in sync.
+    // Only the reply from the active sub effect is returned to the caller. The reply from the
+    // other sub effect is written to pContext->replyData and discarded.
+    for (int i = 0; i < SUB_FX_COUNT; i++) {
+        if (pContext->eHandle[i] == NULL) {
+            continue;
+        }
+        if (i == index) {
+            subStatus[i] = &status;
+            subReplySize[i] = replySize;
+            subReplyData[i] = pReplyData;
+        } else {
+            subStatus[i] = &tmpStatus;
+            subReplySize[i] = replySize == NULL ? NULL : &tmpSize;
+            subReplyData[i] = pReplyData == NULL ? NULL : pContext->replyData;
+        }
+        *subStatus[i] = (*pContext->eHandle[i])->command(
+                             pContext->eHandle[i], cmdCode, cmdSize,
+                             pCmdData, subReplySize[i], subReplyData[i]);
+    }
+
+    return status;
 }    /* end Effect_command */
 
 
diff --git a/media/libeffects/proxy/EffectProxy.h b/media/libeffects/proxy/EffectProxy.h
index 8992f93..acbe17e 100644
--- a/media/libeffects/proxy/EffectProxy.h
+++ b/media/libeffects/proxy/EffectProxy.h
@@ -57,6 +57,9 @@
   NULL,
 };
 
+#define PROXY_REPLY_SIZE_MAX     (64 * 1024) // must be power of two
+#define PROXY_REPLY_SIZE_DEFAULT 32          // must be power of two
+
 struct EffectContext {
   const struct effect_interface_s  *common_itfe; // Holds the itfe of the Proxy
   effect_descriptor_t*  desc;                    // Points to the sub effect descriptors
@@ -67,6 +70,8 @@
   int32_t               ioId;        // The ioId in which the effect is created.
                                      // Stored in context to pass on to sub effect creation
   effect_uuid_t         uuid;        // UUID of the Proxy
+  char*                 replyData;   // temporary buffer for non-active sub effect command replies
+  uint32_t              replySize;   // current size of temporary reply buffer
 };
 
 #if __cplusplus
diff --git a/media/libeffects/visualizer/EffectVisualizer.cpp b/media/libeffects/visualizer/EffectVisualizer.cpp
index e7eccf1..dc403ab 100644
--- a/media/libeffects/visualizer/EffectVisualizer.cpp
+++ b/media/libeffects/visualizer/EffectVisualizer.cpp
@@ -22,6 +22,7 @@
 #include <string.h>
 #include <new>
 #include <time.h>
+#include <math.h>
 #include <audio_effects/effect_visualizer.h>
 
 
@@ -54,6 +55,18 @@
 
 #define CAPTURE_BUF_SIZE 65536 // "64k should be enough for everyone"
 
+#define DISCARD_MEASUREMENTS_TIME_MS 2000 // discard measurements older than this number of ms
+
+// maximum number of buffers for which we keep track of the measurements
+#define MEASUREMENT_WINDOW_MAX_SIZE_IN_BUFFERS 25 // note: buffer index is stored in uint8_t
+
+
+struct BufferStats {
+    bool mIsValid;
+    uint16_t mPeakU16; // the positive peak of the absolute value of the samples in a buffer
+    float mRmsSquared; // the average square of the samples in a buffer
+};
+
 struct VisualizerContext {
     const struct effect_interface_s *mItfe;
     effect_config_t mConfig;
@@ -61,15 +74,38 @@
     uint32_t mCaptureSize;
     uint32_t mScalingMode;
     uint8_t mState;
-    uint8_t mLastCaptureIdx;
+    uint32_t mLastCaptureIdx;
     uint32_t mLatency;
     struct timespec mBufferUpdateTime;
     uint8_t mCaptureBuf[CAPTURE_BUF_SIZE];
+    // for measurements
+    uint8_t mChannelCount; // to avoid recomputing it every time a buffer is processed
+    uint32_t mMeasurementMode;
+    uint8_t mMeasurementWindowSizeInBuffers;
+    uint8_t mMeasurementBufferIdx;
+    BufferStats mPastMeasurements[MEASUREMENT_WINDOW_MAX_SIZE_IN_BUFFERS];
 };
 
 //
 //--- Local functions
 //
+uint32_t Visualizer_getDeltaTimeMsFromUpdatedTime(VisualizerContext* pContext) {
+    uint32_t deltaMs = 0;
+    if (pContext->mBufferUpdateTime.tv_sec != 0) {
+        struct timespec ts;
+        if (clock_gettime(CLOCK_MONOTONIC, &ts) == 0) {
+            time_t secs = ts.tv_sec - pContext->mBufferUpdateTime.tv_sec;
+            long nsec = ts.tv_nsec - pContext->mBufferUpdateTime.tv_nsec;
+            if (nsec < 0) {
+                --secs;
+                nsec += 1000000000;
+            }
+            deltaMs = secs * 1000 + nsec / 1000000;
+        }
+    }
+    return deltaMs;
+}
+
 
 void Visualizer_reset(VisualizerContext *pContext)
 {
@@ -165,9 +201,21 @@
     pContext->mConfig.outputCfg.bufferProvider.cookie = NULL;
     pContext->mConfig.outputCfg.mask = EFFECT_CONFIG_ALL;
 
+    // visualization initialization
     pContext->mCaptureSize = VISUALIZER_CAPTURE_SIZE_MAX;
     pContext->mScalingMode = VISUALIZER_SCALING_MODE_NORMALIZED;
 
+    // measurement initialization
+    pContext->mChannelCount = popcount(pContext->mConfig.inputCfg.channels);
+    pContext->mMeasurementMode = MEASUREMENT_MODE_NONE;
+    pContext->mMeasurementWindowSizeInBuffers = MEASUREMENT_WINDOW_MAX_SIZE_IN_BUFFERS;
+    pContext->mMeasurementBufferIdx = 0;
+    for (uint32_t i=0 ; i<pContext->mMeasurementWindowSizeInBuffers ; i++) {
+        pContext->mPastMeasurements[i].mIsValid = false;
+        pContext->mPastMeasurements[i].mPeakU16 = 0;
+        pContext->mPastMeasurements[i].mRmsSquared = 0;
+    }
+
     Visualizer_setConfig(pContext, &pContext->mConfig);
 
     return 0;
@@ -270,6 +318,30 @@
         return -EINVAL;
     }
 
+    // perform measurements if needed
+    if (pContext->mMeasurementMode & MEASUREMENT_MODE_PEAK_RMS) {
+        // find the peak and RMS squared for the new buffer
+        uint32_t inIdx;
+        int16_t maxSample = 0;
+        float rmsSqAcc = 0;
+        for (inIdx = 0 ; inIdx < inBuffer->frameCount * pContext->mChannelCount ; inIdx++) {
+            if (inBuffer->s16[inIdx] > maxSample) {
+                maxSample = inBuffer->s16[inIdx];
+            } else if (-inBuffer->s16[inIdx] > maxSample) {
+                maxSample = -inBuffer->s16[inIdx];
+            }
+            rmsSqAcc += (inBuffer->s16[inIdx] * inBuffer->s16[inIdx]);
+        }
+        // store the measurement
+        pContext->mPastMeasurements[pContext->mMeasurementBufferIdx].mPeakU16 = (uint16_t)maxSample;
+        pContext->mPastMeasurements[pContext->mMeasurementBufferIdx].mRmsSquared =
+                rmsSqAcc / (inBuffer->frameCount * pContext->mChannelCount);
+        pContext->mPastMeasurements[pContext->mMeasurementBufferIdx].mIsValid = true;
+        if (++pContext->mMeasurementBufferIdx >= pContext->mMeasurementWindowSizeInBuffers) {
+            pContext->mMeasurementBufferIdx = 0;
+        }
+    }
+
     // all code below assumes stereo 16 bit PCM output and input
     int32_t shift;
 
@@ -423,6 +495,12 @@
             p->vsize = sizeof(uint32_t);
             *replySize += sizeof(uint32_t);
             break;
+        case VISUALIZER_PARAM_MEASUREMENT_MODE:
+            ALOGV("get mMeasurementMode = %d", pContext->mMeasurementMode);
+            *((uint32_t *)p->data + 1) = pContext->mMeasurementMode;
+            p->vsize = sizeof(uint32_t);
+            *replySize += sizeof(uint32_t);
+            break;
         default:
             p->status = -EINVAL;
         }
@@ -452,6 +530,10 @@
             pContext->mLatency = *((uint32_t *)p->data + 1);
             ALOGV("set mLatency = %d", pContext->mLatency);
             break;
+        case VISUALIZER_PARAM_MEASUREMENT_MODE:
+            pContext->mMeasurementMode = *((uint32_t *)p->data + 1);
+            ALOGV("set mMeasurementMode = %d", pContext->mMeasurementMode);
+            break;
         default:
             *(int32_t *)pReplyData = -EINVAL;
         }
@@ -470,24 +552,12 @@
         }
         if (pContext->mState == VISUALIZER_STATE_ACTIVE) {
             int32_t latencyMs = pContext->mLatency;
-            uint32_t deltaMs = 0;
-            if (pContext->mBufferUpdateTime.tv_sec != 0) {
-                struct timespec ts;
-                if (clock_gettime(CLOCK_MONOTONIC, &ts) == 0) {
-                    time_t secs = ts.tv_sec - pContext->mBufferUpdateTime.tv_sec;
-                    long nsec = ts.tv_nsec - pContext->mBufferUpdateTime.tv_nsec;
-                    if (nsec < 0) {
-                        --secs;
-                        nsec += 1000000000;
-                    }
-                    deltaMs = secs * 1000 + nsec / 1000000;
-                    latencyMs -= deltaMs;
-                    if (latencyMs < 0) {
-                        latencyMs = 0;
-                    }
-                }
+            const uint32_t deltaMs = Visualizer_getDeltaTimeMsFromUpdatedTime(pContext);
+            latencyMs -= deltaMs;
+            if (latencyMs < 0) {
+                latencyMs = 0;
             }
-            uint32_t deltaSmpl = pContext->mConfig.inputCfg.samplingRate * latencyMs / 1000;
+            const uint32_t deltaSmpl = pContext->mConfig.inputCfg.samplingRate * latencyMs / 1000;
 
             int32_t capturePoint = pContext->mCaptureIdx - pContext->mCaptureSize - deltaSmpl;
             int32_t captureSize = pContext->mCaptureSize;
@@ -499,7 +569,7 @@
                 memcpy(pReplyData,
                        pContext->mCaptureBuf + CAPTURE_BUF_SIZE + capturePoint,
                        size);
-                pReplyData += size;
+                pReplyData = (char *)pReplyData + size;
                 captureSize -= size;
                 capturePoint = 0;
             }
@@ -525,6 +595,54 @@
 
         break;
 
+    case VISUALIZER_CMD_MEASURE: {
+        uint16_t peakU16 = 0;
+        float sumRmsSquared = 0.0f;
+        uint8_t nbValidMeasurements = 0;
+        // reset measurements if last measurement was too long ago (which implies stored
+        // measurements aren't relevant anymore and shouldn't bias the new one)
+        const int32_t delayMs = Visualizer_getDeltaTimeMsFromUpdatedTime(pContext);
+        if (delayMs > DISCARD_MEASUREMENTS_TIME_MS) {
+            ALOGV("Discarding measurements, last measurement is %dms old", delayMs);
+            for (uint32_t i=0 ; i<pContext->mMeasurementWindowSizeInBuffers ; i++) {
+                pContext->mPastMeasurements[i].mIsValid = false;
+                pContext->mPastMeasurements[i].mPeakU16 = 0;
+                pContext->mPastMeasurements[i].mRmsSquared = 0;
+            }
+            pContext->mMeasurementBufferIdx = 0;
+        } else {
+            // only use actual measurements, otherwise the first RMS measurements, taken before
+            // MEASUREMENT_WINDOW_MAX_SIZE_IN_BUFFERS buffers have been played, would always be
+            // artificially low
+            for (uint32_t i=0 ; i < pContext->mMeasurementWindowSizeInBuffers ; i++) {
+                if (pContext->mPastMeasurements[i].mIsValid) {
+                    if (pContext->mPastMeasurements[i].mPeakU16 > peakU16) {
+                        peakU16 = pContext->mPastMeasurements[i].mPeakU16;
+                    }
+                    sumRmsSquared += pContext->mPastMeasurements[i].mRmsSquared;
+                    nbValidMeasurements++;
+                }
+            }
+        }
+        float rms = nbValidMeasurements == 0 ? 0.0f : sqrtf(sumRmsSquared / nbValidMeasurements);
+        int32_t* pIntReplyData = (int32_t*)pReplyData;
+        // convert from I16 sample values to mB and write results
+        if (rms < 0.000016f) {
+            pIntReplyData[MEASUREMENT_IDX_RMS] = -9600; //-96dB
+        } else {
+            pIntReplyData[MEASUREMENT_IDX_RMS] = (int32_t) (2000 * log10(rms / 32767.0f));
+        }
+        if (peakU16 == 0) {
+            pIntReplyData[MEASUREMENT_IDX_PEAK] = -9600; //-96dB
+        } else {
+            pIntReplyData[MEASUREMENT_IDX_PEAK] = (int32_t) (2000 * log10(peakU16 / 32767.0f));
+        }
+        ALOGV("VISUALIZER_CMD_MEASURE peak=%d (%dmB), rms=%.1f (%dmB)",
+                peakU16, pIntReplyData[MEASUREMENT_IDX_PEAK],
+                rms, pIntReplyData[MEASUREMENT_IDX_RMS]);
+        }
+        break;
+
     default:
         ALOGW("Visualizer_command invalid command %d",cmdCode);
         return -EINVAL;
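[Editor's note, not part of the patch: the measurement values returned by VISUALIZER_CMD_MEASURE above are millibels computed as 2000 * log10(value / 32767); a few worked values as plain arithmetic.]

    #include <math.h>
    #include <stdio.h>
    #include <stdint.h>

    int main() {
        // Full scale maps to 0 mB, half scale to about -602 mB, and anything
        // below the small RMS floor is clamped to -9600 mB (-96 dB) by the effect.
        printf("%d\n", (int32_t)(2000 * log10(32767 / 32767.0f)));  //     0
        printf("%d\n", (int32_t)(2000 * log10(16384 / 32767.0f)));  //  ~-602
        printf("%d\n", (int32_t)(2000 * log10(327   / 32767.0f)));  // ~-4001
        return 0;
    }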
diff --git a/media/libmedia/AudioRecord.cpp b/media/libmedia/AudioRecord.cpp
index e934a3e..666fafa 100644
--- a/media/libmedia/AudioRecord.cpp
+++ b/media/libmedia/AudioRecord.cpp
@@ -105,6 +105,7 @@
         // Otherwise the callback thread will never exit.
         stop();
         if (mAudioRecordThread != 0) {
+            mProxy->interrupt();
             mAudioRecordThread->requestExit();  // see comment in AudioRecord.h
             mAudioRecordThread->requestExitAndWait();
             mAudioRecordThread.clear();
@@ -473,7 +474,7 @@
     ALOGE_IF(originalSessionId != 0 && mSessionId != originalSessionId,
             "session ID changed from %d to %d", originalSessionId, mSessionId);
 
-    if (record == 0) {
+    if (record == 0 || status != NO_ERROR) {
         ALOGE("AudioFlinger could not create record track, status: %d", status);
         AudioSystem::releaseInput(input);
         return status;
@@ -483,6 +484,11 @@
         ALOGE("Could not get control block");
         return NO_INIT;
     }
+    void *iMemPointer = iMem->pointer();
+    if (iMemPointer == NULL) {
+        ALOGE("Could not get control block pointer");
+        return NO_INIT;
+    }
     if (mAudioRecord != 0) {
         mAudioRecord->asBinder()->unlinkToDeath(mDeathNotifier, this);
         mDeathNotifier.clear();
@@ -490,7 +496,7 @@
     mInput = input;
     mAudioRecord = record;
     mCblkMemory = iMem;
-    audio_track_cblk_t* cblk = static_cast<audio_track_cblk_t*>(iMem->pointer());
+    audio_track_cblk_t* cblk = static_cast<audio_track_cblk_t*>(iMemPointer);
     mCblk = cblk;
     // FIXME missing fast track frameCount logic
     mAwaitBoost = false;
@@ -960,7 +966,7 @@
 // =========================================================================
 
 AudioRecord::AudioRecordThread::AudioRecordThread(AudioRecord& receiver, bool bCanCallJava)
-    : Thread(bCanCallJava), mReceiver(receiver), mPaused(true), mResumeLatch(false)
+    : Thread(bCanCallJava), mReceiver(receiver), mPaused(true), mPausedInt(false), mPausedNs(0LL)
 {
 }
 
@@ -977,25 +983,32 @@
             // caller will check for exitPending()
             return true;
         }
+        if (mPausedInt) {
+            if (mPausedNs > 0) {
+                (void) mMyCond.waitRelative(mMyLock, mPausedNs);
+            } else {
+                mMyCond.wait(mMyLock);
+            }
+            mPausedInt = false;
+            return true;
+        }
     }
     nsecs_t ns =  mReceiver.processAudioBuffer(this);
     switch (ns) {
     case 0:
         return true;
-    case NS_WHENEVER:
-        sleep(1);
-        return true;
     case NS_INACTIVE:
-        pauseConditional();
+        pauseInternal();
         return true;
     case NS_NEVER:
         return false;
+    case NS_WHENEVER:
+        // FIXME increase poll interval, or make event-driven
+        ns = 1000000000LL;
+        // fall through
     default:
         LOG_ALWAYS_FATAL_IF(ns < 0, "processAudioBuffer() returned %lld", ns);
-        struct timespec req;
-        req.tv_sec = ns / 1000000000LL;
-        req.tv_nsec = ns % 1000000000LL;
-        nanosleep(&req, NULL /*rem*/);
+        pauseInternal(ns);
         return true;
     }
 }
@@ -1004,38 +1017,37 @@
 {
     // must be in this order to avoid a race condition
     Thread::requestExit();
-    resume();
+    AutoMutex _l(mMyLock);
+    if (mPaused || mPausedInt) {
+        mPaused = false;
+        mPausedInt = false;
+        mMyCond.signal();
+    }
 }
 
 void AudioRecord::AudioRecordThread::pause()
 {
     AutoMutex _l(mMyLock);
     mPaused = true;
-    mResumeLatch = false;
-}
-
-void AudioRecord::AudioRecordThread::pauseConditional()
-{
-    AutoMutex _l(mMyLock);
-    if (mResumeLatch) {
-        mResumeLatch = false;
-    } else {
-        mPaused = true;
-    }
 }
 
 void AudioRecord::AudioRecordThread::resume()
 {
     AutoMutex _l(mMyLock);
-    if (mPaused) {
+    if (mPaused || mPausedInt) {
         mPaused = false;
-        mResumeLatch = false;
+        mPausedInt = false;
         mMyCond.signal();
-    } else {
-        mResumeLatch = true;
     }
 }
 
+void AudioRecord::AudioRecordThread::pauseInternal(nsecs_t ns)
+{
+    AutoMutex _l(mMyLock);
+    mPausedInt = true;
+    mPausedNs = ns;
+}
+
 // -------------------------------------------------------------------------
 
 }; // namespace android
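A simplified, framework-free model of the internal pause introduced above (mPausedInt/mPausedNs): the callback thread parks itself for a bounded or unbounded interval, and resume() or exit cuts the wait short. This is only a sketch of the pattern using the C++ standard library, not the Android Thread/Condition classes.

#include <chrono>
#include <condition_variable>
#include <cstdint>
#include <mutex>

// pauseInternal(ns) asks the loop to sleep, waitIfPaused() performs the
// (possibly timed) wait, and resume() wakes it early.
class PausableLoop {
public:
    void pauseInternal(int64_t ns = 0) {
        std::lock_guard<std::mutex> l(mLock);
        mPausedInt = true;
        mPausedNs = ns;
    }
    void resume() {
        std::lock_guard<std::mutex> l(mLock);
        if (mPausedInt) {
            mPausedInt = false;
            mCond.notify_one();
        }
    }
    void waitIfPaused() {
        std::unique_lock<std::mutex> l(mLock);
        if (!mPausedInt) {
            return;
        }
        if (mPausedNs > 0) {
            mCond.wait_for(l, std::chrono::nanoseconds(mPausedNs));  // bounded sleep
        } else {
            mCond.wait(l);                                           // sleep until resumed
        }
        mPausedInt = false;
    }
private:
    std::mutex mLock;
    std::condition_variable mCond;
    bool mPausedInt = false;
    int64_t mPausedNs = 0;
};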
diff --git a/media/libmedia/AudioTrack.cpp b/media/libmedia/AudioTrack.cpp
index 744faee..37d50cf 100644
--- a/media/libmedia/AudioTrack.cpp
+++ b/media/libmedia/AudioTrack.cpp
@@ -534,6 +534,9 @@
 
     mProxy->setVolumeLR((uint32_t(uint16_t(right * 0x1000)) << 16) | uint16_t(left * 0x1000));
 
+    if (isOffloaded()) {
+        mAudioTrack->signal();
+    }
     return NO_ERROR;
 }
 
@@ -815,12 +818,29 @@
         return NO_INIT;
     }
 
+    // Not all of these values are needed under all conditions, but it is easier to get them all
+
     uint32_t afLatency;
-    if ((status = AudioSystem::getLatency(output, streamType, &afLatency)) != NO_ERROR) {
+    status = AudioSystem::getLatency(output, streamType, &afLatency);
+    if (status != NO_ERROR) {
         ALOGE("getLatency(%d) failed status %d", output, status);
         return NO_INIT;
     }
 
+    size_t afFrameCount;
+    status = AudioSystem::getFrameCount(output, streamType, &afFrameCount);
+    if (status != NO_ERROR) {
+        ALOGE("getFrameCount(output=%d, streamType=%d) status %d", output, streamType, status);
+        return NO_INIT;
+    }
+
+    uint32_t afSampleRate;
+    status = AudioSystem::getSamplingRate(output, streamType, &afSampleRate);
+    if (status != NO_ERROR) {
+        ALOGE("getSamplingRate(output=%d, streamType=%d) status %d", output, streamType, status);
+        return NO_INIT;
+    }
+
     // Client decides whether the track is TIMED (see below), but can only express a preference
     // for FAST.  Server will perform additional tests.
     if ((flags & AUDIO_OUTPUT_FLAG_FAST) && !(
@@ -836,6 +856,14 @@
     }
     ALOGV("createTrack_l() output %d afLatency %d", output, afLatency);
 
+    // The client's AudioTrack buffer is divided into n parts for purpose of wakeup by server, where
+    //  n = 1   fast track; nBuffering is ignored
+    //  n = 2   normal track, no sample rate conversion
+    //  n = 3   normal track, with sample rate conversion
+    //          (pessimistic; some non-1:1 conversion ratios don't actually need triple-buffering)
+    //  n > 3   very high latency or very small notification interval; nBuffering is ignored
+    const uint32_t nBuffering = (sampleRate == afSampleRate) ? 2 : 3;
+
     mNotificationFramesAct = mNotificationFramesReq;
 
     if (!audio_is_linear_pcm(format)) {
@@ -844,13 +872,6 @@
             // Same comment as below about ignoring frameCount parameter for set()
             frameCount = sharedBuffer->size();
         } else if (frameCount == 0) {
-            size_t afFrameCount;
-            status = AudioSystem::getFrameCount(output, streamType, &afFrameCount);
-            if (status != NO_ERROR) {
-                ALOGE("getFrameCount(output=%d, streamType=%d) status %d", output, streamType,
-                        status);
-                return NO_INIT;
-            }
             frameCount = afFrameCount;
         }
         if (mNotificationFramesAct != frameCount) {
@@ -880,26 +901,13 @@
     } else if (!(flags & AUDIO_OUTPUT_FLAG_FAST)) {
 
         // FIXME move these calculations and associated checks to server
-        uint32_t afSampleRate;
-        status = AudioSystem::getSamplingRate(output, streamType, &afSampleRate);
-        if (status != NO_ERROR) {
-            ALOGE("getSamplingRate(output=%d, streamType=%d) status %d", output, streamType,
-                    status);
-            return NO_INIT;
-        }
-        size_t afFrameCount;
-        status = AudioSystem::getFrameCount(output, streamType, &afFrameCount);
-        if (status != NO_ERROR) {
-            ALOGE("getFrameCount(output=%d, streamType=%d) status %d", output, streamType, status);
-            return NO_INIT;
-        }
 
         // Ensure that buffer depth covers at least audio hardware latency
         uint32_t minBufCount = afLatency / ((1000 * afFrameCount)/afSampleRate);
         ALOGV("afFrameCount=%d, minBufCount=%d, afSampleRate=%u, afLatency=%d",
                 afFrameCount, minBufCount, afSampleRate, afLatency);
-        if (minBufCount <= 2) {
-            minBufCount = sampleRate == afSampleRate ? 2 : 3;
+        if (minBufCount <= nBuffering) {
+            minBufCount = nBuffering;
         }
 
         size_t minFrameCount = (afFrameCount*sampleRate*minBufCount)/afSampleRate;
@@ -909,18 +917,16 @@
 
         if (frameCount == 0) {
             frameCount = minFrameCount;
-        }
-        // Make sure that application is notified with sufficient margin
-        // before underrun
-        if (mNotificationFramesAct == 0 || mNotificationFramesAct > frameCount/2) {
-            mNotificationFramesAct = frameCount/2;
-        }
-        if (frameCount < minFrameCount) {
+        } else if (frameCount < minFrameCount) {
             // not ALOGW because it happens all the time when playing key clicks over A2DP
             ALOGV("Minimum buffer size corrected from %d to %d",
                      frameCount, minFrameCount);
             frameCount = minFrameCount;
         }
+        // Make sure that application is notified with sufficient margin before underrun
+        if (mNotificationFramesAct == 0 || mNotificationFramesAct > frameCount/nBuffering) {
+            mNotificationFramesAct = frameCount/nBuffering;
+        }
 
     } else {
         // For fast tracks, the frame count calculations and checks are done by server
@@ -1001,8 +1007,8 @@
             flags = (audio_output_flags_t) (flags & ~AUDIO_OUTPUT_FLAG_FAST);
             mFlags = flags;
             if (sharedBuffer == 0) {
-                if (mNotificationFramesAct == 0 || mNotificationFramesAct > frameCount/2) {
-                    mNotificationFramesAct = frameCount/2;
+                if (mNotificationFramesAct == 0 || mNotificationFramesAct > frameCount/nBuffering) {
+                    mNotificationFramesAct = frameCount/nBuffering;
                 }
             }
         }
@@ -1779,7 +1785,7 @@
 // =========================================================================
 
 AudioTrack::AudioTrackThread::AudioTrackThread(AudioTrack& receiver, bool bCanCallJava)
-    : Thread(bCanCallJava), mReceiver(receiver), mPaused(true), mResumeLatch(false)
+    : Thread(bCanCallJava), mReceiver(receiver), mPaused(true), mPausedInt(false), mPausedNs(0LL)
 {
 }
 
@@ -1796,25 +1802,32 @@
             // caller will check for exitPending()
             return true;
         }
+        if (mPausedInt) {
+            if (mPausedNs > 0) {
+                (void) mMyCond.waitRelative(mMyLock, mPausedNs);
+            } else {
+                mMyCond.wait(mMyLock);
+            }
+            mPausedInt = false;
+            return true;
+        }
     }
     nsecs_t ns = mReceiver.processAudioBuffer(this);
     switch (ns) {
     case 0:
         return true;
-    case NS_WHENEVER:
-        sleep(1);
-        return true;
     case NS_INACTIVE:
-        pauseConditional();
+        pauseInternal();
         return true;
     case NS_NEVER:
         return false;
+    case NS_WHENEVER:
+        // FIXME increase poll interval, or make event-driven
+        ns = 1000000000LL;
+        // fall through
     default:
         LOG_ALWAYS_FATAL_IF(ns < 0, "processAudioBuffer() returned %lld", ns);
-        struct timespec req;
-        req.tv_sec = ns / 1000000000LL;
-        req.tv_nsec = ns % 1000000000LL;
-        nanosleep(&req, NULL /*rem*/);
+        pauseInternal(ns);
         return true;
     }
 }
@@ -1823,36 +1836,35 @@
 {
     // must be in this order to avoid a race condition
     Thread::requestExit();
-    resume();
+    AutoMutex _l(mMyLock);
+    if (mPaused || mPausedInt) {
+        mPaused = false;
+        mPausedInt = false;
+        mMyCond.signal();
+    }
 }
 
 void AudioTrack::AudioTrackThread::pause()
 {
     AutoMutex _l(mMyLock);
     mPaused = true;
-    mResumeLatch = false;
-}
-
-void AudioTrack::AudioTrackThread::pauseConditional()
-{
-    AutoMutex _l(mMyLock);
-    if (mResumeLatch) {
-        mResumeLatch = false;
-    } else {
-        mPaused = true;
-    }
 }
 
 void AudioTrack::AudioTrackThread::resume()
 {
     AutoMutex _l(mMyLock);
-    if (mPaused) {
+    if (mPaused || mPausedInt) {
         mPaused = false;
-        mResumeLatch = false;
+        mPausedInt = false;
         mMyCond.signal();
-    } else {
-        mResumeLatch = true;
     }
 }
 
+void AudioTrack::AudioTrackThread::pauseInternal(nsecs_t ns)
+{
+    AutoMutex _l(mMyLock);
+    mPausedInt = true;
+    mPausedNs = ns;
+}
+
 }; // namespace android
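To make the buffer arithmetic above concrete, here is a worked example with assumed hardware values; afFrameCount, afSampleRate and afLatency below are illustrative numbers, not taken from any real device.

#include <cstdint>
#include <cstdio>

int main() {
    uint32_t sampleRate   = 48000;   // client rate
    uint32_t afSampleRate = 48000;   // mixer rate
    uint32_t afFrameCount = 960;     // mixer period, frames
    uint32_t afLatency    = 80;      // hardware latency, ms

    // n = 2 when no resampling is needed, 3 otherwise (pessimistic).
    const uint32_t nBuffering = (sampleRate == afSampleRate) ? 2 : 3;

    // Ensure buffer depth covers at least the hardware latency: 80 / 20 = 4 periods.
    uint32_t minBufCount = afLatency / ((1000 * afFrameCount) / afSampleRate);
    if (minBufCount <= nBuffering) {
        minBufCount = nBuffering;
    }
    // 960 * 48000 * 4 / 48000 = 3840 frames.
    size_t minFrameCount = (size_t)afFrameCount * sampleRate * minBufCount / afSampleRate;

    size_t frameCount = minFrameCount;                    // client did not ask for more
    size_t notificationFrames = frameCount / nBuffering;  // wake the client every half buffer

    printf("minBufCount=%u minFrameCount=%zu notificationFrames=%zu\n",
           minBufCount, minFrameCount, notificationFrames);
    return 0;
}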
diff --git a/media/libmedia/AudioTrackShared.cpp b/media/libmedia/AudioTrackShared.cpp
index e7abb40..4fd92b2 100644
--- a/media/libmedia/AudioTrackShared.cpp
+++ b/media/libmedia/AudioTrackShared.cpp
@@ -481,7 +481,7 @@
 ServerProxy::ServerProxy(audio_track_cblk_t* cblk, void *buffers, size_t frameCount,
         size_t frameSize, bool isOut, bool clientInServer)
     : Proxy(cblk, buffers, frameCount, frameSize, isOut, clientInServer),
-      mAvailToClient(0), mFlush(0), mDeferWake(false)
+      mAvailToClient(0), mFlush(0)
 {
 }
 
@@ -559,9 +559,6 @@
             &((char *) mBuffers)[(mIsOut ? front : rear) * mFrameSize] : NULL;
     buffer->mNonContig = availToServer - part1;
     mUnreleased = part1;
-    // optimization to avoid waking up the client too early
-    // FIXME need to test for recording
-    mDeferWake = part1 < ask && availToServer >= ask;
     return part1 > 0 ? NO_ERROR : WOULD_BLOCK;
     }
 no_init:
@@ -607,7 +604,7 @@
         minimum = half;
     }
     // FIXME AudioRecord wakeup needs to be optimized; it currently wakes up client every time
-    if (!mIsOut || (!mDeferWake && mAvailToClient + stepCount >= minimum)) {
+    if (!mIsOut || (mAvailToClient + stepCount >= minimum)) {
         ALOGV("mAvailToClient=%u stepCount=%u minimum=%u", mAvailToClient, stepCount, minimum);
         int32_t old = android_atomic_or(CBLK_FUTEX_WAKE, &cblk->mFutex);
         if (!(old & CBLK_FUTEX_WAKE)) {
diff --git a/media/libmedia/IAudioFlinger.cpp b/media/libmedia/IAudioFlinger.cpp
index be818c6..448a82e 100644
--- a/media/libmedia/IAudioFlinger.cpp
+++ b/media/libmedia/IAudioFlinger.cpp
@@ -108,7 +108,12 @@
         data.writeInt32(frameCount);
         track_flags_t lFlags = flags != NULL ? *flags : (track_flags_t) TRACK_DEFAULT;
         data.writeInt32(lFlags);
-        data.writeStrongBinder(sharedBuffer->asBinder());
+        if (sharedBuffer != 0) {
+            data.writeInt32(true);
+            data.writeStrongBinder(sharedBuffer->asBinder());
+        } else {
+            data.writeInt32(false);
+        }
         data.writeInt32((int32_t) output);
         data.writeInt32((int32_t) tid);
         int lSessionId = 0;
@@ -179,6 +184,17 @@
             }
             lStatus = reply.readInt32();
             record = interface_cast<IAudioRecord>(reply.readStrongBinder());
+            if (lStatus == NO_ERROR) {
+                if (record == 0) {
+                    ALOGE("openRecord should have returned an IAudioRecord");
+                    lStatus = UNKNOWN_ERROR;
+                }
+            } else {
+                if (record != 0) {
+                    ALOGE("openRecord returned an IAudioRecord but with status %d", lStatus);
+                    record.clear();
+                }
+            }
         }
         if (status) {
             *status = lStatus;
@@ -738,15 +754,27 @@
             audio_channel_mask_t channelMask = data.readInt32();
             size_t frameCount = data.readInt32();
             track_flags_t flags = (track_flags_t) data.readInt32();
-            sp<IMemory> buffer = interface_cast<IMemory>(data.readStrongBinder());
+            bool haveSharedBuffer = data.readInt32() != 0;
+            sp<IMemory> buffer;
+            if (haveSharedBuffer) {
+                buffer = interface_cast<IMemory>(data.readStrongBinder());
+            }
             audio_io_handle_t output = (audio_io_handle_t) data.readInt32();
             pid_t tid = (pid_t) data.readInt32();
             int sessionId = data.readInt32();
             String8 name;
             status_t status;
-            sp<IAudioTrack> track = createTrack(
-                    (audio_stream_type_t) streamType, sampleRate, format,
-                    channelMask, frameCount, &flags, buffer, output, tid, &sessionId, name, &status);
+            sp<IAudioTrack> track;
+            if ((haveSharedBuffer && (buffer == 0)) ||
+                    ((buffer != 0) && (buffer->pointer() == NULL))) {
+                ALOGW("CREATE_TRACK: cannot retrieve shared memory");
+                status = DEAD_OBJECT;
+            } else {
+                track = createTrack(
+                        (audio_stream_type_t) streamType, sampleRate, format,
+                        channelMask, frameCount, &flags, buffer, output, tid,
+                        &sessionId, name, &status);
+            }
             reply->writeInt32(flags);
             reply->writeInt32(sessionId);
             reply->writeString8(name);
@@ -767,6 +795,7 @@
             status_t status;
             sp<IAudioRecord> record = openRecord(input,
                     sampleRate, format, channelMask, frameCount, &flags, tid, &sessionId, &status);
+            LOG_ALWAYS_FATAL_IF((record != 0) != (status == NO_ERROR));
             reply->writeInt32(flags);
             reply->writeInt32(sessionId);
             reply->writeInt32(status);
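The CREATE_TRACK change above marshals the nullable sharedBuffer as a presence flag followed by the binder. Below is a sketch of that pattern factored into helpers; writeOptionalMemory and readOptionalMemory are hypothetical names, assuming the standard android::Parcel and IMemory APIs.

#include <binder/IInterface.h>
#include <binder/IMemory.h>
#include <binder/Parcel.h>
#include <utils/Errors.h>

namespace android {

// Write side: a presence flag, then the binder only when the memory exists.
void writeOptionalMemory(Parcel& data, const sp<IMemory>& mem) {
    if (mem != 0) {
        data.writeInt32(true);
        data.writeStrongBinder(mem->asBinder());
    } else {
        data.writeInt32(false);
    }
}

// Read side: only interface_cast when the flag says a binder follows, and treat
// a flagged-but-unreadable binder as a dead peer.
status_t readOptionalMemory(const Parcel& data, sp<IMemory>* outMem) {
    if (data.readInt32() == 0) {
        outMem->clear();
        return NO_ERROR;
    }
    *outMem = interface_cast<IMemory>(data.readStrongBinder());
    return (*outMem != 0) ? NO_ERROR : DEAD_OBJECT;
}

}  // namespace android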
diff --git a/media/libmedia/IAudioTrack.cpp b/media/libmedia/IAudioTrack.cpp
index f0d75ba..3cd9cfd 100644
--- a/media/libmedia/IAudioTrack.cpp
+++ b/media/libmedia/IAudioTrack.cpp
@@ -41,6 +41,7 @@
     SET_MEDIA_TIME_TRANSFORM,
     SET_PARAMETERS,
     GET_TIMESTAMP,
+    SIGNAL,
 };
 
 class BpAudioTrack : public BpInterface<IAudioTrack>
@@ -182,6 +183,12 @@
         }
         return status;
     }
+
+    virtual void signal() {
+        Parcel data, reply;
+        data.writeInterfaceToken(IAudioTrack::getInterfaceDescriptor());
+        remote()->transact(SIGNAL, data, &reply);
+    }
 };
 
 IMPLEMENT_META_INTERFACE(AudioTrack, "android.media.IAudioTrack");
@@ -269,6 +276,11 @@
             }
             return NO_ERROR;
         } break;
+        case SIGNAL: {
+            CHECK_INTERFACE(IAudioTrack, data, reply);
+            signal();
+            return NO_ERROR;
+        } break;
         default:
             return BBinder::onTransact(code, data, reply, flags);
     }
diff --git a/media/libmedia/IMediaPlayerService.cpp b/media/libmedia/IMediaPlayerService.cpp
index 74f574d..3c22b4c 100644
--- a/media/libmedia/IMediaPlayerService.cpp
+++ b/media/libmedia/IMediaPlayerService.cpp
@@ -86,30 +86,48 @@
         return interface_cast<IMediaRecorder>(reply.readStrongBinder());
     }
 
-    virtual sp<IMemory> decode(const char* url, uint32_t *pSampleRate, int* pNumChannels, audio_format_t* pFormat)
+    virtual status_t decode(const char* url, uint32_t *pSampleRate, int* pNumChannels,
+                               audio_format_t* pFormat,
+                               const sp<IMemoryHeap>& heap, size_t *pSize)
     {
         Parcel data, reply;
         data.writeInterfaceToken(IMediaPlayerService::getInterfaceDescriptor());
         data.writeCString(url);
-        remote()->transact(DECODE_URL, data, &reply);
-        *pSampleRate = uint32_t(reply.readInt32());
-        *pNumChannels = reply.readInt32();
-        *pFormat = (audio_format_t) reply.readInt32();
-        return interface_cast<IMemory>(reply.readStrongBinder());
+        data.writeStrongBinder(heap->asBinder());
+        status_t status = remote()->transact(DECODE_URL, data, &reply);
+        if (status == NO_ERROR) {
+            status = (status_t)reply.readInt32();
+            if (status == NO_ERROR) {
+                *pSampleRate = uint32_t(reply.readInt32());
+                *pNumChannels = reply.readInt32();
+                *pFormat = (audio_format_t)reply.readInt32();
+                *pSize = (size_t)reply.readInt32();
+            }
+        }
+        return status;
     }
 
-    virtual sp<IMemory> decode(int fd, int64_t offset, int64_t length, uint32_t *pSampleRate, int* pNumChannels, audio_format_t* pFormat)
+    virtual status_t decode(int fd, int64_t offset, int64_t length, uint32_t *pSampleRate,
+                               int* pNumChannels, audio_format_t* pFormat,
+                               const sp<IMemoryHeap>& heap, size_t *pSize)
     {
         Parcel data, reply;
         data.writeInterfaceToken(IMediaPlayerService::getInterfaceDescriptor());
         data.writeFileDescriptor(fd);
         data.writeInt64(offset);
         data.writeInt64(length);
-        remote()->transact(DECODE_FD, data, &reply);
-        *pSampleRate = uint32_t(reply.readInt32());
-        *pNumChannels = reply.readInt32();
-        *pFormat = (audio_format_t) reply.readInt32();
-        return interface_cast<IMemory>(reply.readStrongBinder());
+        data.writeStrongBinder(heap->asBinder());
+        status_t status = remote()->transact(DECODE_FD, data, &reply);
+        if (status == NO_ERROR) {
+            status = (status_t)reply.readInt32();
+            if (status == NO_ERROR) {
+                *pSampleRate = uint32_t(reply.readInt32());
+                *pNumChannels = reply.readInt32();
+                *pFormat = (audio_format_t)reply.readInt32();
+                *pSize = (size_t)reply.readInt32();
+            }
+        }
+        return status;
     }
 
     virtual sp<IOMX> getOMX() {
@@ -205,14 +223,19 @@
         case DECODE_URL: {
             CHECK_INTERFACE(IMediaPlayerService, data, reply);
             const char* url = data.readCString();
+            sp<IMemoryHeap> heap = interface_cast<IMemoryHeap>(data.readStrongBinder());
             uint32_t sampleRate;
             int numChannels;
             audio_format_t format;
-            sp<IMemory> player = decode(url, &sampleRate, &numChannels, &format);
-            reply->writeInt32(sampleRate);
-            reply->writeInt32(numChannels);
-            reply->writeInt32((int32_t) format);
-            reply->writeStrongBinder(player->asBinder());
+            size_t size;
+            status_t status = decode(url, &sampleRate, &numChannels, &format, heap, &size);
+            reply->writeInt32(status);
+            if (status == NO_ERROR) {
+                reply->writeInt32(sampleRate);
+                reply->writeInt32(numChannels);
+                reply->writeInt32((int32_t)format);
+                reply->writeInt32((int32_t)size);
+            }
             return NO_ERROR;
         } break;
         case DECODE_FD: {
@@ -220,14 +243,20 @@
             int fd = dup(data.readFileDescriptor());
             int64_t offset = data.readInt64();
             int64_t length = data.readInt64();
+            sp<IMemoryHeap> heap = interface_cast<IMemoryHeap>(data.readStrongBinder());
             uint32_t sampleRate;
             int numChannels;
             audio_format_t format;
-            sp<IMemory> player = decode(fd, offset, length, &sampleRate, &numChannels, &format);
-            reply->writeInt32(sampleRate);
-            reply->writeInt32(numChannels);
-            reply->writeInt32((int32_t) format);
-            reply->writeStrongBinder(player->asBinder());
+            size_t size;
+            status_t status = decode(fd, offset, length, &sampleRate, &numChannels, &format,
+                                     heap, &size);
+            reply->writeInt32(status);
+            if (status == NO_ERROR) {
+                reply->writeInt32(sampleRate);
+                reply->writeInt32(numChannels);
+                reply->writeInt32((int32_t)format);
+                reply->writeInt32((int32_t)size);
+            }
             return NO_ERROR;
         } break;
         case CREATE_MEDIA_RECORDER: {
diff --git a/media/libmedia/IOMX.cpp b/media/libmedia/IOMX.cpp
index ef99f4f..71ce320 100644
--- a/media/libmedia/IOMX.cpp
+++ b/media/libmedia/IOMX.cpp
@@ -43,6 +43,7 @@
     CREATE_INPUT_SURFACE,
     SIGNAL_END_OF_INPUT_STREAM,
     STORE_META_DATA_IN_BUFFERS,
+    PREPARE_FOR_ADAPTIVE_PLAYBACK,
     ALLOC_BUFFER,
     ALLOC_BUFFER_WITH_BACKUP,
     FREE_BUFFER,
@@ -351,6 +352,22 @@
         return err;
     }
 
+    virtual status_t prepareForAdaptivePlayback(
+            node_id node, OMX_U32 port_index, OMX_BOOL enable,
+            OMX_U32 max_width, OMX_U32 max_height) {
+        Parcel data, reply;
+        data.writeInterfaceToken(IOMX::getInterfaceDescriptor());
+        data.writeIntPtr((intptr_t)node);
+        data.writeInt32(port_index);
+        data.writeInt32((int32_t)enable);
+        data.writeInt32(max_width);
+        data.writeInt32(max_height);
+        remote()->transact(PREPARE_FOR_ADAPTIVE_PLAYBACK, data, &reply);
+
+        status_t err = reply.readInt32();
+        return err;
+    }
+
     virtual status_t allocateBuffer(
             node_id node, OMX_U32 port_index, size_t size,
             buffer_id *buffer, void **buffer_data) {
@@ -770,6 +787,23 @@
             return NO_ERROR;
         }
 
+        case PREPARE_FOR_ADAPTIVE_PLAYBACK:
+        {
+            CHECK_OMX_INTERFACE(IOMX, data, reply);
+
+            node_id node = (void*)data.readIntPtr();
+            OMX_U32 port_index = data.readInt32();
+            OMX_BOOL enable = (OMX_BOOL)data.readInt32();
+            OMX_U32 max_width = data.readInt32();
+            OMX_U32 max_height = data.readInt32();
+
+            status_t err = prepareForAdaptivePlayback(
+                    node, port_index, enable, max_width, max_height);
+            reply->writeInt32(err);
+
+            return NO_ERROR;
+        }
+
         case ALLOC_BUFFER:
         {
             CHECK_OMX_INTERFACE(IOMX, data, reply);
diff --git a/media/libmedia/SoundPool.cpp b/media/libmedia/SoundPool.cpp
index 7f10e05..8434d43 100644
--- a/media/libmedia/SoundPool.cpp
+++ b/media/libmedia/SoundPool.cpp
@@ -18,7 +18,7 @@
 #define LOG_TAG "SoundPool"
 #include <utils/Log.h>
 
-//#define USE_SHARED_MEM_BUFFER
+#define USE_SHARED_MEM_BUFFER
 
 #include <media/AudioTrack.h>
 #include <media/mediaplayer.h>
@@ -32,6 +32,8 @@
 uint32_t kMaxSampleRate = 48000;
 uint32_t kDefaultSampleRate = 44100;
 uint32_t kDefaultFrameCount = 1200;
+size_t kDefaultHeapSize = 1024 * 1024; // 1MB
+
 
 SoundPool::SoundPool(int maxChannels, audio_stream_type_t streamType, int srcQuality)
 {
@@ -464,7 +466,6 @@
 
 void Sample::init()
 {
-    mData = 0;
     mSize = 0;
     mRefCount = 0;
     mSampleID = 0;
@@ -482,7 +483,6 @@
         ALOGV("close(%d)", mFd);
         ::close(mFd);
     }
-    mData.clear();
     free(mUrl);
 }
 
@@ -491,44 +491,48 @@
     uint32_t sampleRate;
     int numChannels;
     audio_format_t format;
-    sp<IMemory> p;
+    status_t status;
+    mHeap = new MemoryHeapBase(kDefaultHeapSize);
+
     ALOGV("Start decode");
     if (mUrl) {
-        p = MediaPlayer::decode(mUrl, &sampleRate, &numChannels, &format);
+        status = MediaPlayer::decode(mUrl, &sampleRate, &numChannels, &format, mHeap, &mSize);
     } else {
-        p = MediaPlayer::decode(mFd, mOffset, mLength, &sampleRate, &numChannels, &format);
+        status = MediaPlayer::decode(mFd, mOffset, mLength, &sampleRate, &numChannels, &format,
+                                     mHeap, &mSize);
         ALOGV("close(%d)", mFd);
         ::close(mFd);
         mFd = -1;
     }
-    if (p == 0) {
+    if (status != NO_ERROR) {
         ALOGE("Unable to load sample: %s", mUrl);
-        return -1;
+        goto error;
     }
     ALOGV("pointer = %p, size = %u, sampleRate = %u, numChannels = %d",
-            p->pointer(), p->size(), sampleRate, numChannels);
+          mHeap->getBase(), mSize, sampleRate, numChannels);
 
     if (sampleRate > kMaxSampleRate) {
        ALOGE("Sample rate (%u) out of range", sampleRate);
-       return - 1;
+       status = BAD_VALUE;
+       goto error;
     }
 
     if ((numChannels < 1) || (numChannels > 2)) {
         ALOGE("Sample channel count (%d) out of range", numChannels);
-        return - 1;
+        status = BAD_VALUE;
+        goto error;
     }
 
-    //_dumpBuffer(p->pointer(), p->size());
-    uint8_t* q = static_cast<uint8_t*>(p->pointer()) + p->size() - 10;
-    //_dumpBuffer(q, 10, 10, false);
-
-    mData = p;
-    mSize = p->size();
+    mData = new MemoryBase(mHeap, 0, mSize);
     mSampleRate = sampleRate;
     mNumChannels = numChannels;
     mFormat = format;
     mState = READY;
-    return 0;
+    return NO_ERROR;
+
+error:
+    mHeap.clear();
+    return status;
 }
 
 
@@ -602,7 +606,7 @@
         // do not create a new audio track if current track is compatible with sample parameters
 #ifdef USE_SHARED_MEM_BUFFER
         newTrack = new AudioTrack(streamType, sampleRate, sample->format(),
-                channels, sample->getIMemory(), AUDIO_OUTPUT_FLAG_NONE, callback, userData);
+                channels, sample->getIMemory(), AUDIO_OUTPUT_FLAG_FAST, callback, userData);
 #else
         newTrack = new AudioTrack(streamType, sampleRate, sample->format(),
                 channels, frameCount, AUDIO_OUTPUT_FLAG_FAST, callback, userData,
@@ -740,11 +744,11 @@
             b->size = count;
             //ALOGV("buffer=%p, [0]=%d", b->i16, b->i16[0]);
         }
-    } else if (event == AudioTrack::EVENT_UNDERRUN) {
-        ALOGV("process %p channel %d EVENT_UNDERRUN", this, mChannelID);
+    } else if (event == AudioTrack::EVENT_UNDERRUN || event == AudioTrack::EVENT_BUFFER_END) {
+        ALOGV("process %p channel %d EVENT_UNDERRUN or EVENT_BUFFER_END", this, mChannelID);
         mSoundPool->addToStopList(this);
     } else if (event == AudioTrack::EVENT_LOOP_END) {
-        ALOGV("End loop %p channel %d count %d", this, mChannelID, *(int *)info);
+        ALOGV("End loop %p channel %d", this, mChannelID);
     }
 }
 
diff --git a/media/libmedia/Visualizer.cpp b/media/libmedia/Visualizer.cpp
index e519f13..c146b8d 100644
--- a/media/libmedia/Visualizer.cpp
+++ b/media/libmedia/Visualizer.cpp
@@ -43,6 +43,7 @@
         mCaptureSize(CAPTURE_SIZE_DEF),
         mSampleRate(44100000),
         mScalingMode(VISUALIZER_SCALING_MODE_NORMALIZED),
+        mMeasurementMode(MEASUREMENT_MODE_NONE),
         mCaptureCallBack(NULL),
         mCaptureCbkUser(NULL)
 {
@@ -186,6 +187,73 @@
     return status;
 }
 
+status_t Visualizer::setMeasurementMode(uint32_t mode) {
+    if ((mode != MEASUREMENT_MODE_NONE)
+            //Note: needs to be handled as a mask when more measurement modes are added
+            && ((mode & MEASUREMENT_MODE_PEAK_RMS) != mode)) {
+        return BAD_VALUE;
+    }
+
+    Mutex::Autolock _l(mCaptureLock);
+
+    uint32_t buf32[sizeof(effect_param_t) / sizeof(uint32_t) + 2];
+    effect_param_t *p = (effect_param_t *)buf32;
+
+    p->psize = sizeof(uint32_t);
+    p->vsize = sizeof(uint32_t);
+    *(int32_t *)p->data = VISUALIZER_PARAM_MEASUREMENT_MODE;
+    *((int32_t *)p->data + 1)= mode;
+    status_t status = setParameter(p);
+
+    ALOGV("setMeasurementMode mode %d  status %d p->status %d", mode, status, p->status);
+
+    if (status == NO_ERROR) {
+        status = p->status;
+        if (status == NO_ERROR) {
+            mMeasurementMode = mode;
+        }
+    }
+    return status;
+}
+
+status_t Visualizer::getIntMeasurements(uint32_t type, uint32_t number, int32_t *measurements) {
+    if (mMeasurementMode == MEASUREMENT_MODE_NONE) {
+        ALOGE("Cannot retrieve int measurements, no measurement mode set");
+        return INVALID_OPERATION;
+    }
+    if (!(mMeasurementMode & type)) {
+        // measurement type has not been set on this Visualizer
+        ALOGE("Cannot retrieve int measurements, requested measurement mode 0x%x not set(0x%x)",
+                type, mMeasurementMode);
+        return INVALID_OPERATION;
+    }
+    // only peak+RMS measurement supported
+    if ((type != MEASUREMENT_MODE_PEAK_RMS)
+            // for peak+RMS measurement, the results are 2 int32_t values
+            || (number != 2)) {
+        ALOGE("Cannot retrieve int measurements, MEASUREMENT_MODE_PEAK_RMS returns 2 ints, not %d",
+                        number);
+        return BAD_VALUE;
+    }
+
+    status_t status = NO_ERROR;
+    if (mEnabled) {
+        uint32_t replySize = number * sizeof(int32_t);
+        status = command(VISUALIZER_CMD_MEASURE,
+                sizeof(uint32_t)  /*cmdSize*/,
+                &type /*cmdData*/,
+                &replySize, measurements);
+        ALOGV("getMeasurements() command returned %d", status);
+        if ((status == NO_ERROR) && (replySize == 0)) {
+            status = NOT_ENOUGH_DATA;
+        }
+    } else {
+        ALOGV("getMeasurements() disabled");
+        return INVALID_OPERATION;
+    }
+    return status;
+}
+
 status_t Visualizer::getWaveForm(uint8_t *waveform)
 {
     if (waveform == NULL) {
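A sketch of how a client might drive the new measurement path end to end; readPeakRms is a hypothetical helper, and MEASUREMENT_MODE_PEAK_RMS plus the MEASUREMENT_IDX_* indices are assumed to come from the visualizer effect headers.

#include <media/Visualizer.h>

using namespace android;

// Enable peak+RMS measurements on an existing Visualizer and read one snapshot.
// Both results come back in millibels (hundredths of a dB), per the effect code.
status_t readPeakRms(Visualizer& vis, int32_t* peakMb, int32_t* rmsMb) {
    status_t status = vis.setMeasurementMode(MEASUREMENT_MODE_PEAK_RMS);
    if (status != NO_ERROR) {
        return status;
    }
    int32_t measurements[2];
    status = vis.getIntMeasurements(MEASUREMENT_MODE_PEAK_RMS,
                                    2 /* two int32_t results for peak+RMS */,
                                    measurements);
    if (status == NO_ERROR) {
        *peakMb = measurements[MEASUREMENT_IDX_PEAK];
        *rmsMb  = measurements[MEASUREMENT_IDX_RMS];
    }
    return status;
}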
diff --git a/media/libmedia/mediaplayer.cpp b/media/libmedia/mediaplayer.cpp
index 4323d0c..0f6d897 100644
--- a/media/libmedia/mediaplayer.cpp
+++ b/media/libmedia/mediaplayer.cpp
@@ -776,17 +776,20 @@
     }
 }
 
-/*static*/ sp<IMemory> MediaPlayer::decode(const char* url, uint32_t *pSampleRate, int* pNumChannels, audio_format_t* pFormat)
+/*static*/ status_t MediaPlayer::decode(const char* url, uint32_t *pSampleRate,
+                                           int* pNumChannels, audio_format_t* pFormat,
+                                           const sp<IMemoryHeap>& heap, size_t *pSize)
 {
     ALOGV("decode(%s)", url);
-    sp<IMemory> p;
+    status_t status;
     const sp<IMediaPlayerService>& service = getMediaPlayerService();
     if (service != 0) {
-        p = service->decode(url, pSampleRate, pNumChannels, pFormat);
+        status = service->decode(url, pSampleRate, pNumChannels, pFormat, heap, pSize);
     } else {
         ALOGE("Unable to locate media service");
+        status = DEAD_OBJECT;
     }
-    return p;
+    return status;
 
 }
 
@@ -796,17 +799,22 @@
     notify(MEDIA_ERROR, MEDIA_ERROR_SERVER_DIED, 0);
 }
 
-/*static*/ sp<IMemory> MediaPlayer::decode(int fd, int64_t offset, int64_t length, uint32_t *pSampleRate, int* pNumChannels, audio_format_t* pFormat)
+/*static*/ status_t MediaPlayer::decode(int fd, int64_t offset, int64_t length,
+                                        uint32_t *pSampleRate, int* pNumChannels,
+                                        audio_format_t* pFormat,
+                                        const sp<IMemoryHeap>& heap, size_t *pSize)
 {
     ALOGV("decode(%d, %lld, %lld)", fd, offset, length);
-    sp<IMemory> p;
+    status_t status;
     const sp<IMediaPlayerService>& service = getMediaPlayerService();
     if (service != 0) {
-        p = service->decode(fd, offset, length, pSampleRate, pNumChannels, pFormat);
+        status = service->decode(fd, offset, length, pSampleRate,
+                                 pNumChannels, pFormat, heap, pSize);
     } else {
         ALOGE("Unable to locate media service");
+        status = DEAD_OBJECT;
     }
-    return p;
+    return status;
 
 }
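With the reworked decode() above, the caller now owns the shared memory: it allocates the heap, passes it down, and gets back only the decoded size and format. A sketch of that calling pattern follows; decodeIntoHeap and the 1 MB heap size are illustrative.

#include <binder/MemoryHeapBase.h>
#include <media/mediaplayer.h>

using namespace android;

// Decode a URL into a caller-allocated heap; on success the PCM data starts at
// heap->getBase() and is 'size' bytes long.
status_t decodeIntoHeap(const char* url) {
    sp<MemoryHeapBase> heap = new MemoryHeapBase(1024 * 1024 /* 1MB, illustrative */);
    uint32_t sampleRate;
    int numChannels;
    audio_format_t format;
    size_t size;
    status_t status = MediaPlayer::decode(url, &sampleRate, &numChannels, &format,
                                          heap, &size);
    if (status != NO_ERROR) {
        return status;
    }
    // Use heap->getBase() / size here, e.g. hand them to an AudioTrack.
    return NO_ERROR;
}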
 
diff --git a/media/libmediaplayerservice/MediaPlayerService.cpp b/media/libmediaplayerservice/MediaPlayerService.cpp
index 8833bd7..9553458 100644
--- a/media/libmediaplayerservice/MediaPlayerService.cpp
+++ b/media/libmediaplayerservice/MediaPlayerService.cpp
@@ -319,8 +319,8 @@
 
     result.append(" AudioCache\n");
     if (mHeap != 0) {
-        snprintf(buffer, 255, "  heap base(%p), size(%d), flags(%d), device(%s)\n",
-                mHeap->getBase(), mHeap->getSize(), mHeap->getFlags(), mHeap->getDevice());
+        snprintf(buffer, 255, "  heap base(%p), size(%d), flags(%d)\n",
+                mHeap->getBase(), mHeap->getSize(), mHeap->getFlags());
         result.append(buffer);
     }
     snprintf(buffer, 255, "  msec per frame(%f), channel count(%d), format(%d), frame count(%zd)\n",
@@ -744,7 +744,7 @@
 
     sp<ANativeWindow> anw;
     if (bufferProducer != NULL) {
-        anw = new Surface(bufferProducer);
+        anw = new Surface(bufferProducer, true /* controlledByApp */);
         status_t err = native_window_api_connect(anw.get(),
                 NATIVE_WINDOW_API_MEDIA);
 
@@ -1176,13 +1176,13 @@
 }
 #endif
 
-static size_t kDefaultHeapSize = 1024 * 1024; // 1MB
-
-sp<IMemory> MediaPlayerService::decode(const char* url, uint32_t *pSampleRate, int* pNumChannels, audio_format_t* pFormat)
+status_t MediaPlayerService::decode(const char* url, uint32_t *pSampleRate, int* pNumChannels,
+                                       audio_format_t* pFormat,
+                                       const sp<IMemoryHeap>& heap, size_t *pSize)
 {
     ALOGV("decode(%s)", url);
-    sp<MemoryBase> mem;
     sp<MediaPlayerBase> player;
+    status_t status = BAD_VALUE;
 
     // Protect our precious, precious DRMd ringtones by only allowing
     // decoding of http, but not filesystem paths or content Uris.
@@ -1190,7 +1190,7 @@
     // filedescriptor for them and use that.
     if (url != NULL && strncmp(url, "http://", 7) != 0) {
         ALOGD("Can't decode %s by path, use filedescriptor instead", url);
-        return mem;
+        return BAD_VALUE;
     }
 
     player_type playerType =
@@ -1198,7 +1198,7 @@
     ALOGV("player type = %d", playerType);
 
     // create the right type of player
-    sp<AudioCache> cache = new AudioCache(url);
+    sp<AudioCache> cache = new AudioCache(heap);
     player = MediaPlayerFactory::createPlayer(playerType, cache.get(), cache->notify);
     if (player == NULL) goto Exit;
     if (player->hardwareOutput()) goto Exit;
@@ -1224,22 +1224,27 @@
         goto Exit;
     }
 
-    mem = new MemoryBase(cache->getHeap(), 0, cache->size());
+    *pSize = cache->size();
     *pSampleRate = cache->sampleRate();
     *pNumChannels = cache->channelCount();
     *pFormat = cache->format();
-    ALOGV("return memory @ %p, sampleRate=%u, channelCount = %d, format = %d", mem->pointer(), *pSampleRate, *pNumChannels, *pFormat);
+    ALOGV("return size %d sampleRate=%u, channelCount = %d, format = %d",
+          *pSize, *pSampleRate, *pNumChannels, *pFormat);
+    status = NO_ERROR;
 
 Exit:
     if (player != 0) player->reset();
-    return mem;
+    return status;
 }
 
-sp<IMemory> MediaPlayerService::decode(int fd, int64_t offset, int64_t length, uint32_t *pSampleRate, int* pNumChannels, audio_format_t* pFormat)
+status_t MediaPlayerService::decode(int fd, int64_t offset, int64_t length,
+                                       uint32_t *pSampleRate, int* pNumChannels,
+                                       audio_format_t* pFormat,
+                                       const sp<IMemoryHeap>& heap, size_t *pSize)
 {
     ALOGV("decode(%d, %lld, %lld)", fd, offset, length);
-    sp<MemoryBase> mem;
     sp<MediaPlayerBase> player;
+    status_t status = BAD_VALUE;
 
     player_type playerType = MediaPlayerFactory::getPlayerType(NULL /* client */,
                                                                fd,
@@ -1248,7 +1253,7 @@
     ALOGV("player type = %d", playerType);
 
     // create the right type of player
-    sp<AudioCache> cache = new AudioCache("decode_fd");
+    sp<AudioCache> cache = new AudioCache(heap);
     player = MediaPlayerFactory::createPlayer(playerType, cache.get(), cache->notify);
     if (player == NULL) goto Exit;
     if (player->hardwareOutput()) goto Exit;
@@ -1274,16 +1279,18 @@
         goto Exit;
     }
 
-    mem = new MemoryBase(cache->getHeap(), 0, cache->size());
+    *pSize = cache->size();
     *pSampleRate = cache->sampleRate();
     *pNumChannels = cache->channelCount();
     *pFormat = cache->format();
-    ALOGV("return memory @ %p, sampleRate=%u, channelCount = %d, format = %d", mem->pointer(), *pSampleRate, *pNumChannels, *pFormat);
+    ALOGV("return size %d, sampleRate=%u, channelCount = %d, format = %d",
+          *pSize, *pSampleRate, *pNumChannels, *pFormat);
+    status = NO_ERROR;
 
 Exit:
     if (player != 0) player->reset();
     ::close(fd);
-    return mem;
+    return status;
 }
 
 
@@ -1803,12 +1810,10 @@
 
 #undef LOG_TAG
 #define LOG_TAG "AudioCache"
-MediaPlayerService::AudioCache::AudioCache(const char* name) :
-    mChannelCount(0), mFrameCount(1024), mSampleRate(0), mSize(0),
-    mError(NO_ERROR), mCommandComplete(false)
+MediaPlayerService::AudioCache::AudioCache(const sp<IMemoryHeap>& heap) :
+    mHeap(heap), mChannelCount(0), mFrameCount(1024), mSampleRate(0), mSize(0),
+    mError(NO_ERROR),  mCommandComplete(false)
 {
-    // create ashmem heap
-    mHeap = new MemoryHeapBase(kDefaultHeapSize, 0, name);
 }
 
 uint32_t MediaPlayerService::AudioCache::latency () const
diff --git a/media/libmediaplayerservice/MediaPlayerService.h b/media/libmediaplayerservice/MediaPlayerService.h
index 7d27944..21f4117 100644
--- a/media/libmediaplayerservice/MediaPlayerService.h
+++ b/media/libmediaplayerservice/MediaPlayerService.h
@@ -177,7 +177,7 @@
     class AudioCache : public MediaPlayerBase::AudioSink
     {
     public:
-                                AudioCache(const char* name);
+                                AudioCache(const sp<IMemoryHeap>& heap);
         virtual                 ~AudioCache() {}
 
         virtual bool            ready() const { return (mChannelCount > 0) && (mHeap->getHeapID() > 0); }
@@ -224,7 +224,7 @@
 
         Mutex               mLock;
         Condition           mSignal;
-        sp<MemoryHeapBase>  mHeap;
+        sp<IMemoryHeap>     mHeap;
         float               mMsecsPerFrame;
         uint16_t            mChannelCount;
         audio_format_t      mFormat;
@@ -247,8 +247,13 @@
 
     virtual sp<IMediaPlayer>    create(const sp<IMediaPlayerClient>& client, int audioSessionId);
 
-    virtual sp<IMemory>         decode(const char* url, uint32_t *pSampleRate, int* pNumChannels, audio_format_t* pFormat);
-    virtual sp<IMemory>         decode(int fd, int64_t offset, int64_t length, uint32_t *pSampleRate, int* pNumChannels, audio_format_t* pFormat);
+    virtual status_t            decode(const char* url, uint32_t *pSampleRate, int* pNumChannels,
+                                       audio_format_t* pFormat,
+                                       const sp<IMemoryHeap>& heap, size_t *pSize);
+    virtual status_t            decode(int fd, int64_t offset, int64_t length,
+                                       uint32_t *pSampleRate, int* pNumChannels,
+                                       audio_format_t* pFormat,
+                                       const sp<IMemoryHeap>& heap, size_t *pSize);
     virtual sp<IOMX>            getOMX();
     virtual sp<ICrypto>         makeCrypto();
     virtual sp<IDrm>            makeDrm();
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
index e1735fa..750287f 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
@@ -1396,6 +1396,11 @@
             uint32_t flags;
             CHECK(msg->findInt32("flags", (int32_t *)&flags));
 
+            sp<NuPlayerDriver> driver = mDriver.promote();
+            if (driver != NULL) {
+                driver->notifyFlagsChanged(flags);
+            }
+
             if ((mSourceFlags & Source::FLAG_DYNAMIC_DURATION)
                     && (!(flags & Source::FLAG_DYNAMIC_DURATION))) {
                 cancelPollDuration();
diff --git a/media/libmediaplayerservice/nuplayer/RTSPSource.cpp b/media/libmediaplayerservice/nuplayer/RTSPSource.cpp
index 3385a19..18cf6d1 100644
--- a/media/libmediaplayerservice/nuplayer/RTSPSource.cpp
+++ b/media/libmediaplayerservice/nuplayer/RTSPSource.cpp
@@ -358,11 +358,10 @@
             uint32_t flags = 0;
 
             if (mHandler->isSeekable()) {
-                flags = FLAG_CAN_PAUSE | FLAG_CAN_SEEK;
-
-                // Seeking 10secs forward or backward is a very expensive
-                // operation for rtsp, so let's not enable that.
-                // The user can always use the seek bar.
+                flags = FLAG_CAN_PAUSE
+                        | FLAG_CAN_SEEK
+                        | FLAG_CAN_SEEK_BACKWARD
+                        | FLAG_CAN_SEEK_FORWARD;
             }
 
             notifyFlagsChanged(flags);
diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp
index 2e55c4f..1adab38 100644
--- a/media/libstagefright/ACodec.cpp
+++ b/media/libstagefright/ACodec.cpp
@@ -1106,9 +1106,49 @@
     if (!encoder && video && haveNativeWindow) {
         err = mOMX->storeMetaDataInBuffers(mNode, kPortIndexOutput, OMX_TRUE);
         if (err != OK) {
-            // allow failure
             ALOGE("[%s] storeMetaDataInBuffers failed w/ err %d",
                   mComponentName.c_str(), err);
+
+            // if adaptive playback has been requested, try JB fallback
+            // NOTE: THIS FALLBACK MECHANISM WILL BE REMOVED DUE TO ITS
+            // LARGE MEMORY REQUIREMENT
+
+            // we will not do adaptive playback on software accessed
+            // surfaces as they never had to respond to changes in the
+            // crop window, and we don't trust that they will be able to.
+            int usageBits = 0;
+            bool canDoAdaptivePlayback;
+
+            sp<NativeWindowWrapper> windowWrapper(
+                    static_cast<NativeWindowWrapper *>(obj.get()));
+            sp<ANativeWindow> nativeWindow = windowWrapper->getNativeWindow();
+
+            if (nativeWindow->query(
+                    nativeWindow.get(),
+                    NATIVE_WINDOW_CONSUMER_USAGE_BITS,
+                    &usageBits) != OK) {
+                canDoAdaptivePlayback = false;
+            } else {
+                canDoAdaptivePlayback =
+                    (usageBits &
+                            (GRALLOC_USAGE_SW_READ_MASK |
+                             GRALLOC_USAGE_SW_WRITE_MASK)) == 0;
+            }
+
+            int32_t maxWidth = 0, maxHeight = 0;
+            if (canDoAdaptivePlayback &&
+                msg->findInt32("max-width", &maxWidth) &&
+                msg->findInt32("max-height", &maxHeight)) {
+                ALOGV("[%s] prepareForAdaptivePlayback(%ldx%ld)",
+                      mComponentName.c_str(), maxWidth, maxHeight);
+
+                err = mOMX->prepareForAdaptivePlayback(
+                        mNode, kPortIndexOutput, OMX_TRUE, maxWidth, maxHeight);
+                ALOGW_IF(err != OK,
+                        "[%s] prepareForAdaptivePlayback failed w/ err %d",
+                        mComponentName.c_str(), err);
+            }
+            // allow failure
             err = OK;
         } else {
             ALOGV("[%s] storeMetaDataInBuffers succeeded", mComponentName.c_str());
@@ -2366,6 +2406,10 @@
 
     while (countBuffersOwnedByNativeWindow() > (size_t)minUndequeuedBufs
             && dequeueBufferFromNativeWindow() != NULL) {
+        // these buffers will be submitted as regular buffers; account for this
+        if (mStoreMetaDataInOutputBuffers && mMetaDataBuffersToSubmit > 0) {
+            --mMetaDataBuffersToSubmit;
+        }
     }
 }
 
@@ -3022,16 +3066,17 @@
     sp<ABuffer> buffer;
     int32_t err = OK;
     bool eos = false;
+    PortMode mode = getPortMode(kPortIndexInput);
 
     if (!msg->findBuffer("buffer", &buffer)) {
+        /* these are unfilled buffers returned by client */
         CHECK(msg->findInt32("err", &err));
 
         ALOGV("[%s] saw error %d instead of an input buffer",
              mCodec->mComponentName.c_str(), err);
 
         buffer.clear();
-
-        eos = true;
+        mode = KEEP_BUFFERS;
     }
 
     int32_t tmp;
@@ -3045,8 +3090,6 @@
 
     info->mStatus = BufferInfo::OWNED_BY_US;
 
-    PortMode mode = getPortMode(kPortIndexInput);
-
     switch (mode) {
         case KEEP_BUFFERS:
         {
@@ -4000,10 +4043,9 @@
 }
 
 void ACodec::ExecutingState::submitOutputBuffers() {
+    submitRegularOutputBuffers();
     if (mCodec->mStoreMetaDataInOutputBuffers) {
         submitOutputMetaBuffers();
-    } else {
-        submitRegularOutputBuffers();
     }
 }
 
@@ -4131,7 +4173,7 @@
 
 status_t ACodec::setParameters(const sp<AMessage> &params) {
     int32_t videoBitrate;
-    if (params->findInt32("videoBitrate", &videoBitrate)) {
+    if (params->findInt32("video-bitrate", &videoBitrate)) {
         OMX_VIDEO_CONFIG_BITRATETYPE configParams;
         InitOMXParams(&configParams);
         configParams.nPortIndex = kPortIndexOutput;
diff --git a/media/libstagefright/Android.mk b/media/libstagefright/Android.mk
index 1f68b51..6a2a696 100644
--- a/media/libstagefright/Android.mk
+++ b/media/libstagefright/Android.mk
@@ -89,6 +89,7 @@
         libutils \
         libvorbisidec \
         libz \
+        libpowermanager
 
 LOCAL_STATIC_LIBRARIES := \
         libstagefright_color_conversion \
diff --git a/media/libstagefright/AudioPlayer.cpp b/media/libstagefright/AudioPlayer.cpp
index e38e261..a8a8786 100644
--- a/media/libstagefright/AudioPlayer.cpp
+++ b/media/libstagefright/AudioPlayer.cpp
@@ -363,6 +363,7 @@
     mPositionTimeMediaUs = -1;
     mPositionTimeRealUs = -1;
     mSeeking = false;
+    mSeekTimeUs = 0;
     mReachedEOS = false;
     mFinalStatus = OK;
     mStarted = false;
@@ -602,15 +603,24 @@
 
             // need to adjust the mStartPosUs for offload decoding since parser
             // might not be able to get the exact seek time requested.
-            if (refreshSeekTime && useOffload()) {
-                if (postSeekComplete) {
-                    ALOGV("fillBuffer is going to post SEEK_COMPLETE");
-                    mObserver->postAudioSeekComplete();
-                    postSeekComplete = false;
-                }
+            if (refreshSeekTime) {
+                if (useOffload()) {
+                    if (postSeekComplete) {
+                        ALOGV("fillBuffer is going to post SEEK_COMPLETE");
+                        mObserver->postAudioSeekComplete();
+                        postSeekComplete = false;
+                    }
 
-                mStartPosUs = mPositionTimeMediaUs;
-                ALOGV("adjust seek time to: %.2f", mStartPosUs/ 1E6);
+                    mStartPosUs = mPositionTimeMediaUs;
+                    ALOGV("adjust seek time to: %.2f", mStartPosUs/ 1E6);
+                }
+                // Clear the seek time with mLock held, and only once we have valid
+                // mPositionTimeMediaUs and mPositionTimeRealUs. Before clearing mSeekTimeUs,
+                // check whether a new seek request arrived while we were reading from the
+                // source with mLock released.
+                if (!mSeeking) {
+                    mSeekTimeUs = 0;
+                }
             }
 
             if (!useOffload()) {
@@ -741,12 +751,10 @@
         return mPositionTimeRealUs;
     }
 
-    if (mPositionTimeMediaUs < 0 || mPositionTimeRealUs < 0) {
-        if (mSeeking) {
-            return mSeekTimeUs;
-        }
 
-        return 0;
+    if (mPositionTimeMediaUs < 0 || mPositionTimeRealUs < 0) {
+        // mSeekTimeUs is either seek time while seeking or 0 if playback did not start.
+        return mSeekTimeUs;
     }
 
     int64_t realTimeOffset = getRealTimeUsLocked() - mPositionTimeRealUs;
diff --git a/media/libstagefright/AwesomePlayer.cpp b/media/libstagefright/AwesomePlayer.cpp
index 5fbee7e..be6719a 100644
--- a/media/libstagefright/AwesomePlayer.cpp
+++ b/media/libstagefright/AwesomePlayer.cpp
@@ -259,6 +259,7 @@
 
         mQueue.cancelEvent(mBufferingEvent->eventID());
         mBufferingEventPending = false;
+        mAudioTearDown = false;
     }
 }
 
@@ -797,7 +798,9 @@
         }
     }
 
-    postBufferingEvent_l();
+    if (mFlags & (PLAYING | PREPARING)) {
+        postBufferingEvent_l();
+    }
 }
 
 void AwesomePlayer::sendCacheStats() {
@@ -927,6 +930,9 @@
 
             if ((err != OK) && mOffloadAudio) {
                 ALOGI("play_l() cannot create offload output, fallback to sw decode");
+                int64_t curTimeUs;
+                getPosition(&curTimeUs);
+
                 delete mAudioPlayer;
                 mAudioPlayer = NULL;
                 // if the player was started it will take care of stopping the source when destroyed
@@ -942,6 +948,10 @@
                     if (err != OK) {
                         mAudioSource.clear();
                     } else {
+                        mSeekNotificationSent = true;
+                        if (mExtractorFlags & MediaExtractor::CAN_SEEK) {
+                            seekTo_l(curTimeUs);
+                        }
                         createAudioPlayer_l();
                         err = startAudioPlayer_l(false);
                     }
@@ -993,6 +1003,10 @@
     }
     addBatteryData(params);
 
+    if (isStreamingHTTP()) {
+        postBufferingEvent_l();
+    }
+
     return OK;
 }
 
@@ -1405,8 +1419,10 @@
     mSeekTimeUs = timeUs;
     modifyFlags((AT_EOS | AUDIO_AT_EOS | VIDEO_AT_EOS), CLEAR);
 
-    notifyListener_l(MEDIA_PAUSED);
-    mMediaRenderingStartGeneration = ++mStartGeneration;
+    if (mFlags & PLAYING) {
+        notifyListener_l(MEDIA_PAUSED);
+        mMediaRenderingStartGeneration = ++mStartGeneration;
+    }
 
     seekAudioIfNecessary_l();
 
@@ -1651,6 +1667,16 @@
         return;
     }
 
+    // If we paused, then seeked, then resumed, it is possible that we have
+    // signaled SEEK_COMPLETE at a completely different media time than where
+    // we are now resuming.  Signal new position to media time provider.
+    // Cannot signal another SEEK_COMPLETE, as existing clients may not expect
+    // multiple SEEK_COMPLETE responses to a single seek() request.
+    if (mSeekNotificationSent && abs(mSeekTimeUs - videoTimeUs) > 10000) {
+        // notify if we are resuming more than 10ms away from desired seek time
+        notifyListener_l(MEDIA_SKIPPED);
+    }
+
     if (mAudioPlayer != NULL) {
         ALOGV("seeking audio to %lld us (%.2f secs).", videoTimeUs, videoTimeUs / 1E6);
 
@@ -1922,7 +1948,9 @@
             notifyListener_l(MEDIA_INFO, MEDIA_INFO_RENDERING_START);
         }
 
-        notifyIfMediaStarted_l();
+        if (mFlags & PLAYING) {
+            notifyIfMediaStarted_l();
+        }
     }
 
     mVideoBuffer->release();
@@ -2301,6 +2329,7 @@
     modifyFlags((PREPARING|PREPARE_CANCELLED|PREPARING_CONNECTED), CLEAR);
     mAsyncPrepareEvent = NULL;
     mPreparedCondition.broadcast();
+    mAudioTearDown = false;
 }
 
 // static
@@ -2374,6 +2403,20 @@
     modifyFlags(PREPARED, SET);
     mAsyncPrepareEvent = NULL;
     mPreparedCondition.broadcast();
+
+    if (mAudioTearDown) {
+        if (mPrepareResult == OK) {
+            if (mExtractorFlags & MediaExtractor::CAN_SEEK) {
+                seekTo_l(mAudioTearDownPosition);
+            }
+
+            if (mAudioTearDownWasPlaying) {
+                modifyFlags(CACHE_UNDERRUN, CLEAR);
+                play_l();
+            }
+        }
+        mAudioTearDown = false;
+    }
 }
 
 uint32_t AwesomePlayer::flags() const {
@@ -2791,7 +2834,7 @@
     ALOGV("onAudioTearDownEvent");
 
     // stream info is cleared by reset_l() so copy what we need
-    const bool wasPlaying = (mFlags & PLAYING);
+    mAudioTearDownWasPlaying = (mFlags & PLAYING);
     KeyedVector<String8, String8> uriHeaders(mUriHeaders);
     sp<DataSource> fileSource(mFileSource);
 
@@ -2800,8 +2843,7 @@
     mStatsLock.unlock();
 
     // get current position so we can start recreated stream from here
-    int64_t position = 0;
-    getPosition(&position);
+    getPosition(&mAudioTearDownPosition);
 
     // Reset and recreate
     reset_l();
@@ -2825,21 +2867,8 @@
     mAudioTearDown = true;
     mIsAsyncPrepare = true;
 
-    // Call parepare for the host decoding
+    // Call prepare for the host decoding
     beginPrepareAsync_l();
-
-    if (mPrepareResult == OK) {
-        if (mExtractorFlags & MediaExtractor::CAN_SEEK) {
-            seekTo_l(position);
-        }
-
-        if (wasPlaying) {
-            modifyFlags(CACHE_UNDERRUN, CLEAR);
-            play_l();
-        }
-    }
-
-    mAudioTearDown = false;
 }
 
 }  // namespace android
diff --git a/media/libstagefright/OMXClient.cpp b/media/libstagefright/OMXClient.cpp
index 9820ef5..9f9352d 100644
--- a/media/libstagefright/OMXClient.cpp
+++ b/media/libstagefright/OMXClient.cpp
@@ -69,6 +69,10 @@
     virtual status_t storeMetaDataInBuffers(
             node_id node, OMX_U32 port_index, OMX_BOOL enable);
 
+    virtual status_t prepareForAdaptivePlayback(
+            node_id node, OMX_U32 port_index, OMX_BOOL enable,
+            OMX_U32 maxFrameWidth, OMX_U32 maxFrameHeight);
+
     virtual status_t enableGraphicBuffers(
             node_id node, OMX_U32 port_index, OMX_BOOL enable);
 
@@ -268,6 +272,13 @@
     return getOMX(node)->storeMetaDataInBuffers(node, port_index, enable);
 }
 
+status_t MuxOMX::prepareForAdaptivePlayback(
+        node_id node, OMX_U32 port_index, OMX_BOOL enable,
+        OMX_U32 maxFrameWidth, OMX_U32 maxFrameHeight) {
+    return getOMX(node)->prepareForAdaptivePlayback(
+            node, port_index, enable, maxFrameWidth, maxFrameHeight);
+}
+
 status_t MuxOMX::enableGraphicBuffers(
         node_id node, OMX_U32 port_index, OMX_BOOL enable) {
     return getOMX(node)->enableGraphicBuffers(node, port_index, enable);
diff --git a/media/libstagefright/OMXCodec.cpp b/media/libstagefright/OMXCodec.cpp
index 7b37365..7f56af8 100644
--- a/media/libstagefright/OMXCodec.cpp
+++ b/media/libstagefright/OMXCodec.cpp
@@ -4585,12 +4585,6 @@
     caps->mFlags = 0;
     caps->mComponentName = componentName;
 
-    if (!isEncoder && !strncmp(mime, "video/", 6) &&
-            omx->storeMetaDataInBuffers(
-                    node, 1 /* port index */, OMX_TRUE) == OK) {
-        caps->mFlags |= CodecCapabilities::kFlagSupportsAdaptivePlayback;
-    }
-
     OMX_VIDEO_PARAM_PROFILELEVELTYPE param;
     InitOMXParams(&param);
 
@@ -4626,6 +4620,16 @@
         caps->mColorFormats.push(portFormat.eColorFormat);
     }
 
+    if (!isEncoder && !strncmp(mime, "video/", 6)) {
+        if (omx->storeMetaDataInBuffers(
+                    node, 1 /* port index */, OMX_TRUE) == OK ||
+            omx->prepareForAdaptivePlayback(
+                    node, 1 /* port index */, OMX_TRUE,
+                    1280 /* width */, 720 /* height */) == OK) {
+            caps->mFlags |= CodecCapabilities::kFlagSupportsAdaptivePlayback;
+        }
+    }
+
     CHECK_EQ(omx->freeNode(node), (status_t)OK);
 
     return OK;
diff --git a/media/libstagefright/TimedEventQueue.cpp b/media/libstagefright/TimedEventQueue.cpp
index 7e9c4bf..6a16bb4 100644
--- a/media/libstagefright/TimedEventQueue.cpp
+++ b/media/libstagefright/TimedEventQueue.cpp
@@ -31,17 +31,26 @@
 
 #include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/foundation/ALooper.h>
+#include <binder/IServiceManager.h>
+#include <powermanager/PowerManager.h>
+#include <binder/IPCThreadState.h>
+#include <utils/CallStack.h>
 
 namespace android {
 
 TimedEventQueue::TimedEventQueue()
     : mNextEventID(1),
       mRunning(false),
-      mStopped(false) {
+      mStopped(false),
+      mDeathRecipient(new PMDeathRecipient(this)) {
 }
 
 TimedEventQueue::~TimedEventQueue() {
     stop();
+    if (mPowerManager != 0) {
+        sp<IBinder> binder = mPowerManager->asBinder();
+        binder->unlinkToDeath(mDeathRecipient);
+    }
 }
 
 void TimedEventQueue::start() {
@@ -76,6 +85,11 @@
     void *dummy;
     pthread_join(mThread, &dummy);
 
+    // Some events may be left in the queue if we did not flush them; the wake
+    // lock acquired for them must be released.
+    if (!mQueue.empty()) {
+        releaseWakeLock_l();
+    }
     mQueue.clear();
 
     mRunning = false;
@@ -117,6 +131,9 @@
         mQueueHeadChangedCondition.signal();
     }
 
+    if (mQueue.empty()) {
+        acquireWakeLock_l();
+    }
     mQueue.insert(it, item);
 
     mQueueNotEmptyCondition.signal();
@@ -172,7 +189,9 @@
 
         (*it).event->setEventID(0);
         it = mQueue.erase(it);
-
+        if (mQueue.empty()) {
+            releaseWakeLock_l();
+        }
         if (stopAfterFirstMatch) {
             return;
         }
@@ -280,7 +299,9 @@
             event->setEventID(0);
 
             mQueue.erase(it);
-
+            if (mQueue.empty()) {
+                releaseWakeLock_l();
+            }
             return event;
         }
     }
@@ -290,5 +311,60 @@
     return NULL;
 }
 
+void TimedEventQueue::acquireWakeLock_l()
+{
+    if (mWakeLockToken != 0) {
+        return;
+    }
+    if (mPowerManager == 0) {
+        // use checkService() to avoid blocking if power service is not up yet
+        sp<IBinder> binder =
+            defaultServiceManager()->checkService(String16("power"));
+        if (binder == 0) {
+            ALOGW("cannot connect to the power manager service");
+        } else {
+            mPowerManager = interface_cast<IPowerManager>(binder);
+            binder->linkToDeath(mDeathRecipient);
+        }
+    }
+    if (mPowerManager != 0) {
+        sp<IBinder> binder = new BBinder();
+        int64_t token = IPCThreadState::self()->clearCallingIdentity();
+        status_t status = mPowerManager->acquireWakeLock(POWERMANAGER_PARTIAL_WAKE_LOCK,
+                                                         binder,
+                                                         String16("TimedEventQueue"),
+                                                         String16("media"));
+        IPCThreadState::self()->restoreCallingIdentity(token);
+        if (status == NO_ERROR) {
+            mWakeLockToken = binder;
+        }
+    }
+}
+
+void TimedEventQueue::releaseWakeLock_l()
+{
+    if (mWakeLockToken == 0) {
+        return;
+    }
+    if (mPowerManager != 0) {
+        int64_t token = IPCThreadState::self()->clearCallingIdentity();
+        mPowerManager->releaseWakeLock(mWakeLockToken, 0);
+        IPCThreadState::self()->restoreCallingIdentity(token);
+    }
+    mWakeLockToken.clear();
+}
+
+void TimedEventQueue::clearPowerManager()
+{
+    Mutex::Autolock _l(mLock);
+    releaseWakeLock_l();
+    mPowerManager.clear();
+}
+
+void TimedEventQueue::PMDeathRecipient::binderDied(const wp<IBinder>& who)
+{
+    mQueue->clearPowerManager();
+}
+
 }  // namespace android
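
The TimedEventQueue changes above tie a partial wake lock to queue occupancy: the lock is acquired on the empty-to-non-empty transition in postTimedEvent() and released whenever the queue drains (cancelEvents(), removeEventFromQueue_l(), or stop() without a flush), with PMDeathRecipient clearing the cached IPowerManager so a later acquire reconnects if the power service dies. A standalone sketch of that invariant, illustrative only and not part of the patch:

    #include <list>

    // The lock is held exactly while at least one event is pending.
    struct WakeLockedQueue {
        std::list<int> mQueue;
        bool mLocked = false;

        void post(int event) {
            if (mQueue.empty()) {
                mLocked = true;      // empty -> non-empty: acquireWakeLock_l()
            }
            mQueue.push_back(event);
        }

        void consume() {
            if (mQueue.empty()) {
                return;
            }
            mQueue.pop_front();
            if (mQueue.empty()) {
                mLocked = false;     // non-empty -> empty: releaseWakeLock_l()
            }
        }
    };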
 
diff --git a/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.cpp b/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.cpp
index 5f2b5c8..8375cac 100644
--- a/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.cpp
+++ b/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.cpp
@@ -141,6 +141,7 @@
       mWidth(176),
       mHeight(144),
       mBitrate(192000),  // in bps
+      mBitrateUpdated(false),
       mBitrateControlMode(VPX_VBR),  // variable bitrate
       mFrameDurationUs(33333),  // Defaults to 30 fps
       mDCTPartitions(0),
@@ -536,6 +537,22 @@
             return OMX_ErrorNone;
         }
 
+        case OMX_IndexConfigVideoBitrate:
+        {
+            OMX_VIDEO_CONFIG_BITRATETYPE *params =
+                (OMX_VIDEO_CONFIG_BITRATETYPE *)_params;
+
+            if (params->nPortIndex != kOutputPortIndex) {
+                return OMX_ErrorBadPortIndex;
+            }
+
+            if (mBitrate != params->nEncodeBitrate) {
+                mBitrate = params->nEncodeBitrate;
+                mBitrateUpdated = true;
+            }
+            return OMX_ErrorNone;
+        }
+
         default:
             return SimpleSoftOMXComponent::setConfig(index, _params);
     }
@@ -779,6 +796,21 @@
             mKeyFrameRequested = false;
         }
 
+        if (mBitrateUpdated) {
+            mCodecConfiguration->rc_target_bitrate = mBitrate/1000;
+            vpx_codec_err_t res = vpx_codec_enc_config_set(mCodecContext,
+                                                           mCodecConfiguration);
+            if (res != VPX_CODEC_OK) {
+                ALOGE("vp8 encoder failed to update bitrate: %s",
+                      vpx_codec_err_to_string(res));
+                notify(OMX_EventError,
+                       OMX_ErrorUndefined,
+                       0, // Extra notification data
+                       NULL); // Notification data pointer
+            }
+            mBitrateUpdated = false;
+        }
+
         codec_return = vpx_codec_encode(
                 mCodecContext,
                 &raw_frame,
@@ -803,6 +835,8 @@
             if (encoded_packet->kind == VPX_CODEC_CX_FRAME_PKT) {
                 outputBufferHeader->nTimeStamp = encoded_packet->data.frame.pts;
                 outputBufferHeader->nFlags = 0;
+                if (encoded_packet->data.frame.flags & VPX_FRAME_IS_KEY)
+                    outputBufferHeader->nFlags |= OMX_BUFFERFLAG_SYNCFRAME;
                 outputBufferHeader->nOffset = 0;
                 outputBufferHeader->nFilledLen = encoded_packet->data.frame.sz;
                 memcpy(outputBufferHeader->pBuffer,
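
With OMX_IndexConfigVideoBitrate handled in setConfig() above, the VP8 target bitrate can be changed while encoding; the next frame pushed through the encode loop re-applies the configuration via vpx_codec_enc_config_set(). A sketch of the client-side call; "mOMX", "mNode" and InitOMXParams() are assumed from the usual stagefright plumbing, and the 500 kbps value is illustrative:

    // Illustrative only: request a new target bitrate on the encoder's
    // output port at runtime.
    OMX_VIDEO_CONFIG_BITRATETYPE config;
    InitOMXParams(&config);            // fills nSize and nVersion
    config.nPortIndex = 1;             // kOutputPortIndex of the VP8 encoder
    config.nEncodeBitrate = 500000;    // new target, in bits per second
    status_t err = mOMX->setConfig(
            mNode, OMX_IndexConfigVideoBitrate, &config, sizeof(config));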
diff --git a/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.h b/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.h
index 4ee5e51..076830f 100644
--- a/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.h
+++ b/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.h
@@ -128,7 +128,10 @@
     int32_t mHeight;
 
     // Target bitrate set for the encoder, in bits per second.
-    int32_t mBitrate;
+    uint32_t mBitrate;
+
+    // True if a request to change the bitrate has been received.
+    bool mBitrateUpdated;
 
     // Bitrate control mode, either constant or variable
     vpx_rc_mode mBitrateControlMode;
diff --git a/media/libstagefright/include/AwesomePlayer.h b/media/libstagefright/include/AwesomePlayer.h
index b001cf4..271df8e 100644
--- a/media/libstagefright/include/AwesomePlayer.h
+++ b/media/libstagefright/include/AwesomePlayer.h
@@ -342,6 +342,8 @@
 
     bool    mOffloadAudio;
     bool    mAudioTearDown;
+    bool    mAudioTearDownWasPlaying;
+    int64_t mAudioTearDownPosition;
 
     status_t setVideoScalingMode(int32_t mode);
     status_t setVideoScalingMode_l(int32_t mode);
diff --git a/media/libstagefright/include/OMX.h b/media/libstagefright/include/OMX.h
index 7e53af3..31a5077 100644
--- a/media/libstagefright/include/OMX.h
+++ b/media/libstagefright/include/OMX.h
@@ -71,6 +71,10 @@
     virtual status_t storeMetaDataInBuffers(
             node_id node, OMX_U32 port_index, OMX_BOOL enable);
 
+    virtual status_t prepareForAdaptivePlayback(
+            node_id node, OMX_U32 portIndex, OMX_BOOL enable,
+            OMX_U32 max_frame_width, OMX_U32 max_frame_height);
+
     virtual status_t useBuffer(
             node_id node, OMX_U32 port_index, const sp<IMemory> &params,
             buffer_id *buffer);
diff --git a/media/libstagefright/include/OMXNodeInstance.h b/media/libstagefright/include/OMXNodeInstance.h
index ae498b4..339179e 100644
--- a/media/libstagefright/include/OMXNodeInstance.h
+++ b/media/libstagefright/include/OMXNodeInstance.h
@@ -58,6 +58,10 @@
 
     status_t storeMetaDataInBuffers(OMX_U32 portIndex, OMX_BOOL enable);
 
+    status_t prepareForAdaptivePlayback(
+            OMX_U32 portIndex, OMX_BOOL enable,
+            OMX_U32 maxFrameWidth, OMX_U32 maxFrameHeight);
+
     status_t useBuffer(
             OMX_U32 portIndex, const sp<IMemory> &params,
             OMX::buffer_id *buffer);
diff --git a/media/libstagefright/include/TimedEventQueue.h b/media/libstagefright/include/TimedEventQueue.h
index 11f844c..4e49c83 100644
--- a/media/libstagefright/include/TimedEventQueue.h
+++ b/media/libstagefright/include/TimedEventQueue.h
@@ -23,6 +23,7 @@
 #include <utils/List.h>
 #include <utils/RefBase.h>
 #include <utils/threads.h>
+#include <powermanager/IPowerManager.h>
 
 namespace android {
 
@@ -57,6 +58,21 @@
         Event &operator=(const Event &);
     };
 
+    class PMDeathRecipient : public IBinder::DeathRecipient {
+    public:
+                    PMDeathRecipient(TimedEventQueue *queue) : mQueue(queue) {}
+        virtual     ~PMDeathRecipient() {}
+
+        // IBinder::DeathRecipient
+        virtual     void        binderDied(const wp<IBinder>& who);
+
+    private:
+                    PMDeathRecipient(const PMDeathRecipient&);
+                    PMDeathRecipient& operator = (const PMDeathRecipient&);
+
+                    TimedEventQueue *mQueue;
+    };
+
     TimedEventQueue();
     ~TimedEventQueue();
 
@@ -96,6 +112,8 @@
 
     static int64_t getRealTimeUs();
 
+    void clearPowerManager();
+
 private:
     struct QueueItem {
         sp<Event> event;
@@ -118,11 +136,18 @@
     bool mRunning;
     bool mStopped;
 
+    sp<IPowerManager>       mPowerManager;
+    sp<IBinder>             mWakeLockToken;
+    const sp<PMDeathRecipient> mDeathRecipient;
+
     static void *ThreadWrapper(void *me);
     void threadEntry();
 
     sp<Event> removeEventFromQueue_l(event_id id);
 
+    void acquireWakeLock_l();
+    void releaseWakeLock_l();
+
     TimedEventQueue(const TimedEventQueue &);
     TimedEventQueue &operator=(const TimedEventQueue &);
 };
diff --git a/media/libstagefright/omx/OMX.cpp b/media/libstagefright/omx/OMX.cpp
index aaa9f89..84a0e10 100644
--- a/media/libstagefright/omx/OMX.cpp
+++ b/media/libstagefright/omx/OMX.cpp
@@ -331,6 +331,13 @@
     return findInstance(node)->storeMetaDataInBuffers(port_index, enable);
 }
 
+status_t OMX::prepareForAdaptivePlayback(
+        node_id node, OMX_U32 portIndex, OMX_BOOL enable,
+        OMX_U32 maxFrameWidth, OMX_U32 maxFrameHeight) {
+    return findInstance(node)->prepareForAdaptivePlayback(
+            portIndex, enable, maxFrameWidth, maxFrameHeight);
+}
+
 status_t OMX::useBuffer(
         node_id node, OMX_U32 port_index, const sp<IMemory> &params,
         buffer_id *buffer) {
diff --git a/media/libstagefright/omx/OMXNodeInstance.cpp b/media/libstagefright/omx/OMXNodeInstance.cpp
index ef683a0..46e5d71 100644
--- a/media/libstagefright/omx/OMXNodeInstance.cpp
+++ b/media/libstagefright/omx/OMXNodeInstance.cpp
@@ -417,6 +417,40 @@
     return err;
 }
 
+status_t OMXNodeInstance::prepareForAdaptivePlayback(
+        OMX_U32 portIndex, OMX_BOOL enable, OMX_U32 maxFrameWidth,
+        OMX_U32 maxFrameHeight) {
+    Mutex::Autolock autolock(mLock);
+
+    OMX_INDEXTYPE index;
+    OMX_STRING name = const_cast<OMX_STRING>(
+            "OMX.google.android.index.prepareForAdaptivePlayback");
+
+    OMX_ERRORTYPE err = OMX_GetExtensionIndex(mHandle, name, &index);
+    if (err != OMX_ErrorNone) {
+        ALOGW_IF(enable, "OMX_GetExtensionIndex %s failed", name);
+        return StatusFromOMXError(err);
+    }
+
+    PrepareForAdaptivePlaybackParams params;
+    params.nSize = sizeof(params);
+    params.nVersion.s.nVersionMajor = 1;
+    params.nVersion.s.nVersionMinor = 0;
+    params.nVersion.s.nRevision = 0;
+    params.nVersion.s.nStep = 0;
+
+    params.nPortIndex = portIndex;
+    params.bEnable = enable;
+    params.nMaxFrameWidth = maxFrameWidth;
+    params.nMaxFrameHeight = maxFrameHeight;
+    if ((err = OMX_SetParameter(mHandle, index, &params)) != OMX_ErrorNone) {
+        ALOGW("OMX_SetParameter failed for PrepareForAdaptivePlayback "
+              "with error %d (0x%08x)", err, err);
+        return UNKNOWN_ERROR;
+    }
+    return err;
+}
+
 status_t OMXNodeInstance::useBuffer(
         OMX_U32 portIndex, const sp<IMemory> &params,
         OMX::buffer_id *buffer) {
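
The new OMXNodeInstance method above resolves the vendor extension "OMX.google.android.index.prepareForAdaptivePlayback" and passes it a PrepareForAdaptivePlaybackParams block. For reference, this is the layout implied by the fields filled in above; the struct itself is expected to live alongside the other Android OMX extensions (e.g. media/hardware/HardwareAPI.h), which is an assumption, not something shown in this patch:

    struct PrepareForAdaptivePlaybackParams {
        OMX_U32 nSize;              // sizeof(PrepareForAdaptivePlaybackParams)
        OMX_VERSIONTYPE nVersion;   // 1.0.0.0, as set above
        OMX_U32 nPortIndex;         // decoder output port
        OMX_BOOL bEnable;           // OMX_TRUE to enable seamless resolution change
        OMX_U32 nMaxFrameWidth;     // largest width the component must accommodate
        OMX_U32 nMaxFrameHeight;    // largest height the component must accommodate
    };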
diff --git a/media/libstagefright/rtsp/MyHandler.h b/media/libstagefright/rtsp/MyHandler.h
index 946f602..f4b5846 100644
--- a/media/libstagefright/rtsp/MyHandler.h
+++ b/media/libstagefright/rtsp/MyHandler.h
@@ -1681,6 +1681,26 @@
         return true;
     }
 
+    void handleFirstAccessUnit() {
+        if (mFirstAccessUnit) {
+            sp<AMessage> msg = mNotify->dup();
+            msg->setInt32("what", kWhatConnected);
+            msg->post();
+
+            if (mSeekable) {
+                for (size_t i = 0; i < mTracks.size(); ++i) {
+                    TrackInfo *info = &mTracks.editItemAt(i);
+
+                    postNormalPlayTimeMapping(
+                            i,
+                            info->mNormalPlayTimeRTP, info->mNormalPlayTimeUs);
+                }
+            }
+
+            mFirstAccessUnit = false;
+        }
+    }
+
     void onTimeUpdate(int32_t trackIndex, uint32_t rtpTime, uint64_t ntpTime) {
         ALOGV("onTimeUpdate track %d, rtpTime = 0x%08x, ntpTime = 0x%016llx",
              trackIndex, rtpTime, ntpTime);
@@ -1712,6 +1732,8 @@
             }
         }
         if (mAllTracksHaveTime && dataReceivedOnAllChannels()) {
+            handleFirstAccessUnit();
+
             // Time is now established, lets start timestamping immediately
             for (size_t i = 0; i < mTracks.size(); ++i) {
                 TrackInfo *trackInfo = &mTracks.editItemAt(i);
@@ -1745,23 +1767,7 @@
             return;
         }
 
-        if (mFirstAccessUnit) {
-            sp<AMessage> msg = mNotify->dup();
-            msg->setInt32("what", kWhatConnected);
-            msg->post();
-
-            if (mSeekable) {
-                for (size_t i = 0; i < mTracks.size(); ++i) {
-                    TrackInfo *info = &mTracks.editItemAt(i);
-
-                    postNormalPlayTimeMapping(
-                            i,
-                            info->mNormalPlayTimeRTP, info->mNormalPlayTimeUs);
-                }
-            }
-
-            mFirstAccessUnit = false;
-        }
+        handleFirstAccessUnit();
 
         TrackInfo *track = &mTracks.editItemAt(trackIndex);
 
diff --git a/media/libstagefright/wifi-display/Android.mk b/media/libstagefright/wifi-display/Android.mk
index c7d107e..f70454a 100644
--- a/media/libstagefright/wifi-display/Android.mk
+++ b/media/libstagefright/wifi-display/Android.mk
@@ -3,16 +3,9 @@
 include $(CLEAR_VARS)
 
 LOCAL_SRC_FILES:= \
-        MediaReceiver.cpp               \
         MediaSender.cpp                 \
         Parameters.cpp                  \
-        rtp/RTPAssembler.cpp            \
-        rtp/RTPReceiver.cpp             \
         rtp/RTPSender.cpp               \
-        sink/DirectRenderer.cpp         \
-        sink/WifiDisplaySink.cpp        \
-        SNTPClient.cpp                  \
-        TimeSyncer.cpp                  \
         source/Converter.cpp            \
         source/MediaPuller.cpp          \
         source/PlaybackSession.cpp      \
@@ -42,87 +35,3 @@
 LOCAL_MODULE_TAGS:= optional
 
 include $(BUILD_SHARED_LIBRARY)
-
-################################################################################
-
-include $(CLEAR_VARS)
-
-LOCAL_SRC_FILES:= \
-        wfd.cpp                 \
-
-LOCAL_SHARED_LIBRARIES:= \
-        libbinder                       \
-        libgui                          \
-        libmedia                        \
-        libstagefright                  \
-        libstagefright_foundation       \
-        libstagefright_wfd              \
-        libutils                        \
-        liblog                          \
-
-LOCAL_MODULE:= wfd
-
-include $(BUILD_EXECUTABLE)
-
-################################################################################
-
-include $(CLEAR_VARS)
-
-LOCAL_SRC_FILES:= \
-        udptest.cpp                 \
-
-LOCAL_SHARED_LIBRARIES:= \
-        libbinder                       \
-        libgui                          \
-        libmedia                        \
-        libstagefright                  \
-        libstagefright_foundation       \
-        libstagefright_wfd              \
-        libutils                        \
-        liblog                          \
-
-LOCAL_MODULE:= udptest
-
-include $(BUILD_EXECUTABLE)
-
-################################################################################
-
-include $(CLEAR_VARS)
-
-LOCAL_SRC_FILES:= \
-        rtptest.cpp                 \
-
-LOCAL_SHARED_LIBRARIES:= \
-        libbinder                       \
-        libgui                          \
-        libmedia                        \
-        libstagefright                  \
-        libstagefright_foundation       \
-        libstagefright_wfd              \
-        libutils                        \
-        liblog                          \
-
-LOCAL_MODULE:= rtptest
-
-include $(BUILD_EXECUTABLE)
-
-################################################################################
-
-include $(CLEAR_VARS)
-
-LOCAL_SRC_FILES:= \
-        nettest.cpp                     \
-
-LOCAL_SHARED_LIBRARIES:= \
-        libbinder                       \
-        libgui                          \
-        libmedia                        \
-        libstagefright                  \
-        libstagefright_foundation       \
-        libstagefright_wfd              \
-        libutils                        \
-        liblog                          \
-
-LOCAL_MODULE:= nettest
-
-include $(BUILD_EXECUTABLE)
diff --git a/media/libstagefright/wifi-display/MediaReceiver.cpp b/media/libstagefright/wifi-display/MediaReceiver.cpp
deleted file mode 100644
index 5524235..0000000
--- a/media/libstagefright/wifi-display/MediaReceiver.cpp
+++ /dev/null
@@ -1,328 +0,0 @@
-/*
- * Copyright 2013, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "MediaReceiver"
-#include <utils/Log.h>
-
-#include "MediaReceiver.h"
-
-#include "AnotherPacketSource.h"
-#include "rtp/RTPReceiver.h"
-
-#include <media/stagefright/foundation/ABuffer.h>
-#include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/foundation/AMessage.h>
-#include <media/stagefright/foundation/ANetworkSession.h>
-#include <media/stagefright/MetaData.h>
-#include <media/stagefright/Utils.h>
-
-namespace android {
-
-MediaReceiver::MediaReceiver(
-        const sp<ANetworkSession> &netSession,
-        const sp<AMessage> &notify)
-    : mNetSession(netSession),
-      mNotify(notify),
-      mMode(MODE_UNDEFINED),
-      mGeneration(0),
-      mInitStatus(OK),
-      mInitDoneCount(0) {
-}
-
-MediaReceiver::~MediaReceiver() {
-}
-
-ssize_t MediaReceiver::addTrack(
-        RTPReceiver::TransportMode rtpMode,
-        RTPReceiver::TransportMode rtcpMode,
-        int32_t *localRTPPort) {
-    if (mMode != MODE_UNDEFINED) {
-        return INVALID_OPERATION;
-    }
-
-    size_t trackIndex = mTrackInfos.size();
-
-    TrackInfo info;
-
-    sp<AMessage> notify = new AMessage(kWhatReceiverNotify, id());
-    notify->setInt32("generation", mGeneration);
-    notify->setSize("trackIndex", trackIndex);
-
-    info.mReceiver = new RTPReceiver(mNetSession, notify);
-    looper()->registerHandler(info.mReceiver);
-
-    info.mReceiver->registerPacketType(
-            33, RTPReceiver::PACKETIZATION_TRANSPORT_STREAM);
-
-    info.mReceiver->registerPacketType(
-            96, RTPReceiver::PACKETIZATION_AAC);
-
-    info.mReceiver->registerPacketType(
-            97, RTPReceiver::PACKETIZATION_H264);
-
-    status_t err = info.mReceiver->initAsync(
-            rtpMode,
-            rtcpMode,
-            localRTPPort);
-
-    if (err != OK) {
-        looper()->unregisterHandler(info.mReceiver->id());
-        info.mReceiver.clear();
-
-        return err;
-    }
-
-    mTrackInfos.push_back(info);
-
-    return trackIndex;
-}
-
-status_t MediaReceiver::connectTrack(
-        size_t trackIndex,
-        const char *remoteHost,
-        int32_t remoteRTPPort,
-        int32_t remoteRTCPPort) {
-    if (trackIndex >= mTrackInfos.size()) {
-        return -ERANGE;
-    }
-
-    TrackInfo *info = &mTrackInfos.editItemAt(trackIndex);
-    return info->mReceiver->connect(remoteHost, remoteRTPPort, remoteRTCPPort);
-}
-
-status_t MediaReceiver::initAsync(Mode mode) {
-    if ((mode == MODE_TRANSPORT_STREAM || mode == MODE_TRANSPORT_STREAM_RAW)
-            && mTrackInfos.size() > 1) {
-        return INVALID_OPERATION;
-    }
-
-    sp<AMessage> msg = new AMessage(kWhatInit, id());
-    msg->setInt32("mode", mode);
-    msg->post();
-
-    return OK;
-}
-
-void MediaReceiver::onMessageReceived(const sp<AMessage> &msg) {
-    switch (msg->what()) {
-        case kWhatInit:
-        {
-            int32_t mode;
-            CHECK(msg->findInt32("mode", &mode));
-
-            CHECK_EQ(mMode, MODE_UNDEFINED);
-            mMode = (Mode)mode;
-
-            if (mInitStatus != OK || mInitDoneCount == mTrackInfos.size()) {
-                notifyInitDone(mInitStatus);
-            }
-
-            mTSParser = new ATSParser(
-                    ATSParser::ALIGNED_VIDEO_DATA
-                        | ATSParser::TS_TIMESTAMPS_ARE_ABSOLUTE);
-
-            mFormatKnownMask = 0;
-            break;
-        }
-
-        case kWhatReceiverNotify:
-        {
-            int32_t generation;
-            CHECK(msg->findInt32("generation", &generation));
-            if (generation != mGeneration) {
-                break;
-            }
-
-            onReceiverNotify(msg);
-            break;
-        }
-
-        default:
-            TRESPASS();
-    }
-}
-
-void MediaReceiver::onReceiverNotify(const sp<AMessage> &msg) {
-    int32_t what;
-    CHECK(msg->findInt32("what", &what));
-
-    switch (what) {
-        case RTPReceiver::kWhatInitDone:
-        {
-            ++mInitDoneCount;
-
-            int32_t err;
-            CHECK(msg->findInt32("err", &err));
-
-            if (err != OK) {
-                mInitStatus = err;
-                ++mGeneration;
-            }
-
-            if (mMode != MODE_UNDEFINED) {
-                if (mInitStatus != OK || mInitDoneCount == mTrackInfos.size()) {
-                    notifyInitDone(mInitStatus);
-                }
-            }
-            break;
-        }
-
-        case RTPReceiver::kWhatError:
-        {
-            int32_t err;
-            CHECK(msg->findInt32("err", &err));
-
-            notifyError(err);
-            break;
-        }
-
-        case RTPReceiver::kWhatAccessUnit:
-        {
-            size_t trackIndex;
-            CHECK(msg->findSize("trackIndex", &trackIndex));
-
-            sp<ABuffer> accessUnit;
-            CHECK(msg->findBuffer("accessUnit", &accessUnit));
-
-            int32_t followsDiscontinuity;
-            if (!msg->findInt32(
-                        "followsDiscontinuity", &followsDiscontinuity)) {
-                followsDiscontinuity = 0;
-            }
-
-            if (mMode == MODE_TRANSPORT_STREAM) {
-                if (followsDiscontinuity) {
-                    mTSParser->signalDiscontinuity(
-                            ATSParser::DISCONTINUITY_TIME, NULL /* extra */);
-                }
-
-                for (size_t offset = 0;
-                        offset < accessUnit->size(); offset += 188) {
-                    status_t err = mTSParser->feedTSPacket(
-                             accessUnit->data() + offset, 188);
-
-                    if (err != OK) {
-                        notifyError(err);
-                        break;
-                    }
-                }
-
-                drainPackets(0 /* trackIndex */, ATSParser::VIDEO);
-                drainPackets(1 /* trackIndex */, ATSParser::AUDIO);
-            } else {
-                postAccessUnit(trackIndex, accessUnit, NULL);
-            }
-            break;
-        }
-
-        case RTPReceiver::kWhatPacketLost:
-        {
-            notifyPacketLost();
-            break;
-        }
-
-        default:
-            TRESPASS();
-    }
-}
-
-void MediaReceiver::drainPackets(
-        size_t trackIndex, ATSParser::SourceType type) {
-    sp<AnotherPacketSource> source =
-        static_cast<AnotherPacketSource *>(
-                mTSParser->getSource(type).get());
-
-    if (source == NULL) {
-        return;
-    }
-
-    sp<AMessage> format;
-    if (!(mFormatKnownMask & (1ul << trackIndex))) {
-        sp<MetaData> meta = source->getFormat();
-        CHECK(meta != NULL);
-
-        CHECK_EQ((status_t)OK, convertMetaDataToMessage(meta, &format));
-
-        mFormatKnownMask |= 1ul << trackIndex;
-    }
-
-    status_t finalResult;
-    while (source->hasBufferAvailable(&finalResult)) {
-        sp<ABuffer> accessUnit;
-        status_t err = source->dequeueAccessUnit(&accessUnit);
-        if (err == OK) {
-            postAccessUnit(trackIndex, accessUnit, format);
-            format.clear();
-        } else if (err != INFO_DISCONTINUITY) {
-            notifyError(err);
-        }
-    }
-
-    if (finalResult != OK) {
-        notifyError(finalResult);
-    }
-}
-
-void MediaReceiver::notifyInitDone(status_t err) {
-    sp<AMessage> notify = mNotify->dup();
-    notify->setInt32("what", kWhatInitDone);
-    notify->setInt32("err", err);
-    notify->post();
-}
-
-void MediaReceiver::notifyError(status_t err) {
-    sp<AMessage> notify = mNotify->dup();
-    notify->setInt32("what", kWhatError);
-    notify->setInt32("err", err);
-    notify->post();
-}
-
-void MediaReceiver::notifyPacketLost() {
-    sp<AMessage> notify = mNotify->dup();
-    notify->setInt32("what", kWhatPacketLost);
-    notify->post();
-}
-
-void MediaReceiver::postAccessUnit(
-        size_t trackIndex,
-        const sp<ABuffer> &accessUnit,
-        const sp<AMessage> &format) {
-    sp<AMessage> notify = mNotify->dup();
-    notify->setInt32("what", kWhatAccessUnit);
-    notify->setSize("trackIndex", trackIndex);
-    notify->setBuffer("accessUnit", accessUnit);
-
-    if (format != NULL) {
-        notify->setMessage("format", format);
-    }
-
-    notify->post();
-}
-
-status_t MediaReceiver::informSender(
-        size_t trackIndex, const sp<AMessage> &params) {
-    if (trackIndex >= mTrackInfos.size()) {
-        return -ERANGE;
-    }
-
-    TrackInfo *info = &mTrackInfos.editItemAt(trackIndex);
-    return info->mReceiver->informSender(params);
-}
-
-}  // namespace android
-
-
diff --git a/media/libstagefright/wifi-display/MediaReceiver.h b/media/libstagefright/wifi-display/MediaReceiver.h
deleted file mode 100644
index afbb407..0000000
--- a/media/libstagefright/wifi-display/MediaReceiver.h
+++ /dev/null
@@ -1,111 +0,0 @@
-/*
- * Copyright 2013, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include <media/stagefright/foundation/AHandler.h>
-
-#include "ATSParser.h"
-#include "rtp/RTPReceiver.h"
-
-namespace android {
-
-struct ABuffer;
-struct ANetworkSession;
-struct AMessage;
-struct ATSParser;
-
-// This class facilitates receiving of media data for one or more tracks
-// over RTP. Either a 1:1 track to RTP channel mapping is used or a single
-// RTP channel provides the data for a transport stream that is consequently
-// demuxed and its track's data provided to the observer.
-struct MediaReceiver : public AHandler {
-    enum {
-        kWhatInitDone,
-        kWhatError,
-        kWhatAccessUnit,
-        kWhatPacketLost,
-    };
-
-    MediaReceiver(
-            const sp<ANetworkSession> &netSession,
-            const sp<AMessage> &notify);
-
-    ssize_t addTrack(
-            RTPReceiver::TransportMode rtpMode,
-            RTPReceiver::TransportMode rtcpMode,
-            int32_t *localRTPPort);
-
-    status_t connectTrack(
-            size_t trackIndex,
-            const char *remoteHost,
-            int32_t remoteRTPPort,
-            int32_t remoteRTCPPort);
-
-    enum Mode {
-        MODE_UNDEFINED,
-        MODE_TRANSPORT_STREAM,
-        MODE_TRANSPORT_STREAM_RAW,
-        MODE_ELEMENTARY_STREAMS,
-    };
-    status_t initAsync(Mode mode);
-
-    status_t informSender(size_t trackIndex, const sp<AMessage> &params);
-
-protected:
-    virtual void onMessageReceived(const sp<AMessage> &msg);
-    virtual ~MediaReceiver();
-
-private:
-    enum {
-        kWhatInit,
-        kWhatReceiverNotify,
-    };
-
-    struct TrackInfo {
-        sp<RTPReceiver> mReceiver;
-    };
-
-    sp<ANetworkSession> mNetSession;
-    sp<AMessage> mNotify;
-
-    Mode mMode;
-    int32_t mGeneration;
-
-    Vector<TrackInfo> mTrackInfos;
-
-    status_t mInitStatus;
-    size_t mInitDoneCount;
-
-    sp<ATSParser> mTSParser;
-    uint32_t mFormatKnownMask;
-
-    void onReceiverNotify(const sp<AMessage> &msg);
-
-    void drainPackets(size_t trackIndex, ATSParser::SourceType type);
-
-    void notifyInitDone(status_t err);
-    void notifyError(status_t err);
-    void notifyPacketLost();
-
-    void postAccessUnit(
-            size_t trackIndex,
-            const sp<ABuffer> &accessUnit,
-            const sp<AMessage> &format);
-
-    DISALLOW_EVIL_CONSTRUCTORS(MediaReceiver);
-};
-
-}  // namespace android
-
diff --git a/media/libstagefright/wifi-display/SNTPClient.cpp b/media/libstagefright/wifi-display/SNTPClient.cpp
deleted file mode 100644
index 5c0af6a..0000000
--- a/media/libstagefright/wifi-display/SNTPClient.cpp
+++ /dev/null
@@ -1,174 +0,0 @@
-/*
- * Copyright 2013, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "SNTPClient.h"
-
-#include <media/stagefright/foundation/ALooper.h>
-#include <media/stagefright/Utils.h>
-
-#include <arpa/inet.h>
-#include <netdb.h>
-#include <netinet/in.h>
-#include <sys/socket.h>
-#include <unistd.h>
-
-namespace android {
-
-SNTPClient::SNTPClient() {
-}
-
-status_t SNTPClient::requestTime(const char *host) {
-    struct hostent *ent;
-    int64_t requestTimeNTP, requestTimeUs;
-    ssize_t n;
-    int64_t responseTimeUs, responseTimeNTP;
-    int64_t originateTimeNTP, receiveTimeNTP, transmitTimeNTP;
-    int64_t roundTripTimeNTP, clockOffsetNTP;
-
-    status_t err = UNKNOWN_ERROR;
-
-    int s = socket(AF_INET, SOCK_DGRAM, 0);
-
-    if (s < 0) {
-        err = -errno;
-
-        goto bail;
-    }
-
-    ent = gethostbyname(host);
-
-    if (ent == NULL) {
-        err = -ENOENT;
-        goto bail2;
-    }
-
-    struct sockaddr_in hostAddr;
-    memset(hostAddr.sin_zero, 0, sizeof(hostAddr.sin_zero));
-    hostAddr.sin_family = AF_INET;
-    hostAddr.sin_port = htons(kNTPPort);
-    hostAddr.sin_addr.s_addr = *(in_addr_t *)ent->h_addr;
-
-    uint8_t packet[kNTPPacketSize];
-    memset(packet, 0, sizeof(packet));
-
-    packet[0] = kNTPModeClient | (kNTPVersion << 3);
-
-    requestTimeNTP = getNowNTP();
-    requestTimeUs = ALooper::GetNowUs();
-    writeTimeStamp(&packet[kNTPTransmitTimeOffset], requestTimeNTP);
-
-    n = sendto(
-            s, packet, sizeof(packet), 0,
-            (const struct sockaddr *)&hostAddr, sizeof(hostAddr));
-
-    if (n < 0) {
-        err = -errno;
-        goto bail2;
-    }
-
-    memset(packet, 0, sizeof(packet));
-
-    do {
-        n = recv(s, packet, sizeof(packet), 0);
-    } while (n < 0 && errno == EINTR);
-
-    if (n < 0) {
-        err = -errno;
-        goto bail2;
-    }
-
-    responseTimeUs = ALooper::GetNowUs();
-
-    responseTimeNTP = requestTimeNTP + makeNTP(responseTimeUs - requestTimeUs);
-
-    originateTimeNTP = readTimeStamp(&packet[kNTPOriginateTimeOffset]);
-    receiveTimeNTP = readTimeStamp(&packet[kNTPReceiveTimeOffset]);
-    transmitTimeNTP = readTimeStamp(&packet[kNTPTransmitTimeOffset]);
-
-    roundTripTimeNTP =
-        makeNTP(responseTimeUs - requestTimeUs)
-            - (transmitTimeNTP - receiveTimeNTP);
-
-    clockOffsetNTP =
-        ((receiveTimeNTP - originateTimeNTP)
-            + (transmitTimeNTP - responseTimeNTP)) / 2;
-
-    mTimeReferenceNTP = responseTimeNTP + clockOffsetNTP;
-    mTimeReferenceUs = responseTimeUs;
-    mRoundTripTimeNTP = roundTripTimeNTP;
-
-    err = OK;
-
-bail2:
-    close(s);
-    s = -1;
-
-bail:
-    return err;
-}
-
-int64_t SNTPClient::adjustTimeUs(int64_t timeUs) const {
-    uint64_t nowNTP =
-        mTimeReferenceNTP + makeNTP(timeUs - mTimeReferenceUs);
-
-    int64_t nowUs =
-        (nowNTP >> 32) * 1000000ll
-        + ((nowNTP & 0xffffffff) * 1000000ll) / (1ll << 32);
-
-    nowUs -= ((70ll * 365 + 17) * 24) * 60 * 60 * 1000000ll;
-
-    return nowUs;
-}
-
-// static
-void SNTPClient::writeTimeStamp(uint8_t *dst, uint64_t ntpTime) {
-    *dst++ = (ntpTime >> 56) & 0xff;
-    *dst++ = (ntpTime >> 48) & 0xff;
-    *dst++ = (ntpTime >> 40) & 0xff;
-    *dst++ = (ntpTime >> 32) & 0xff;
-    *dst++ = (ntpTime >> 24) & 0xff;
-    *dst++ = (ntpTime >> 16) & 0xff;
-    *dst++ = (ntpTime >> 8) & 0xff;
-    *dst++ = ntpTime & 0xff;
-}
-
-// static
-uint64_t SNTPClient::readTimeStamp(const uint8_t *dst) {
-    return U64_AT(dst);
-}
-
-// static
-uint64_t SNTPClient::getNowNTP() {
-    struct timeval tv;
-    gettimeofday(&tv, NULL /* time zone */);
-
-    uint64_t nowUs = tv.tv_sec * 1000000ll + tv.tv_usec;
-
-    nowUs += ((70ll * 365 + 17) * 24) * 60 * 60 * 1000000ll;
-
-    return makeNTP(nowUs);
-}
-
-// static
-uint64_t SNTPClient::makeNTP(uint64_t deltaUs) {
-    uint64_t hi = deltaUs / 1000000ll;
-    uint64_t lo = ((1ll << 32) * (deltaUs % 1000000ll)) / 1000000ll;
-
-    return (hi << 32) | lo;
-}
-
-}  // namespace android
-
diff --git a/media/libstagefright/wifi-display/SNTPClient.h b/media/libstagefright/wifi-display/SNTPClient.h
deleted file mode 100644
index 967d1fc..0000000
--- a/media/libstagefright/wifi-display/SNTPClient.h
+++ /dev/null
@@ -1,62 +0,0 @@
-/*
- * Copyright 2013, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef SNTP_CLIENT_H_
-
-#define SNTP_CLIENT_H_
-
-#include <media/stagefright/foundation/ABase.h>
-#include <utils/Errors.h>
-
-namespace android {
-
-// Implementation of the SNTP (Simple Network Time Protocol)
-struct SNTPClient {
-    SNTPClient();
-
-    status_t requestTime(const char *host);
-
-    // given a time obtained from ALooper::GetNowUs()
-    // return the number of us elapsed since Jan 1 1970 00:00:00 (UTC).
-    int64_t adjustTimeUs(int64_t timeUs) const;
-
-private:
-    enum {
-        kNTPPort = 123,
-        kNTPPacketSize = 48,
-        kNTPModeClient = 3,
-        kNTPVersion = 3,
-        kNTPTransmitTimeOffset = 40,
-        kNTPOriginateTimeOffset = 24,
-        kNTPReceiveTimeOffset = 32,
-    };
-
-    uint64_t mTimeReferenceNTP;
-    int64_t mTimeReferenceUs;
-    int64_t mRoundTripTimeNTP;
-
-    static void writeTimeStamp(uint8_t *dst, uint64_t ntpTime);
-    static uint64_t readTimeStamp(const uint8_t *dst);
-
-    static uint64_t getNowNTP();
-    static uint64_t makeNTP(uint64_t deltaUs);
-
-    DISALLOW_EVIL_CONSTRUCTORS(SNTPClient);
-};
-
-}  // namespace android
-
-#endif  // SNTP_CLIENT_H_
diff --git a/media/libstagefright/wifi-display/TimeSyncer.cpp b/media/libstagefright/wifi-display/TimeSyncer.cpp
deleted file mode 100644
index 0f4d93a..0000000
--- a/media/libstagefright/wifi-display/TimeSyncer.cpp
+++ /dev/null
@@ -1,337 +0,0 @@
-/*
- * Copyright 2013, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NEBUG 0
-#define LOG_TAG "TimeSyncer"
-#include <utils/Log.h>
-
-#include "TimeSyncer.h"
-
-#include <media/stagefright/foundation/ABuffer.h>
-#include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/foundation/AHandler.h>
-#include <media/stagefright/foundation/ALooper.h>
-#include <media/stagefright/foundation/AMessage.h>
-#include <media/stagefright/foundation/ANetworkSession.h>
-#include <media/stagefright/Utils.h>
-
-namespace android {
-
-TimeSyncer::TimeSyncer(
-        const sp<ANetworkSession> &netSession, const sp<AMessage> &notify)
-    : mNetSession(netSession),
-      mNotify(notify),
-      mIsServer(false),
-      mConnected(false),
-      mUDPSession(0),
-      mSeqNo(0),
-      mTotalTimeUs(0.0),
-      mPendingT1(0ll),
-      mTimeoutGeneration(0) {
-}
-
-TimeSyncer::~TimeSyncer() {
-}
-
-void TimeSyncer::startServer(unsigned localPort) {
-    sp<AMessage> msg = new AMessage(kWhatStartServer, id());
-    msg->setInt32("localPort", localPort);
-    msg->post();
-}
-
-void TimeSyncer::startClient(const char *remoteHost, unsigned remotePort) {
-    sp<AMessage> msg = new AMessage(kWhatStartClient, id());
-    msg->setString("remoteHost", remoteHost);
-    msg->setInt32("remotePort", remotePort);
-    msg->post();
-}
-
-void TimeSyncer::onMessageReceived(const sp<AMessage> &msg) {
-    switch (msg->what()) {
-        case kWhatStartClient:
-        {
-            AString remoteHost;
-            CHECK(msg->findString("remoteHost", &remoteHost));
-
-            int32_t remotePort;
-            CHECK(msg->findInt32("remotePort", &remotePort));
-
-            sp<AMessage> notify = new AMessage(kWhatUDPNotify, id());
-
-            CHECK_EQ((status_t)OK,
-                     mNetSession->createUDPSession(
-                         0 /* localPort */,
-                         remoteHost.c_str(),
-                         remotePort,
-                         notify,
-                         &mUDPSession));
-
-            postSendPacket();
-            break;
-        }
-
-        case kWhatStartServer:
-        {
-            mIsServer = true;
-
-            int32_t localPort;
-            CHECK(msg->findInt32("localPort", &localPort));
-
-            sp<AMessage> notify = new AMessage(kWhatUDPNotify, id());
-
-            CHECK_EQ((status_t)OK,
-                     mNetSession->createUDPSession(
-                         localPort, notify, &mUDPSession));
-
-            break;
-        }
-
-        case kWhatSendPacket:
-        {
-            if (mHistory.size() == 0) {
-                ALOGI("starting batch");
-            }
-
-            TimeInfo ti;
-            memset(&ti, 0, sizeof(ti));
-
-            ti.mT1 = ALooper::GetNowUs();
-
-            CHECK_EQ((status_t)OK,
-                     mNetSession->sendRequest(
-                         mUDPSession, &ti, sizeof(ti)));
-
-            mPendingT1 = ti.mT1;
-            postTimeout();
-            break;
-        }
-
-        case kWhatTimedOut:
-        {
-            int32_t generation;
-            CHECK(msg->findInt32("generation", &generation));
-
-            if (generation != mTimeoutGeneration) {
-                break;
-            }
-
-            ALOGI("timed out, sending another request");
-            postSendPacket();
-            break;
-        }
-
-        case kWhatUDPNotify:
-        {
-            int32_t reason;
-            CHECK(msg->findInt32("reason", &reason));
-
-            switch (reason) {
-                case ANetworkSession::kWhatError:
-                {
-                    int32_t sessionID;
-                    CHECK(msg->findInt32("sessionID", &sessionID));
-
-                    int32_t err;
-                    CHECK(msg->findInt32("err", &err));
-
-                    AString detail;
-                    CHECK(msg->findString("detail", &detail));
-
-                    ALOGE("An error occurred in session %d (%d, '%s/%s').",
-                          sessionID,
-                          err,
-                          detail.c_str(),
-                          strerror(-err));
-
-                    mNetSession->destroySession(sessionID);
-
-                    cancelTimeout();
-
-                    notifyError(err);
-                    break;
-                }
-
-                case ANetworkSession::kWhatDatagram:
-                {
-                    int32_t sessionID;
-                    CHECK(msg->findInt32("sessionID", &sessionID));
-
-                    sp<ABuffer> packet;
-                    CHECK(msg->findBuffer("data", &packet));
-
-                    int64_t arrivalTimeUs;
-                    CHECK(packet->meta()->findInt64(
-                                "arrivalTimeUs", &arrivalTimeUs));
-
-                    CHECK_EQ(packet->size(), sizeof(TimeInfo));
-
-                    TimeInfo *ti = (TimeInfo *)packet->data();
-
-                    if (mIsServer) {
-                        if (!mConnected) {
-                            AString fromAddr;
-                            CHECK(msg->findString("fromAddr", &fromAddr));
-
-                            int32_t fromPort;
-                            CHECK(msg->findInt32("fromPort", &fromPort));
-
-                            CHECK_EQ((status_t)OK,
-                                     mNetSession->connectUDPSession(
-                                         mUDPSession, fromAddr.c_str(), fromPort));
-
-                            mConnected = true;
-                        }
-
-                        ti->mT2 = arrivalTimeUs;
-                        ti->mT3 = ALooper::GetNowUs();
-
-                        CHECK_EQ((status_t)OK,
-                                 mNetSession->sendRequest(
-                                     mUDPSession, ti, sizeof(*ti)));
-                    } else {
-                        if (ti->mT1 != mPendingT1) {
-                            break;
-                        }
-
-                        cancelTimeout();
-                        mPendingT1 = 0;
-
-                        ti->mT4 = arrivalTimeUs;
-
-                        // One way delay for a packet to travel from client
-                        // to server or back (assumed to be the same either way).
-                        int64_t delay =
-                            (ti->mT2 - ti->mT1 + ti->mT4 - ti->mT3) / 2;
-
-                        // Offset between the client clock (T1, T4) and the
-                        // server clock (T2, T3) timestamps.
-                        int64_t offset =
-                            (ti->mT2 - ti->mT1 - ti->mT4 + ti->mT3) / 2;
-
-                        mHistory.push_back(*ti);
-
-                        ALOGV("delay = %lld us,\toffset %lld us",
-                               delay,
-                               offset);
-
-                        if (mHistory.size() < kNumPacketsPerBatch) {
-                            postSendPacket(1000000ll / 30);
-                        } else {
-                            notifyOffset();
-
-                            ALOGI("batch done");
-
-                            mHistory.clear();
-                            postSendPacket(kBatchDelayUs);
-                        }
-                    }
-                    break;
-                }
-
-                default:
-                    TRESPASS();
-            }
-
-            break;
-        }
-
-        default:
-            TRESPASS();
-    }
-}
-
-void TimeSyncer::postSendPacket(int64_t delayUs) {
-    (new AMessage(kWhatSendPacket, id()))->post(delayUs);
-}
-
-void TimeSyncer::postTimeout() {
-    sp<AMessage> msg = new AMessage(kWhatTimedOut, id());
-    msg->setInt32("generation", mTimeoutGeneration);
-    msg->post(kTimeoutDelayUs);
-}
-
-void TimeSyncer::cancelTimeout() {
-    ++mTimeoutGeneration;
-}
-
-void TimeSyncer::notifyError(status_t err) {
-    if (mNotify == NULL) {
-        looper()->stop();
-        return;
-    }
-
-    sp<AMessage> notify = mNotify->dup();
-    notify->setInt32("what", kWhatError);
-    notify->setInt32("err", err);
-    notify->post();
-}
-
-// static
-int TimeSyncer::CompareRountripTime(const TimeInfo *ti1, const TimeInfo *ti2) {
-    int64_t rt1 = ti1->mT4 - ti1->mT1;
-    int64_t rt2 = ti2->mT4 - ti2->mT1;
-
-    if (rt1 < rt2) {
-        return -1;
-    } else if (rt1 > rt2) {
-        return 1;
-    }
-
-    return 0;
-}
-
-void TimeSyncer::notifyOffset() {
-    mHistory.sort(CompareRountripTime);
-
-    int64_t sum = 0ll;
-    size_t count = 0;
-
-    // Only consider the third of the information associated with the best
-    // (smallest) roundtrip times.
-    for (size_t i = 0; i < mHistory.size() / 3; ++i) {
-        const TimeInfo *ti = &mHistory[i];
-
-#if 0
-        // One way delay for a packet to travel from client
-        // to server or back (assumed to be the same either way).
-        int64_t delay =
-            (ti->mT2 - ti->mT1 + ti->mT4 - ti->mT3) / 2;
-#endif
-
-        // Offset between the client clock (T1, T4) and the
-        // server clock (T2, T3) timestamps.
-        int64_t offset =
-            (ti->mT2 - ti->mT1 - ti->mT4 + ti->mT3) / 2;
-
-        ALOGV("(%d) RT: %lld us, offset: %lld us",
-              i, ti->mT4 - ti->mT1, offset);
-
-        sum += offset;
-        ++count;
-    }
-
-    if (mNotify == NULL) {
-        ALOGI("avg. offset is %lld", sum / count);
-        return;
-    }
-
-    sp<AMessage> notify = mNotify->dup();
-    notify->setInt32("what", kWhatTimeOffset);
-    notify->setInt64("offset", sum / count);
-    notify->post();
-}
-
-}  // namespace android
diff --git a/media/libstagefright/wifi-display/TimeSyncer.h b/media/libstagefright/wifi-display/TimeSyncer.h
deleted file mode 100644
index 4e7571f..0000000
--- a/media/libstagefright/wifi-display/TimeSyncer.h
+++ /dev/null
@@ -1,109 +0,0 @@
-/*
- * Copyright 2013, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef TIME_SYNCER_H_
-
-#define TIME_SYNCER_H_
-
-#include <media/stagefright/foundation/AHandler.h>
-
-namespace android {
-
-struct ANetworkSession;
-
-/*
-   TimeSyncer allows us to synchronize time between a client and a server.
-   The client sends a UDP packet containing its send-time to the server,
-   the server sends that packet back to the client amended with information
-   about when it was received as well as the time the reply was sent back.
-   Finally the client receives the reply and has now enough information to
-   compute the clock offset between client and server assuming that packet
-   exchange is symmetric, i.e. time for a packet client->server and
-   server->client is roughly equal.
-   This exchange is repeated a number of times and the average offset computed
-   over the 30% of packets that had the lowest roundtrip times.
-   The offset is determined every 10 secs to account for slight differences in
-   clock frequency.
-*/
-struct TimeSyncer : public AHandler {
-    enum {
-        kWhatError,
-        kWhatTimeOffset,
-    };
-    TimeSyncer(
-            const sp<ANetworkSession> &netSession,
-            const sp<AMessage> &notify);
-
-    void startServer(unsigned localPort);
-    void startClient(const char *remoteHost, unsigned remotePort);
-
-protected:
-    virtual ~TimeSyncer();
-
-    virtual void onMessageReceived(const sp<AMessage> &msg);
-
-private:
-    enum {
-        kWhatStartServer,
-        kWhatStartClient,
-        kWhatUDPNotify,
-        kWhatSendPacket,
-        kWhatTimedOut,
-    };
-
-    struct TimeInfo {
-        int64_t mT1;  // client timestamp at send
-        int64_t mT2;  // server timestamp at receive
-        int64_t mT3;  // server timestamp at send
-        int64_t mT4;  // client timestamp at receive
-    };
-
-    enum {
-        kNumPacketsPerBatch = 30,
-    };
-    static const int64_t kTimeoutDelayUs = 500000ll;
-    static const int64_t kBatchDelayUs = 60000000ll;  // every minute
-
-    sp<ANetworkSession> mNetSession;
-    sp<AMessage> mNotify;
-
-    bool mIsServer;
-    bool mConnected;
-    int32_t mUDPSession;
-    uint32_t mSeqNo;
-    double mTotalTimeUs;
-
-    Vector<TimeInfo> mHistory;
-
-    int64_t mPendingT1;
-    int32_t mTimeoutGeneration;
-
-    void postSendPacket(int64_t delayUs = 0ll);
-
-    void postTimeout();
-    void cancelTimeout();
-
-    void notifyError(status_t err);
-    void notifyOffset();
-
-    static int CompareRountripTime(const TimeInfo *ti1, const TimeInfo *ti2);
-
-    DISALLOW_EVIL_CONSTRUCTORS(TimeSyncer);
-};
-
-}  // namespace android
-
-#endif  // TIME_SYNCER_H_
diff --git a/media/libstagefright/wifi-display/nettest.cpp b/media/libstagefright/wifi-display/nettest.cpp
deleted file mode 100644
index 73c0d80..0000000
--- a/media/libstagefright/wifi-display/nettest.cpp
+++ /dev/null
@@ -1,400 +0,0 @@
-/*
- * Copyright 2013, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NEBUG 0
-#define LOG_TAG "nettest"
-#include <utils/Log.h>
-
-#include "TimeSyncer.h"
-
-#include <binder/ProcessState.h>
-#include <media/stagefright/foundation/ABuffer.h>
-#include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/foundation/AHandler.h>
-#include <media/stagefright/foundation/ALooper.h>
-#include <media/stagefright/foundation/AMessage.h>
-#include <media/stagefright/foundation/ANetworkSession.h>
-#include <media/stagefright/foundation/hexdump.h>
-#include <media/stagefright/DataSource.h>
-#include <media/stagefright/MediaDefs.h>
-#include <media/stagefright/NuMediaExtractor.h>
-#include <media/stagefright/Utils.h>
-
-namespace android {
-
-struct TestHandler : public AHandler {
-    TestHandler(const sp<ANetworkSession> &netSession);
-
-    void listen(int32_t port);
-    void connect(const char *host, int32_t port);
-
-protected:
-    virtual ~TestHandler();
-    virtual void onMessageReceived(const sp<AMessage> &msg);
-
-private:
-    enum {
-        kTimeSyncerPort = 8123,
-    };
-
-    enum {
-        kWhatListen,
-        kWhatConnect,
-        kWhatTimeSyncerNotify,
-        kWhatNetNotify,
-        kWhatSendMore,
-        kWhatStop,
-    };
-
-    sp<ANetworkSession> mNetSession;
-    sp<TimeSyncer> mTimeSyncer;
-
-    int32_t mServerSessionID;
-    int32_t mSessionID;
-
-    int64_t mTimeOffsetUs;
-    bool mTimeOffsetValid;
-
-    int32_t mCounter;
-
-    int64_t mMaxDelayMs;
-
-    void dumpDelay(int32_t counter, int64_t delayMs);
-
-    DISALLOW_EVIL_CONSTRUCTORS(TestHandler);
-};
-
-TestHandler::TestHandler(const sp<ANetworkSession> &netSession)
-    : mNetSession(netSession),
-      mServerSessionID(0),
-      mSessionID(0),
-      mTimeOffsetUs(-1ll),
-      mTimeOffsetValid(false),
-      mCounter(0),
-      mMaxDelayMs(-1ll) {
-}
-
-TestHandler::~TestHandler() {
-}
-
-void TestHandler::listen(int32_t port) {
-    sp<AMessage> msg = new AMessage(kWhatListen, id());
-    msg->setInt32("port", port);
-    msg->post();
-}
-
-void TestHandler::connect(const char *host, int32_t port) {
-    sp<AMessage> msg = new AMessage(kWhatConnect, id());
-    msg->setString("host", host);
-    msg->setInt32("port", port);
-    msg->post();
-}
-
-void TestHandler::dumpDelay(int32_t counter, int64_t delayMs) {
-    static const int64_t kMinDelayMs = 0;
-    static const int64_t kMaxDelayMs = 300;
-
-    const char *kPattern = "########################################";
-    size_t kPatternSize = strlen(kPattern);
-
-    int n = (kPatternSize * (delayMs - kMinDelayMs))
-                / (kMaxDelayMs - kMinDelayMs);
-
-    if (n < 0) {
-        n = 0;
-    } else if ((size_t)n > kPatternSize) {
-        n = kPatternSize;
-    }
-
-    if (delayMs > mMaxDelayMs) {
-        mMaxDelayMs = delayMs;
-    }
-
-    ALOGI("[%d] (%4lld ms / %4lld ms) %s",
-          counter,
-          delayMs,
-          mMaxDelayMs,
-          kPattern + kPatternSize - n);
-}
-
-void TestHandler::onMessageReceived(const sp<AMessage> &msg) {
-    switch (msg->what()) {
-        case kWhatListen:
-        {
-            sp<AMessage> notify = new AMessage(kWhatTimeSyncerNotify, id());
-            mTimeSyncer = new TimeSyncer(mNetSession, notify);
-            looper()->registerHandler(mTimeSyncer);
-
-            notify = new AMessage(kWhatNetNotify, id());
-
-            int32_t port;
-            CHECK(msg->findInt32("port", &port));
-
-            struct in_addr ifaceAddr;
-            ifaceAddr.s_addr = INADDR_ANY;
-
-            CHECK_EQ((status_t)OK,
-                     mNetSession->createTCPDatagramSession(
-                         ifaceAddr,
-                         port,
-                         notify,
-                         &mServerSessionID));
-            break;
-        }
-
-        case kWhatConnect:
-        {
-            sp<AMessage> notify = new AMessage(kWhatTimeSyncerNotify, id());
-            mTimeSyncer = new TimeSyncer(mNetSession, notify);
-            looper()->registerHandler(mTimeSyncer);
-            mTimeSyncer->startServer(kTimeSyncerPort);
-
-            AString host;
-            CHECK(msg->findString("host", &host));
-
-            int32_t port;
-            CHECK(msg->findInt32("port", &port));
-
-            notify = new AMessage(kWhatNetNotify, id());
-
-            CHECK_EQ((status_t)OK,
-                     mNetSession->createTCPDatagramSession(
-                         0 /* localPort */,
-                         host.c_str(),
-                         port,
-                         notify,
-                         &mSessionID));
-            break;
-        }
-
-        case kWhatNetNotify:
-        {
-            int32_t reason;
-            CHECK(msg->findInt32("reason", &reason));
-
-            switch (reason) {
-                case ANetworkSession::kWhatConnected:
-                {
-                    ALOGI("kWhatConnected");
-
-                    (new AMessage(kWhatSendMore, id()))->post();
-                    break;
-                }
-
-                case ANetworkSession::kWhatClientConnected:
-                {
-                    ALOGI("kWhatClientConnected");
-
-                    CHECK_EQ(mSessionID, 0);
-                    CHECK(msg->findInt32("sessionID", &mSessionID));
-
-                    AString clientIP;
-                    CHECK(msg->findString("client-ip", &clientIP));
-
-                    mTimeSyncer->startClient(clientIP.c_str(), kTimeSyncerPort);
-                    break;
-                }
-
-                case ANetworkSession::kWhatDatagram:
-                {
-                    sp<ABuffer> packet;
-                    CHECK(msg->findBuffer("data", &packet));
-
-                    CHECK_EQ(packet->size(), 12u);
-
-                    int32_t counter = U32_AT(packet->data());
-                    int64_t timeUs = U64_AT(packet->data() + 4);
-
-                    if (mTimeOffsetValid) {
-                        timeUs -= mTimeOffsetUs;
-                        int64_t nowUs = ALooper::GetNowUs();
-                        int64_t delayMs = (nowUs - timeUs) / 1000ll;
-
-                        dumpDelay(counter, delayMs);
-                    } else {
-                        ALOGI("received %d", counter);
-                    }
-                    break;
-                }
-
-                case ANetworkSession::kWhatError:
-                {
-                    ALOGE("kWhatError");
-                    break;
-                }
-
-                default:
-                    TRESPASS();
-            }
-            break;
-        }
-
-        case kWhatTimeSyncerNotify:
-        {
-            CHECK(msg->findInt64("offset", &mTimeOffsetUs));
-            mTimeOffsetValid = true;
-            break;
-        }
-
-        case kWhatSendMore:
-        {
-            uint8_t buffer[4 + 8];
-            buffer[0] = mCounter >> 24;
-            buffer[1] = (mCounter >> 16) & 0xff;
-            buffer[2] = (mCounter >> 8) & 0xff;
-            buffer[3] = mCounter & 0xff;
-
-            int64_t nowUs = ALooper::GetNowUs();
-
-            buffer[4] = nowUs >> 56;
-            buffer[5] = (nowUs >> 48) & 0xff;
-            buffer[6] = (nowUs >> 40) & 0xff;
-            buffer[7] = (nowUs >> 32) & 0xff;
-            buffer[8] = (nowUs >> 24) & 0xff;
-            buffer[9] = (nowUs >> 16) & 0xff;
-            buffer[10] = (nowUs >> 8) & 0xff;
-            buffer[11] = nowUs & 0xff;
-
-            ++mCounter;
-
-            CHECK_EQ((status_t)OK,
-                     mNetSession->sendRequest(
-                         mSessionID,
-                         buffer,
-                         sizeof(buffer),
-                         true /* timeValid */,
-                         nowUs));
-
-            msg->post(100000ll);
-            break;
-        }
-
-        case kWhatStop:
-        {
-            if (mSessionID != 0) {
-                mNetSession->destroySession(mSessionID);
-                mSessionID = 0;
-            }
-
-            if (mServerSessionID != 0) {
-                mNetSession->destroySession(mServerSessionID);
-                mServerSessionID = 0;
-            }
-
-            looper()->stop();
-            break;
-        }
-
-        default:
-            TRESPASS();
-    }
-}
-
-}  // namespace android
-
-static void usage(const char *me) {
-    fprintf(stderr,
-            "usage: %s -c host:port\tconnect to remote host\n"
-            "               -l port   \tlisten\n",
-            me);
-}
-
-int main(int argc, char **argv) {
-    using namespace android;
-
-    // srand(time(NULL));
-
-    ProcessState::self()->startThreadPool();
-
-    DataSource::RegisterDefaultSniffers();
-
-    int32_t connectToPort = -1;
-    AString connectToHost;
-
-    int32_t listenOnPort = -1;
-
-    int res;
-    while ((res = getopt(argc, argv, "hc:l:")) >= 0) {
-        switch (res) {
-            case 'c':
-            {
-                const char *colonPos = strrchr(optarg, ':');
-
-                if (colonPos == NULL) {
-                    usage(argv[0]);
-                    exit(1);
-                }
-
-                connectToHost.setTo(optarg, colonPos - optarg);
-
-                char *end;
-                connectToPort = strtol(colonPos + 1, &end, 10);
-
-                if (*end != '\0' || end == colonPos + 1
-                        || connectToPort < 0 || connectToPort > 65535) {
-                    fprintf(stderr, "Illegal port specified.\n");
-                    exit(1);
-                }
-                break;
-            }
-
-            case 'l':
-            {
-                char *end;
-                listenOnPort = strtol(optarg, &end, 10);
-
-                if (*end != '\0' || end == optarg
-                        || listenOnPort < 0 || listenOnPort > 65535) {
-                    fprintf(stderr, "Illegal port specified.\n");
-                    exit(1);
-                }
-                break;
-            }
-
-            case '?':
-            case 'h':
-                usage(argv[0]);
-                exit(1);
-        }
-    }
-
-    if ((listenOnPort < 0 && connectToPort < 0)
-            || (listenOnPort >= 0 && connectToPort >= 0)) {
-        fprintf(stderr,
-                "You need to select either client or server mode.\n");
-        exit(1);
-    }
-
-    sp<ANetworkSession> netSession = new ANetworkSession;
-    netSession->start();
-
-    sp<ALooper> looper = new ALooper;
-
-    sp<TestHandler> handler = new TestHandler(netSession);
-    looper->registerHandler(handler);
-
-    if (listenOnPort >= 0) {
-        handler->listen(listenOnPort);
-    }
-
-    if (connectToPort >= 0) {
-        handler->connect(connectToHost.c_str(), connectToPort);
-    }
-
-    looper->start(true /* runOnCallingThread */);
-
-    return 0;
-}
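
The test traffic above is deliberately simple: each datagram is 12 bytes, a big-endian 32-bit counter followed by a big-endian 64-bit send timestamp in microseconds, and the receiver subtracts the TimeSyncer offset to chart one-way delay. A standalone sketch of the same packing and unpacking (the helper names are illustrative, not from the tree):

#include <stdint.h>

// Pack the 12-byte test datagram: [0..3] counter, [4..11] timestamp in
// microseconds, both big-endian, mirroring the kWhatSendMore handler above.
static void packTestDatagram(uint8_t out[12], uint32_t counter, int64_t nowUs) {
    for (int i = 0; i < 4; ++i) {
        out[i] = (counter >> (8 * (3 - i))) & 0xff;
    }
    uint64_t t = (uint64_t)nowUs;
    for (int i = 0; i < 8; ++i) {
        out[4 + i] = (t >> (8 * (7 - i))) & 0xff;
    }
}

// Unpack it again, as the kWhatDatagram handler does with U32_AT/U64_AT.
static void unpackTestDatagram(
        const uint8_t in[12], uint32_t *counter, int64_t *timeUs) {
    uint32_t c = 0;
    for (int i = 0; i < 4; ++i) {
        c = (c << 8) | in[i];
    }
    uint64_t t = 0;
    for (int i = 0; i < 8; ++i) {
        t = (t << 8) | in[4 + i];
    }
    *counter = c;
    *timeUs = (int64_t)t;
}
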
diff --git a/media/libstagefright/wifi-display/rtp/RTPAssembler.cpp b/media/libstagefright/wifi-display/rtp/RTPAssembler.cpp
deleted file mode 100644
index 7a96081..0000000
--- a/media/libstagefright/wifi-display/rtp/RTPAssembler.cpp
+++ /dev/null
@@ -1,328 +0,0 @@
-/*
- * Copyright 2013, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "RTPAssembler"
-#include <utils/Log.h>
-
-#include "RTPAssembler.h"
-
-#include <media/stagefright/foundation/ABuffer.h>
-#include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/foundation/AMessage.h>
-#include <media/stagefright/foundation/hexdump.h>
-#include <media/stagefright/MediaErrors.h>
-
-namespace android {
-
-RTPReceiver::Assembler::Assembler(const sp<AMessage> &notify)
-    : mNotify(notify) {
-}
-
-void RTPReceiver::Assembler::postAccessUnit(
-        const sp<ABuffer> &accessUnit, bool followsDiscontinuity) {
-    sp<AMessage> notify = mNotify->dup();
-    notify->setInt32("what", RTPReceiver::kWhatAccessUnit);
-    notify->setBuffer("accessUnit", accessUnit);
-    notify->setInt32("followsDiscontinuity", followsDiscontinuity);
-    notify->post();
-}
-////////////////////////////////////////////////////////////////////////////////
-
-RTPReceiver::TSAssembler::TSAssembler(const sp<AMessage> &notify)
-    : Assembler(notify),
-      mSawDiscontinuity(false) {
-}
-
-void RTPReceiver::TSAssembler::signalDiscontinuity() {
-    mSawDiscontinuity = true;
-}
-
-status_t RTPReceiver::TSAssembler::processPacket(const sp<ABuffer> &packet) {
-    int32_t rtpTime;
-    CHECK(packet->meta()->findInt32("rtp-time", &rtpTime));
-
-    packet->meta()->setInt64("timeUs", (rtpTime * 100ll) / 9);
-
-    postAccessUnit(packet, mSawDiscontinuity);
-
-    if (mSawDiscontinuity) {
-        mSawDiscontinuity = false;
-    }
-
-    return OK;
-}
-
-////////////////////////////////////////////////////////////////////////////////
-
-RTPReceiver::H264Assembler::H264Assembler(const sp<AMessage> &notify)
-    : Assembler(notify),
-      mState(0),
-      mIndicator(0),
-      mNALType(0),
-      mAccessUnitRTPTime(0) {
-}
-
-void RTPReceiver::H264Assembler::signalDiscontinuity() {
-    reset();
-}
-
-status_t RTPReceiver::H264Assembler::processPacket(const sp<ABuffer> &packet) {
-    status_t err = internalProcessPacket(packet);
-
-    if (err != OK) {
-        reset();
-    }
-
-    return err;
-}
-
-status_t RTPReceiver::H264Assembler::internalProcessPacket(
-        const sp<ABuffer> &packet) {
-    const uint8_t *data = packet->data();
-    size_t size = packet->size();
-
-    switch (mState) {
-        case 0:
-        {
-            if (size < 1 || (data[0] & 0x80)) {
-                ALOGV("Malformed H264 RTP packet (empty or F-bit set)");
-                return ERROR_MALFORMED;
-            }
-
-            unsigned nalType = data[0] & 0x1f;
-            if (nalType >= 1 && nalType <= 23) {
-                addSingleNALUnit(packet);
-                ALOGV("added single NAL packet");
-            } else if (nalType == 28) {
-                // FU-A
-                unsigned indicator = data[0];
-                CHECK((indicator & 0x1f) == 28);
-
-                if (size < 2) {
-                    ALOGV("Malformed H264 FU-A packet (single byte)");
-                    return ERROR_MALFORMED;
-                }
-
-                if (!(data[1] & 0x80)) {
-                    ALOGV("Malformed H264 FU-A packet (no start bit)");
-                    return ERROR_MALFORMED;
-                }
-
-                mIndicator = data[0];
-                mNALType = data[1] & 0x1f;
-                uint32_t nri = (data[0] >> 5) & 3;
-
-                clearAccumulator();
-
-                uint8_t byte = mNALType | (nri << 5);
-                appendToAccumulator(&byte, 1);
-                appendToAccumulator(data + 2, size - 2);
-
-                int32_t rtpTime;
-                CHECK(packet->meta()->findInt32("rtp-time", &rtpTime));
-                mAccumulator->meta()->setInt32("rtp-time", rtpTime);
-
-                if (data[1] & 0x40) {
-                    // End bit set on the very first fragment, i.e. the whole
-                    // NAL unit fits in a single FU-A packet.
-                    addSingleNALUnit(mAccumulator);
-                    clearAccumulator();
-
-                    ALOGV("added FU-A");
-                    break;
-                }
-
-                mState = 1;
-            } else if (nalType == 24) {
-                // STAP-A
-
-                status_t err = addSingleTimeAggregationPacket(packet);
-                if (err != OK) {
-                    return err;
-                }
-            } else {
-                ALOGV("Malformed H264 packet (unknown type %d)", nalType);
-                return ERROR_UNSUPPORTED;
-            }
-            break;
-        }
-
-        case 1:
-        {
-            if (size < 2
-                    || data[0] != mIndicator
-                    || (data[1] & 0x1f) != mNALType
-                    || (data[1] & 0x80)) {
-                ALOGV("Malformed H264 FU-A packet (indicator, "
-                      "type or start bit mismatch)");
-
-                return ERROR_MALFORMED;
-            }
-
-            appendToAccumulator(data + 2, size - 2);
-
-            if (data[1] & 0x40) {
-                addSingleNALUnit(mAccumulator);
-
-                clearAccumulator();
-                mState = 0;
-
-                ALOGV("added FU-A");
-            }
-            break;
-        }
-
-        default:
-            TRESPASS();
-    }
-
-    int32_t marker;
-    CHECK(packet->meta()->findInt32("M", &marker));
-
-    if (marker) {
-        flushAccessUnit();
-    }
-
-    return OK;
-}
-
-void RTPReceiver::H264Assembler::reset() {
-    mNALUnits.clear();
-
-    clearAccumulator();
-    mState = 0;
-}
-
-void RTPReceiver::H264Assembler::clearAccumulator() {
-    if (mAccumulator != NULL) {
-        // XXX Too expensive.
-        mAccumulator.clear();
-    }
-}
-
-void RTPReceiver::H264Assembler::appendToAccumulator(
-        const void *data, size_t size) {
-    if (mAccumulator == NULL) {
-        mAccumulator = new ABuffer(size);
-        memcpy(mAccumulator->data(), data, size);
-        return;
-    }
-
-    if (mAccumulator->size() + size > mAccumulator->capacity()) {
-        sp<ABuffer> buf = new ABuffer(mAccumulator->size() + size);
-        memcpy(buf->data(), mAccumulator->data(), mAccumulator->size());
-        buf->setRange(0, mAccumulator->size());
-
-        int32_t rtpTime;
-        if (mAccumulator->meta()->findInt32("rtp-time", &rtpTime)) {
-            buf->meta()->setInt32("rtp-time", rtpTime);
-        }
-
-        mAccumulator = buf;
-    }
-
-    memcpy(mAccumulator->data() + mAccumulator->size(), data, size);
-    mAccumulator->setRange(0, mAccumulator->size() + size);
-}
-
-void RTPReceiver::H264Assembler::addSingleNALUnit(const sp<ABuffer> &packet) {
-    if (mNALUnits.empty()) {
-        int32_t rtpTime;
-        CHECK(packet->meta()->findInt32("rtp-time", &rtpTime));
-
-        mAccessUnitRTPTime = rtpTime;
-    }
-
-    mNALUnits.push_back(packet);
-}
-
-void RTPReceiver::H264Assembler::flushAccessUnit() {
-    if (mNALUnits.empty()) {
-        return;
-    }
-
-    size_t totalSize = 0;
-    for (List<sp<ABuffer> >::iterator it = mNALUnits.begin();
-            it != mNALUnits.end(); ++it) {
-        totalSize += 4 + (*it)->size();
-    }
-
-    sp<ABuffer> accessUnit = new ABuffer(totalSize);
-    size_t offset = 0;
-    for (List<sp<ABuffer> >::iterator it = mNALUnits.begin();
-            it != mNALUnits.end(); ++it) {
-        const sp<ABuffer> nalUnit = *it;
-
-        memcpy(accessUnit->data() + offset, "\x00\x00\x00\x01", 4);
-
-        memcpy(accessUnit->data() + offset + 4,
-               nalUnit->data(),
-               nalUnit->size());
-
-        offset += 4 + nalUnit->size();
-    }
-
-    mNALUnits.clear();
-
-    accessUnit->meta()->setInt64("timeUs", mAccessUnitRTPTime * 100ll / 9ll);
-    postAccessUnit(accessUnit, false /* followsDiscontinuity */);
-}
-
-status_t RTPReceiver::H264Assembler::addSingleTimeAggregationPacket(
-        const sp<ABuffer> &packet) {
-    const uint8_t *data = packet->data();
-    size_t size = packet->size();
-
-    if (size < 3) {
-        ALOGV("Malformed H264 STAP-A packet (too small)");
-        return ERROR_MALFORMED;
-    }
-
-    int32_t rtpTime;
-    CHECK(packet->meta()->findInt32("rtp-time", &rtpTime));
-
-    ++data;
-    --size;
-    while (size >= 2) {
-        size_t nalSize = (data[0] << 8) | data[1];
-
-        if (size < nalSize + 2) {
-            ALOGV("Malformed H264 STAP-A packet (incomplete NAL unit)");
-            return ERROR_MALFORMED;
-        }
-
-        sp<ABuffer> unit = new ABuffer(nalSize);
-        memcpy(unit->data(), &data[2], nalSize);
-
-        unit->meta()->setInt32("rtp-time", rtpTime);
-
-        addSingleNALUnit(unit);
-
-        data += 2 + nalSize;
-        size -= 2 + nalSize;
-    }
-
-    if (size != 0) {
-        ALOGV("Unexpected padding at end of STAP-A packet.");
-    }
-
-    ALOGV("added STAP-A");
-
-    return OK;
-}
-
-}  // namespace android
-
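
For context, the FU-A branch above (per RFC 3984) rebuilds the original one-byte NAL header from the indicator's NRI bits and the FU header's type, then concatenates fragment payloads until the end bit arrives. A minimal standalone sketch of just the header handling (the struct and function names are illustrative only):

#include <stdint.h>
#include <stddef.h>

struct FuaFragmentInfo {
    uint8_t nalHeader;  // reconstructed NAL unit header byte
    bool startBit;      // S bit: first fragment of the NAL unit
    bool endBit;        // E bit: last fragment of the NAL unit
};

// 'data'/'size' cover an RTP payload whose indicated NAL type is 28 (FU-A).
static bool parseFuaFragment(
        const uint8_t *data, size_t size, FuaFragmentInfo *out) {
    if (size < 2 || (data[0] & 0x1f) != 28) {
        return false;  // not a well-formed FU-A payload
    }
    uint8_t nri = (data[0] >> 5) & 3;   // NRI copied from the FU indicator
    uint8_t nalType = data[1] & 0x1f;   // original NAL type from the FU header
    out->nalHeader = (uint8_t)(nalType | (nri << 5));
    out->startBit = (data[1] & 0x80) != 0;
    out->endBit = (data[1] & 0x40) != 0;
    return true;  // the fragment payload follows at data + 2
}
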
diff --git a/media/libstagefright/wifi-display/rtp/RTPAssembler.h b/media/libstagefright/wifi-display/rtp/RTPAssembler.h
deleted file mode 100644
index e456d32..0000000
--- a/media/libstagefright/wifi-display/rtp/RTPAssembler.h
+++ /dev/null
@@ -1,92 +0,0 @@
-/*
- * Copyright 2013, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef RTP_ASSEMBLER_H_
-
-#define RTP_ASSEMBLER_H_
-
-#include "RTPReceiver.h"
-
-namespace android {
-
-// A helper class to reassemble the payload of RTP packets into access
-// units depending on the packetization scheme.
-struct RTPReceiver::Assembler : public RefBase {
-    Assembler(const sp<AMessage> &notify);
-
-    virtual void signalDiscontinuity() = 0;
-    virtual status_t processPacket(const sp<ABuffer> &packet) = 0;
-
-protected:
-    virtual ~Assembler() {}
-
-    void postAccessUnit(
-            const sp<ABuffer> &accessUnit, bool followsDiscontinuity);
-
-private:
-    sp<AMessage> mNotify;
-
-    DISALLOW_EVIL_CONSTRUCTORS(Assembler);
-};
-
-struct RTPReceiver::TSAssembler : public RTPReceiver::Assembler {
-    TSAssembler(const sp<AMessage> &notify);
-
-    virtual void signalDiscontinuity();
-    virtual status_t processPacket(const sp<ABuffer> &packet);
-
-private:
-    bool mSawDiscontinuity;
-
-    DISALLOW_EVIL_CONSTRUCTORS(TSAssembler);
-};
-
-struct RTPReceiver::H264Assembler : public RTPReceiver::Assembler {
-    H264Assembler(const sp<AMessage> &notify);
-
-    virtual void signalDiscontinuity();
-    virtual status_t processPacket(const sp<ABuffer> &packet);
-
-private:
-    int32_t mState;
-
-    uint8_t mIndicator;
-    uint8_t mNALType;
-
-    sp<ABuffer> mAccumulator;
-
-    List<sp<ABuffer> > mNALUnits;
-    int32_t mAccessUnitRTPTime;
-
-    status_t internalProcessPacket(const sp<ABuffer> &packet);
-
-    void addSingleNALUnit(const sp<ABuffer> &packet);
-    status_t addSingleTimeAggregationPacket(const sp<ABuffer> &packet);
-
-    void flushAccessUnit();
-
-    void clearAccumulator();
-    void appendToAccumulator(const void *data, size_t size);
-
-    void reset();
-
-    DISALLOW_EVIL_CONSTRUCTORS(H264Assembler);
-};
-
-}  // namespace android
-
-#endif  // RTP_ASSEMBLER_H_
-
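
One more packetization detail worth spelling out: STAP-A payloads (NAL type 24), which addSingleTimeAggregationPacket walks above, are a one-byte STAP-A header followed by length-prefixed NAL units. A self-contained sketch of that layout, with illustrative names:

#include <stdint.h>
#include <stddef.h>
#include <utility>
#include <vector>

// Splits a STAP-A payload into (offset, size) pairs, one per aggregated NAL
// unit; each entry is a 16-bit big-endian size followed by the NAL unit.
static bool splitStapA(const uint8_t *data, size_t size,
                       std::vector<std::pair<size_t, size_t> > *units) {
    if (size < 3 || (data[0] & 0x1f) != 24) {
        return false;                        // not a STAP-A packet
    }
    size_t offset = 1;                       // skip the STAP-A NAL header byte
    while (size - offset >= 2) {
        size_t nalSize = ((size_t)data[offset] << 8) | data[offset + 1];
        if (size - offset < 2 + nalSize) {
            return false;                    // truncated NAL unit
        }
        units->push_back(std::make_pair(offset + 2, nalSize));
        offset += 2 + nalSize;
    }
    // Like the original code, tolerate a stray trailing byte as padding.
    return true;
}
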
diff --git a/media/libstagefright/wifi-display/rtp/RTPReceiver.cpp b/media/libstagefright/wifi-display/rtp/RTPReceiver.cpp
deleted file mode 100644
index 3b3bd63..0000000
--- a/media/libstagefright/wifi-display/rtp/RTPReceiver.cpp
+++ /dev/null
@@ -1,1152 +0,0 @@
-/*
- * Copyright 2013, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "RTPReceiver"
-#include <utils/Log.h>
-
-#include "RTPAssembler.h"
-#include "RTPReceiver.h"
-
-#include <media/stagefright/foundation/ABuffer.h>
-#include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/foundation/AMessage.h>
-#include <media/stagefright/foundation/ANetworkSession.h>
-#include <media/stagefright/foundation/hexdump.h>
-#include <media/stagefright/MediaErrors.h>
-#include <media/stagefright/Utils.h>
-
-#define TRACK_PACKET_LOSS       0
-
-namespace android {
-
-////////////////////////////////////////////////////////////////////////////////
-
-struct RTPReceiver::Source : public AHandler {
-    Source(RTPReceiver *receiver, uint32_t ssrc);
-
-    void onPacketReceived(uint16_t seq, const sp<ABuffer> &buffer);
-
-    void addReportBlock(uint32_t ssrc, const sp<ABuffer> &buf);
-
-protected:
-    virtual ~Source();
-
-    virtual void onMessageReceived(const sp<AMessage> &msg);
-
-private:
-    enum {
-        kWhatRetransmit,
-        kWhatDeclareLost,
-    };
-
-    static const uint32_t kMinSequential = 2;
-    static const uint32_t kMaxDropout = 3000;
-    static const uint32_t kMaxMisorder = 100;
-    static const uint32_t kRTPSeqMod = 1u << 16;
-    static const int64_t kReportIntervalUs = 10000000ll;
-
-    RTPReceiver *mReceiver;
-    uint32_t mSSRC;
-    bool mFirst;
-    uint16_t mMaxSeq;
-    uint32_t mCycles;
-    uint32_t mBaseSeq;
-    uint32_t mReceived;
-    uint32_t mExpectedPrior;
-    uint32_t mReceivedPrior;
-
-    int64_t mFirstArrivalTimeUs;
-    int64_t mFirstRTPTimeUs;
-
-    // Ordered by extended seq number.
-    List<sp<ABuffer> > mPackets;
-
-    enum StatusBits {
-        STATUS_DECLARED_LOST            = 1,
-        STATUS_REQUESTED_RETRANSMISSION = 2,
-        STATUS_ARRIVED_LATE             = 4,
-    };
-#if TRACK_PACKET_LOSS
-    KeyedVector<int32_t, uint32_t> mLostPackets;
-#endif
-
-    void modifyPacketStatus(int32_t extSeqNo, uint32_t mask);
-
-    int32_t mAwaitingExtSeqNo;
-    bool mRequestedRetransmission;
-
-    int32_t mActivePacketType;
-    sp<Assembler> mActiveAssembler;
-
-    int64_t mNextReportTimeUs;
-
-    int32_t mNumDeclaredLost;
-    int32_t mNumDeclaredLostPrior;
-
-    int32_t mRetransmitGeneration;
-    int32_t mDeclareLostGeneration;
-    bool mDeclareLostTimerPending;
-
-    void queuePacket(const sp<ABuffer> &packet);
-    void dequeueMore();
-
-    sp<ABuffer> getNextPacket();
-    void resync();
-
-    void postRetransmitTimer(int64_t delayUs);
-    void postDeclareLostTimer(int64_t delayUs);
-    void cancelTimers();
-
-    DISALLOW_EVIL_CONSTRUCTORS(Source);
-};
-
-////////////////////////////////////////////////////////////////////////////////
-
-RTPReceiver::Source::Source(RTPReceiver *receiver, uint32_t ssrc)
-    : mReceiver(receiver),
-      mSSRC(ssrc),
-      mFirst(true),
-      mMaxSeq(0),
-      mCycles(0),
-      mBaseSeq(0),
-      mReceived(0),
-      mExpectedPrior(0),
-      mReceivedPrior(0),
-      mFirstArrivalTimeUs(-1ll),
-      mFirstRTPTimeUs(-1ll),
-      mAwaitingExtSeqNo(-1),
-      mRequestedRetransmission(false),
-      mActivePacketType(-1),
-      mNextReportTimeUs(-1ll),
-      mNumDeclaredLost(0),
-      mNumDeclaredLostPrior(0),
-      mRetransmitGeneration(0),
-      mDeclareLostGeneration(0),
-      mDeclareLostTimerPending(false) {
-}
-
-RTPReceiver::Source::~Source() {
-}
-
-void RTPReceiver::Source::onMessageReceived(const sp<AMessage> &msg) {
-    switch (msg->what()) {
-        case kWhatRetransmit:
-        {
-            int32_t generation;
-            CHECK(msg->findInt32("generation", &generation));
-
-            if (generation != mRetransmitGeneration) {
-                break;
-            }
-
-            mRequestedRetransmission = true;
-            mReceiver->requestRetransmission(mSSRC, mAwaitingExtSeqNo);
-
-            modifyPacketStatus(
-                    mAwaitingExtSeqNo, STATUS_REQUESTED_RETRANSMISSION);
-            break;
-        }
-
-        case kWhatDeclareLost:
-        {
-            int32_t generation;
-            CHECK(msg->findInt32("generation", &generation));
-
-            if (generation != mDeclareLostGeneration) {
-                break;
-            }
-
-            cancelTimers();
-
-            ALOGV("Lost packet extSeqNo %d %s",
-                  mAwaitingExtSeqNo,
-                  mRequestedRetransmission ? "*" : "");
-
-            mRequestedRetransmission = false;
-            if (mActiveAssembler != NULL) {
-                mActiveAssembler->signalDiscontinuity();
-            }
-
-            modifyPacketStatus(mAwaitingExtSeqNo, STATUS_DECLARED_LOST);
-
-            // resync();
-            ++mAwaitingExtSeqNo;
-            ++mNumDeclaredLost;
-
-            mReceiver->notifyPacketLost();
-
-            dequeueMore();
-            break;
-        }
-
-        default:
-            TRESPASS();
-    }
-}
-
-void RTPReceiver::Source::onPacketReceived(
-        uint16_t seq, const sp<ABuffer> &buffer) {
-    if (mFirst) {
-        buffer->setInt32Data(mCycles | seq);
-        queuePacket(buffer);
-
-        mFirst = false;
-        mBaseSeq = seq;
-        mMaxSeq = seq;
-        ++mReceived;
-        return;
-    }
-
-    uint16_t udelta = seq - mMaxSeq;
-
-    if (udelta < kMaxDropout) {
-        // In order, with permissible gap.
-
-        if (seq < mMaxSeq) {
-            // Sequence number wrapped - count another 64K cycle
-            mCycles += kRTPSeqMod;
-        }
-
-        mMaxSeq = seq;
-
-        ++mReceived;
-    } else if (udelta <= kRTPSeqMod - kMaxMisorder) {
-        // The sequence number made a very large jump
-        return;
-    } else {
-        // Duplicate or reordered packet.
-    }
-
-    buffer->setInt32Data(mCycles | seq);
-    queuePacket(buffer);
-}
-
-void RTPReceiver::Source::queuePacket(const sp<ABuffer> &packet) {
-    int32_t newExtendedSeqNo = packet->int32Data();
-
-    if (mFirstArrivalTimeUs < 0ll) {
-        mFirstArrivalTimeUs = ALooper::GetNowUs();
-
-        uint32_t rtpTime;
-        CHECK(packet->meta()->findInt32("rtp-time", (int32_t *)&rtpTime));
-
-        mFirstRTPTimeUs = (rtpTime * 100ll) / 9ll;
-    }
-
-    if (mAwaitingExtSeqNo >= 0 && newExtendedSeqNo < mAwaitingExtSeqNo) {
-        // We're no longer interested in these. They're old.
-        ALOGV("dropping stale extSeqNo %d", newExtendedSeqNo);
-
-        modifyPacketStatus(newExtendedSeqNo, STATUS_ARRIVED_LATE);
-        return;
-    }
-
-    if (mPackets.empty()) {
-        mPackets.push_back(packet);
-        dequeueMore();
-        return;
-    }
-
-    List<sp<ABuffer> >::iterator firstIt = mPackets.begin();
-    List<sp<ABuffer> >::iterator it = --mPackets.end();
-    for (;;) {
-        int32_t extendedSeqNo = (*it)->int32Data();
-
-        if (extendedSeqNo == newExtendedSeqNo) {
-            // Duplicate packet.
-            return;
-        }
-
-        if (extendedSeqNo < newExtendedSeqNo) {
-            // Insert new packet after the one at "it".
-            mPackets.insert(++it, packet);
-            break;
-        }
-
-        if (it == firstIt) {
-            // Insert new packet before the first existing one.
-            mPackets.insert(it, packet);
-            break;
-        }
-
-        --it;
-    }
-
-    dequeueMore();
-}
-
-void RTPReceiver::Source::dequeueMore() {
-    int64_t nowUs = ALooper::GetNowUs();
-    if (mNextReportTimeUs < 0ll || nowUs >= mNextReportTimeUs) {
-        if (mNextReportTimeUs >= 0ll) {
-            uint32_t expected = (mMaxSeq | mCycles) - mBaseSeq + 1;
-
-            uint32_t expectedInterval = expected - mExpectedPrior;
-            mExpectedPrior = expected;
-
-            uint32_t receivedInterval = mReceived - mReceivedPrior;
-            mReceivedPrior = mReceived;
-
-            int64_t lostInterval =
-                (int64_t)expectedInterval - (int64_t)receivedInterval;
-
-            int32_t declaredLostInterval =
-                mNumDeclaredLost - mNumDeclaredLostPrior;
-
-            mNumDeclaredLostPrior = mNumDeclaredLost;
-
-            if (declaredLostInterval > 0) {
-                ALOGI("lost %lld packets (%.2f %%), declared %d lost\n",
-                      lostInterval,
-                      100.0f * lostInterval / expectedInterval,
-                      declaredLostInterval);
-            }
-        }
-
-        mNextReportTimeUs = nowUs + kReportIntervalUs;
-
-#if TRACK_PACKET_LOSS
-        for (size_t i = 0; i < mLostPackets.size(); ++i) {
-            int32_t key = mLostPackets.keyAt(i);
-            uint32_t value = mLostPackets.valueAt(i);
-
-            AString status;
-            if (value & STATUS_REQUESTED_RETRANSMISSION) {
-                status.append("retrans ");
-            }
-            if (value & STATUS_ARRIVED_LATE) {
-                status.append("arrived-late ");
-            }
-            ALOGI("Packet %d declared lost %s", key, status.c_str());
-        }
-#endif
-    }
-
-    sp<ABuffer> packet;
-    while ((packet = getNextPacket()) != NULL) {
-        if (mDeclareLostTimerPending) {
-            cancelTimers();
-        }
-
-        CHECK_GE(mAwaitingExtSeqNo, 0);
-#if TRACK_PACKET_LOSS
-        mLostPackets.removeItem(mAwaitingExtSeqNo);
-#endif
-
-        int32_t packetType;
-        CHECK(packet->meta()->findInt32("PT", &packetType));
-
-        if (packetType != mActivePacketType) {
-            mActiveAssembler = mReceiver->makeAssembler(packetType);
-            mActivePacketType = packetType;
-        }
-
-        if (mActiveAssembler != NULL) {
-            status_t err = mActiveAssembler->processPacket(packet);
-            if (err != OK) {
-                ALOGV("assembler returned error %d", err);
-            }
-        }
-
-        ++mAwaitingExtSeqNo;
-    }
-
-    if (mDeclareLostTimerPending) {
-        return;
-    }
-
-    if (mPackets.empty()) {
-        return;
-    }
-
-    CHECK_GE(mAwaitingExtSeqNo, 0);
-
-    const sp<ABuffer> &firstPacket = *mPackets.begin();
-
-    uint32_t rtpTime;
-    CHECK(firstPacket->meta()->findInt32(
-                "rtp-time", (int32_t *)&rtpTime));
-
-
-    int64_t rtpUs = (rtpTime * 100ll) / 9ll;
-
-    int64_t maxArrivalTimeUs =
-        mFirstArrivalTimeUs + rtpUs - mFirstRTPTimeUs;
-
-    nowUs = ALooper::GetNowUs();
-
-    CHECK_LT(mAwaitingExtSeqNo, firstPacket->int32Data());
-
-    ALOGV("waiting for %d, comparing against %d, %lld us left",
-          mAwaitingExtSeqNo,
-          firstPacket->int32Data(),
-          maxArrivalTimeUs - nowUs);
-
-    postDeclareLostTimer(maxArrivalTimeUs + kPacketLostAfterUs);
-
-    if (kRequestRetransmissionAfterUs > 0ll) {
-        postRetransmitTimer(
-                maxArrivalTimeUs + kRequestRetransmissionAfterUs);
-    }
-}
-
-sp<ABuffer> RTPReceiver::Source::getNextPacket() {
-    if (mPackets.empty()) {
-        return NULL;
-    }
-
-    int32_t extSeqNo = (*mPackets.begin())->int32Data();
-
-    if (mAwaitingExtSeqNo < 0) {
-        mAwaitingExtSeqNo = extSeqNo;
-    } else if (extSeqNo != mAwaitingExtSeqNo) {
-        return NULL;
-    }
-
-    sp<ABuffer> packet = *mPackets.begin();
-    mPackets.erase(mPackets.begin());
-
-    return packet;
-}
-
-void RTPReceiver::Source::resync() {
-    mAwaitingExtSeqNo = -1;
-}
-
-void RTPReceiver::Source::addReportBlock(
-        uint32_t ssrc, const sp<ABuffer> &buf) {
-    uint32_t extMaxSeq = mMaxSeq | mCycles;
-    uint32_t expected = extMaxSeq - mBaseSeq + 1;
-
-    int64_t lost = (int64_t)expected - (int64_t)mReceived;
-    if (lost > 0x7fffff) {
-        lost = 0x7fffff;
-    } else if (lost < -0x800000) {
-        lost = -0x800000;
-    }
-
-    uint32_t expectedInterval = expected - mExpectedPrior;
-    mExpectedPrior = expected;
-
-    uint32_t receivedInterval = mReceived - mReceivedPrior;
-    mReceivedPrior = mReceived;
-
-    int64_t lostInterval = expectedInterval - receivedInterval;
-
-    uint8_t fractionLost;
-    if (expectedInterval == 0 || lostInterval <= 0) {
-        fractionLost = 0;
-    } else {
-        fractionLost = (lostInterval << 8) / expectedInterval;
-    }
-
-    uint8_t *ptr = buf->data() + buf->size();
-
-    ptr[0] = ssrc >> 24;
-    ptr[1] = (ssrc >> 16) & 0xff;
-    ptr[2] = (ssrc >> 8) & 0xff;
-    ptr[3] = ssrc & 0xff;
-
-    ptr[4] = fractionLost;
-
-    ptr[5] = (lost >> 16) & 0xff;
-    ptr[6] = (lost >> 8) & 0xff;
-    ptr[7] = lost & 0xff;
-
-    ptr[8] = extMaxSeq >> 24;
-    ptr[9] = (extMaxSeq >> 16) & 0xff;
-    ptr[10] = (extMaxSeq >> 8) & 0xff;
-    ptr[11] = extMaxSeq & 0xff;
-
-    // XXX TODO:
-
-    ptr[12] = 0x00;  // interarrival jitter
-    ptr[13] = 0x00;
-    ptr[14] = 0x00;
-    ptr[15] = 0x00;
-
-    ptr[16] = 0x00;  // last SR
-    ptr[17] = 0x00;
-    ptr[18] = 0x00;
-    ptr[19] = 0x00;
-
-    ptr[20] = 0x00;  // delay since last SR
-    ptr[21] = 0x00;
-    ptr[22] = 0x00;
-    ptr[23] = 0x00;
-
-    buf->setRange(buf->offset(), buf->size() + 24);
-}
-
-////////////////////////////////////////////////////////////////////////////////
-
-RTPReceiver::RTPReceiver(
-        const sp<ANetworkSession> &netSession,
-        const sp<AMessage> &notify,
-        uint32_t flags)
-    : mNetSession(netSession),
-      mNotify(notify),
-      mFlags(flags),
-      mRTPMode(TRANSPORT_UNDEFINED),
-      mRTCPMode(TRANSPORT_UNDEFINED),
-      mRTPSessionID(0),
-      mRTCPSessionID(0),
-      mRTPConnected(false),
-      mRTCPConnected(false),
-      mRTPClientSessionID(0),
-      mRTCPClientSessionID(0) {
-}
-
-RTPReceiver::~RTPReceiver() {
-    if (mRTCPClientSessionID != 0) {
-        mNetSession->destroySession(mRTCPClientSessionID);
-        mRTCPClientSessionID = 0;
-    }
-
-    if (mRTPClientSessionID != 0) {
-        mNetSession->destroySession(mRTPClientSessionID);
-        mRTPClientSessionID = 0;
-    }
-
-    if (mRTCPSessionID != 0) {
-        mNetSession->destroySession(mRTCPSessionID);
-        mRTCPSessionID = 0;
-    }
-
-    if (mRTPSessionID != 0) {
-        mNetSession->destroySession(mRTPSessionID);
-        mRTPSessionID = 0;
-    }
-}
-
-status_t RTPReceiver::initAsync(
-        TransportMode rtpMode,
-        TransportMode rtcpMode,
-        int32_t *outLocalRTPPort) {
-    if (mRTPMode != TRANSPORT_UNDEFINED
-            || rtpMode == TRANSPORT_UNDEFINED
-            || rtpMode == TRANSPORT_NONE
-            || rtcpMode == TRANSPORT_UNDEFINED) {
-        return INVALID_OPERATION;
-    }
-
-    CHECK_NE(rtpMode, TRANSPORT_TCP_INTERLEAVED);
-    CHECK_NE(rtcpMode, TRANSPORT_TCP_INTERLEAVED);
-
-    sp<AMessage> rtpNotify = new AMessage(kWhatRTPNotify, id());
-
-    sp<AMessage> rtcpNotify;
-    if (rtcpMode != TRANSPORT_NONE) {
-        rtcpNotify = new AMessage(kWhatRTCPNotify, id());
-    }
-
-    CHECK_EQ(mRTPSessionID, 0);
-    CHECK_EQ(mRTCPSessionID, 0);
-
-    int32_t localRTPPort;
-
-    struct in_addr ifaceAddr;
-    ifaceAddr.s_addr = INADDR_ANY;
-
-    for (;;) {
-        localRTPPort = PickRandomRTPPort();
-
-        status_t err;
-        if (rtpMode == TRANSPORT_UDP) {
-            err = mNetSession->createUDPSession(
-                    localRTPPort,
-                    rtpNotify,
-                    &mRTPSessionID);
-        } else {
-            CHECK_EQ(rtpMode, TRANSPORT_TCP);
-            err = mNetSession->createTCPDatagramSession(
-                    ifaceAddr,
-                    localRTPPort,
-                    rtpNotify,
-                    &mRTPSessionID);
-        }
-
-        if (err != OK) {
-            continue;
-        }
-
-        if (rtcpMode == TRANSPORT_NONE) {
-            break;
-        } else if (rtcpMode == TRANSPORT_UDP) {
-            err = mNetSession->createUDPSession(
-                    localRTPPort + 1,
-                    rtcpNotify,
-                    &mRTCPSessionID);
-        } else {
-            CHECK_EQ(rtcpMode, TRANSPORT_TCP);
-            err = mNetSession->createTCPDatagramSession(
-                    ifaceAddr,
-                    localRTPPort + 1,
-                    rtcpNotify,
-                    &mRTCPSessionID);
-        }
-
-        if (err == OK) {
-            break;
-        }
-
-        mNetSession->destroySession(mRTPSessionID);
-        mRTPSessionID = 0;
-    }
-
-    mRTPMode = rtpMode;
-    mRTCPMode = rtcpMode;
-    *outLocalRTPPort = localRTPPort;
-
-    return OK;
-}
-
-status_t RTPReceiver::connect(
-        const char *remoteHost, int32_t remoteRTPPort, int32_t remoteRTCPPort) {
-    status_t err;
-
-    if (mRTPMode == TRANSPORT_UDP) {
-        CHECK(!mRTPConnected);
-
-        err = mNetSession->connectUDPSession(
-                mRTPSessionID, remoteHost, remoteRTPPort);
-
-        if (err != OK) {
-            notifyInitDone(err);
-            return err;
-        }
-
-        ALOGI("connectUDPSession RTP successful.");
-
-        mRTPConnected = true;
-    }
-
-    if (mRTCPMode == TRANSPORT_UDP) {
-        CHECK(!mRTCPConnected);
-
-        err = mNetSession->connectUDPSession(
-                mRTCPSessionID, remoteHost, remoteRTCPPort);
-
-        if (err != OK) {
-            notifyInitDone(err);
-            return err;
-        }
-
-        scheduleSendRR();
-
-        ALOGI("connectUDPSession RTCP successful.");
-
-        mRTCPConnected = true;
-    }
-
-    if (mRTPConnected
-            && (mRTCPConnected || mRTCPMode == TRANSPORT_NONE)) {
-        notifyInitDone(OK);
-    }
-
-    return OK;
-}
-
-status_t RTPReceiver::informSender(const sp<AMessage> &params) {
-    if (!mRTCPConnected) {
-        return INVALID_OPERATION;
-    }
-
-    int64_t avgLatencyUs;
-    CHECK(params->findInt64("avgLatencyUs", &avgLatencyUs));
-
-    int64_t maxLatencyUs;
-    CHECK(params->findInt64("maxLatencyUs", &maxLatencyUs));
-
-    sp<ABuffer> buf = new ABuffer(28);
-
-    uint8_t *ptr = buf->data();
-    ptr[0] = 0x80 | 0;
-    ptr[1] = 204;  // APP
-    ptr[2] = 0;
-
-    CHECK((buf->size() % 4) == 0u);
-    ptr[3] = (buf->size() / 4) - 1;
-
-    ptr[4] = kSourceID >> 24;  // SSRC
-    ptr[5] = (kSourceID >> 16) & 0xff;
-    ptr[6] = (kSourceID >> 8) & 0xff;
-    ptr[7] = kSourceID & 0xff;
-    ptr[8] = 'l';
-    ptr[9] = 'a';
-    ptr[10] = 't';
-    ptr[11] = 'e';
-
-    ptr[12] = avgLatencyUs >> 56;
-    ptr[13] = (avgLatencyUs >> 48) & 0xff;
-    ptr[14] = (avgLatencyUs >> 40) & 0xff;
-    ptr[15] = (avgLatencyUs >> 32) & 0xff;
-    ptr[16] = (avgLatencyUs >> 24) & 0xff;
-    ptr[17] = (avgLatencyUs >> 16) & 0xff;
-    ptr[18] = (avgLatencyUs >> 8) & 0xff;
-    ptr[19] = avgLatencyUs & 0xff;
-
-    ptr[20] = maxLatencyUs >> 56;
-    ptr[21] = (maxLatencyUs >> 48) & 0xff;
-    ptr[22] = (maxLatencyUs >> 40) & 0xff;
-    ptr[23] = (maxLatencyUs >> 32) & 0xff;
-    ptr[24] = (maxLatencyUs >> 24) & 0xff;
-    ptr[25] = (maxLatencyUs >> 16) & 0xff;
-    ptr[26] = (maxLatencyUs >> 8) & 0xff;
-    ptr[27] = maxLatencyUs & 0xff;
-
-    mNetSession->sendRequest(mRTCPSessionID, buf->data(), buf->size());
-
-    return OK;
-}
-
-void RTPReceiver::onMessageReceived(const sp<AMessage> &msg) {
-    switch (msg->what()) {
-        case kWhatRTPNotify:
-        case kWhatRTCPNotify:
-            onNetNotify(msg->what() == kWhatRTPNotify, msg);
-            break;
-
-        case kWhatSendRR:
-        {
-            onSendRR();
-            break;
-        }
-
-        default:
-            TRESPASS();
-    }
-}
-
-void RTPReceiver::onNetNotify(bool isRTP, const sp<AMessage> &msg) {
-    int32_t reason;
-    CHECK(msg->findInt32("reason", &reason));
-
-    switch (reason) {
-        case ANetworkSession::kWhatError:
-        {
-            int32_t sessionID;
-            CHECK(msg->findInt32("sessionID", &sessionID));
-
-            int32_t err;
-            CHECK(msg->findInt32("err", &err));
-
-            int32_t errorOccurredDuringSend;
-            CHECK(msg->findInt32("send", &errorOccurredDuringSend));
-
-            AString detail;
-            CHECK(msg->findString("detail", &detail));
-
-            ALOGE("An error occurred during %s in session %d "
-                  "(%d, '%s' (%s)).",
-                  errorOccurredDuringSend ? "send" : "receive",
-                  sessionID,
-                  err,
-                  detail.c_str(),
-                  strerror(-err));
-
-            mNetSession->destroySession(sessionID);
-
-            if (sessionID == mRTPSessionID) {
-                mRTPSessionID = 0;
-            } else if (sessionID == mRTCPSessionID) {
-                mRTCPSessionID = 0;
-            } else if (sessionID == mRTPClientSessionID) {
-                mRTPClientSessionID = 0;
-            } else if (sessionID == mRTCPClientSessionID) {
-                mRTCPClientSessionID = 0;
-            }
-
-            if (!mRTPConnected
-                    || (mRTCPMode != TRANSPORT_NONE && !mRTCPConnected)) {
-                notifyInitDone(err);
-                break;
-            }
-
-            notifyError(err);
-            break;
-        }
-
-        case ANetworkSession::kWhatDatagram:
-        {
-            sp<ABuffer> data;
-            CHECK(msg->findBuffer("data", &data));
-
-            if (isRTP) {
-                if (mFlags & FLAG_AUTO_CONNECT) {
-                    AString fromAddr;
-                    CHECK(msg->findString("fromAddr", &fromAddr));
-
-                    int32_t fromPort;
-                    CHECK(msg->findInt32("fromPort", &fromPort));
-
-                    CHECK_EQ((status_t)OK,
-                             connect(
-                                 fromAddr.c_str(), fromPort, fromPort + 1));
-
-                    mFlags &= ~FLAG_AUTO_CONNECT;
-                }
-
-                onRTPData(data);
-            } else {
-                onRTCPData(data);
-            }
-            break;
-        }
-
-        case ANetworkSession::kWhatClientConnected:
-        {
-            int32_t sessionID;
-            CHECK(msg->findInt32("sessionID", &sessionID));
-
-            if (isRTP) {
-                CHECK_EQ(mRTPMode, TRANSPORT_TCP);
-
-                if (mRTPClientSessionID != 0) {
-                    // We only allow a single client connection.
-                    mNetSession->destroySession(sessionID);
-                    sessionID = 0;
-                    break;
-                }
-
-                mRTPClientSessionID = sessionID;
-                mRTPConnected = true;
-            } else {
-                CHECK_EQ(mRTCPMode, TRANSPORT_TCP);
-
-                if (mRTCPClientSessionID != 0) {
-                    // We only allow a single client connection.
-                    mNetSession->destroySession(sessionID);
-                    sessionID = 0;
-                    break;
-                }
-
-                mRTCPClientSessionID = sessionID;
-                mRTCPConnected = true;
-            }
-
-            if (mRTPConnected
-                    && (mRTCPConnected || mRTCPMode == TRANSPORT_NONE)) {
-                notifyInitDone(OK);
-            }
-            break;
-        }
-    }
-}
-
-void RTPReceiver::notifyInitDone(status_t err) {
-    sp<AMessage> notify = mNotify->dup();
-    notify->setInt32("what", kWhatInitDone);
-    notify->setInt32("err", err);
-    notify->post();
-}
-
-void RTPReceiver::notifyError(status_t err) {
-    sp<AMessage> notify = mNotify->dup();
-    notify->setInt32("what", kWhatError);
-    notify->setInt32("err", err);
-    notify->post();
-}
-
-void RTPReceiver::notifyPacketLost() {
-    sp<AMessage> notify = mNotify->dup();
-    notify->setInt32("what", kWhatPacketLost);
-    notify->post();
-}
-
-status_t RTPReceiver::onRTPData(const sp<ABuffer> &buffer) {
-    size_t size = buffer->size();
-    if (size < 12) {
-        // Too short to be a valid RTP header.
-        return ERROR_MALFORMED;
-    }
-
-    const uint8_t *data = buffer->data();
-
-    if ((data[0] >> 6) != 2) {
-        // Unsupported version.
-        return ERROR_UNSUPPORTED;
-    }
-
-    if (data[0] & 0x20) {
-        // Padding present.
-
-        size_t paddingLength = data[size - 1];
-
-        if (paddingLength + 12 > size) {
-            // If we removed this much padding we'd end up with something
-            // that's too short to be a valid RTP header.
-            return ERROR_MALFORMED;
-        }
-
-        size -= paddingLength;
-    }
-
-    int numCSRCs = data[0] & 0x0f;
-
-    size_t payloadOffset = 12 + 4 * numCSRCs;
-
-    if (size < payloadOffset) {
-        // Not enough data to fit the basic header and all the CSRC entries.
-        return ERROR_MALFORMED;
-    }
-
-    if (data[0] & 0x10) {
-        // Header eXtension present.
-
-        if (size < payloadOffset + 4) {
-            // Not enough data to fit the basic header, all CSRC entries
-            // and the first 4 bytes of the extension header.
-
-            return ERROR_MALFORMED;
-        }
-
-        const uint8_t *extensionData = &data[payloadOffset];
-
-        size_t extensionLength =
-            4 * (extensionData[2] << 8 | extensionData[3]);
-
-        if (size < payloadOffset + 4 + extensionLength) {
-            return ERROR_MALFORMED;
-        }
-
-        payloadOffset += 4 + extensionLength;
-    }
-
-    uint32_t srcId = U32_AT(&data[8]);
-    uint32_t rtpTime = U32_AT(&data[4]);
-    uint16_t seqNo = U16_AT(&data[2]);
-
-    sp<AMessage> meta = buffer->meta();
-    meta->setInt32("ssrc", srcId);
-    meta->setInt32("rtp-time", rtpTime);
-    meta->setInt32("PT", data[1] & 0x7f);
-    meta->setInt32("M", data[1] >> 7);
-
-    buffer->setRange(payloadOffset, size - payloadOffset);
-
-    ssize_t index = mSources.indexOfKey(srcId);
-    sp<Source> source;
-    if (index < 0) {
-        source = new Source(this, srcId);
-        looper()->registerHandler(source);
-
-        mSources.add(srcId, source);
-    } else {
-        source = mSources.valueAt(index);
-    }
-
-    source->onPacketReceived(seqNo, buffer);
-
-    return OK;
-}
-
-status_t RTPReceiver::onRTCPData(const sp<ABuffer> &data) {
-    ALOGI("onRTCPData");
-    return OK;
-}
-
-void RTPReceiver::addSDES(const sp<ABuffer> &buffer) {
-    uint8_t *data = buffer->data() + buffer->size();
-    data[0] = 0x80 | 1;
-    data[1] = 202;  // SDES
-    data[4] = kSourceID >> 24;  // SSRC
-    data[5] = (kSourceID >> 16) & 0xff;
-    data[6] = (kSourceID >> 8) & 0xff;
-    data[7] = kSourceID & 0xff;
-
-    size_t offset = 8;
-
-    data[offset++] = 1;  // CNAME
-
-    AString cname = "stagefright@somewhere";
-    data[offset++] = cname.size();
-
-    memcpy(&data[offset], cname.c_str(), cname.size());
-    offset += cname.size();
-
-    data[offset++] = 6;  // TOOL
-
-    AString tool = "stagefright/1.0";
-    data[offset++] = tool.size();
-
-    memcpy(&data[offset], tool.c_str(), tool.size());
-    offset += tool.size();
-
-    data[offset++] = 0;
-
-    if ((offset % 4) > 0) {
-        size_t count = 4 - (offset % 4);
-        switch (count) {
-            case 3:
-                data[offset++] = 0;
-            case 2:
-                data[offset++] = 0;
-            case 1:
-                data[offset++] = 0;
-        }
-    }
-
-    size_t numWords = (offset / 4) - 1;
-    data[2] = numWords >> 8;
-    data[3] = numWords & 0xff;
-
-    buffer->setRange(buffer->offset(), buffer->size() + offset);
-}
-
-void RTPReceiver::scheduleSendRR() {
-    (new AMessage(kWhatSendRR, id()))->post(5000000ll);
-}
-
-void RTPReceiver::onSendRR() {
-    sp<ABuffer> buf = new ABuffer(kMaxUDPPacketSize);
-    buf->setRange(0, 0);
-
-    uint8_t *ptr = buf->data();
-    ptr[0] = 0x80 | 0;
-    ptr[1] = 201;  // RR
-    ptr[2] = 0;
-    ptr[3] = 1;
-    ptr[4] = kSourceID >> 24;  // SSRC
-    ptr[5] = (kSourceID >> 16) & 0xff;
-    ptr[6] = (kSourceID >> 8) & 0xff;
-    ptr[7] = kSourceID & 0xff;
-
-    buf->setRange(0, 8);
-
-    size_t numReportBlocks = 0;
-    for (size_t i = 0; i < mSources.size(); ++i) {
-        uint32_t ssrc = mSources.keyAt(i);
-        sp<Source> source = mSources.valueAt(i);
-
-        if (numReportBlocks >= 31 || buf->size() + 24 > buf->capacity()) {
-            // Cannot fit another report block.
-            break;
-        }
-
-        source->addReportBlock(ssrc, buf);
-        ++numReportBlocks;
-    }
-
-    ptr[0] |= numReportBlocks;  // 5-bit report count field
-
-    size_t sizeInWordsMinus1 = 1 + 6 * numReportBlocks;
-    ptr[2] = sizeInWordsMinus1 >> 8;
-    ptr[3] = sizeInWordsMinus1 & 0xff;
-
-    buf->setRange(0, (sizeInWordsMinus1 + 1) * 4);
-
-    addSDES(buf);
-
-    mNetSession->sendRequest(mRTCPSessionID, buf->data(), buf->size());
-
-    scheduleSendRR();
-}
-
-status_t RTPReceiver::registerPacketType(
-        uint8_t packetType, PacketizationMode mode) {
-    mPacketTypes.add(packetType, mode);
-
-    return OK;
-}
-
-sp<RTPReceiver::Assembler> RTPReceiver::makeAssembler(uint8_t packetType) {
-    ssize_t index = mPacketTypes.indexOfKey(packetType);
-    if (index < 0) {
-        return NULL;
-    }
-
-    PacketizationMode mode = mPacketTypes.valueAt(index);
-
-    switch (mode) {
-        case PACKETIZATION_NONE:
-        case PACKETIZATION_TRANSPORT_STREAM:
-            return new TSAssembler(mNotify);
-
-        case PACKETIZATION_H264:
-            return new H264Assembler(mNotify);
-
-        default:
-            return NULL;
-    }
-}
-
-void RTPReceiver::requestRetransmission(uint32_t senderSSRC, int32_t extSeqNo) {
-    int32_t blp = 0;
-
-    sp<ABuffer> buf = new ABuffer(16);
-    buf->setRange(0, 0);
-
-    uint8_t *ptr = buf->data();
-    ptr[0] = 0x80 | 1;  // generic NACK
-    ptr[1] = 205;  // TSFB
-    ptr[2] = 0;
-    ptr[3] = 3;
-    ptr[4] = (kSourceID >> 24) & 0xff;   // SSRC of packet sender (this receiver)
-    ptr[5] = (kSourceID >> 16) & 0xff;
-    ptr[6] = (kSourceID >> 8) & 0xff;
-    ptr[7] = (kSourceID & 0xff);
-    ptr[8] = (senderSSRC >> 24) & 0xff;  // SSRC of media source
-    ptr[9] = (senderSSRC >> 16) & 0xff;
-    ptr[10] = (senderSSRC >> 8) & 0xff;
-    ptr[11] = (senderSSRC & 0xff);
-    ptr[12] = (extSeqNo >> 8) & 0xff;
-    ptr[13] = (extSeqNo & 0xff);
-    ptr[14] = (blp >> 8) & 0xff;
-    ptr[15] = (blp & 0xff);
-
-    buf->setRange(0, 16);
-
-    mNetSession->sendRequest(mRTCPSessionID, buf->data(), buf->size());
-}
-
-void RTPReceiver::Source::modifyPacketStatus(int32_t extSeqNo, uint32_t mask) {
-#if TRACK_PACKET_LOSS
-    ssize_t index = mLostPackets.indexOfKey(extSeqNo);
-    if (index < 0) {
-        mLostPackets.add(extSeqNo, mask);
-    } else {
-        mLostPackets.editValueAt(index) |= mask;
-    }
-#endif
-}
-
-void RTPReceiver::Source::postRetransmitTimer(int64_t timeUs) {
-    int64_t delayUs = timeUs - ALooper::GetNowUs();
-    sp<AMessage> msg = new AMessage(kWhatRetransmit, id());
-    msg->setInt32("generation", mRetransmitGeneration);
-    msg->post(delayUs);
-}
-
-void RTPReceiver::Source::postDeclareLostTimer(int64_t timeUs) {
-    CHECK(!mDeclareLostTimerPending);
-    mDeclareLostTimerPending = true;
-
-    int64_t delayUs = timeUs - ALooper::GetNowUs();
-    sp<AMessage> msg = new AMessage(kWhatDeclareLost, id());
-    msg->setInt32("generation", mDeclareLostGeneration);
-    msg->post(delayUs);
-}
-
-void RTPReceiver::Source::cancelTimers() {
-    ++mRetransmitGeneration;
-    ++mDeclareLostGeneration;
-    mDeclareLostTimerPending = false;
-}
-
-}  // namespace android
-
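
The fixed RTP header parsing in onRTPData() above follows RFC 3550: a version check, a CSRC list sized by the low nibble of the first byte, and an optional extension header before the payload. A standalone sketch of the same field extraction (padding handling omitted for brevity; names are illustrative):

#include <stdint.h>
#include <stddef.h>

struct RtpHeaderInfo {
    uint8_t payloadType;
    bool marker;
    uint16_t seqNo;
    uint32_t rtpTime;
    uint32_t ssrc;
    size_t payloadOffset;  // where the payload starts within the packet
};

static bool parseRtpHeader(
        const uint8_t *data, size_t size, RtpHeaderInfo *out) {
    if (size < 12 || (data[0] >> 6) != 2) {
        return false;  // too short or not RTP version 2
    }
    size_t payloadOffset = 12 + 4 * (size_t)(data[0] & 0x0f);  // skip CSRCs
    if (size < payloadOffset) {
        return false;
    }
    if (data[0] & 0x10) {  // header extension present
        if (size < payloadOffset + 4) {
            return false;
        }
        const uint8_t *ext = &data[payloadOffset];
        size_t extLen = 4 * (size_t)((ext[2] << 8) | ext[3]);
        if (size < payloadOffset + 4 + extLen) {
            return false;
        }
        payloadOffset += 4 + extLen;
    }
    out->payloadType = data[1] & 0x7f;
    out->marker = (data[1] & 0x80) != 0;
    out->seqNo = (uint16_t)((data[2] << 8) | data[3]);
    out->rtpTime = ((uint32_t)data[4] << 24) | (data[5] << 16)
            | (data[6] << 8) | data[7];
    out->ssrc = ((uint32_t)data[8] << 24) | (data[9] << 16)
            | (data[10] << 8) | data[11];
    out->payloadOffset = payloadOffset;
    return true;
}
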
diff --git a/media/libstagefright/wifi-display/rtp/RTPReceiver.h b/media/libstagefright/wifi-display/rtp/RTPReceiver.h
deleted file mode 100644
index 240ab2e..0000000
--- a/media/libstagefright/wifi-display/rtp/RTPReceiver.h
+++ /dev/null
@@ -1,125 +0,0 @@
-/*
- * Copyright 2013, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef RTP_RECEIVER_H_
-
-#define RTP_RECEIVER_H_
-
-#include "RTPBase.h"
-
-#include <media/stagefright/foundation/AHandler.h>
-
-namespace android {
-
-struct ABuffer;
-struct ANetworkSession;
-
-// An object of this class facilitates receiving media data on an RTP
-// channel. The channel is established over a UDP or TCP connection,
-// depending on the chosen "TransportMode". In addition, different RTP
-// packetization schemes are supported, such as "Transport Stream Packets
-// over RTP" or "AVC/H.264 encapsulation as specified in RFC 3984
-// (non-interleaved mode)".
-struct RTPReceiver : public RTPBase, public AHandler {
-    enum {
-        kWhatInitDone,
-        kWhatError,
-        kWhatAccessUnit,
-        kWhatPacketLost,
-    };
-
-    enum Flags {
-        FLAG_AUTO_CONNECT = 1,
-    };
-    RTPReceiver(
-            const sp<ANetworkSession> &netSession,
-            const sp<AMessage> &notify,
-            uint32_t flags = 0);
-
-    status_t registerPacketType(
-            uint8_t packetType, PacketizationMode mode);
-
-    status_t initAsync(
-            TransportMode rtpMode,
-            TransportMode rtcpMode,
-            int32_t *outLocalRTPPort);
-
-    status_t connect(
-            const char *remoteHost,
-            int32_t remoteRTPPort,
-            int32_t remoteRTCPPort);
-
-    status_t informSender(const sp<AMessage> &params);
-
-protected:
-    virtual ~RTPReceiver();
-    virtual void onMessageReceived(const sp<AMessage> &msg);
-
-private:
-    enum {
-        kWhatRTPNotify,
-        kWhatRTCPNotify,
-        kWhatSendRR,
-    };
-
-    enum {
-        kSourceID                       = 0xdeadbeef,
-        kPacketLostAfterUs              = 100000,
-        kRequestRetransmissionAfterUs   = -1,
-    };
-
-    struct Assembler;
-    struct H264Assembler;
-    struct Source;
-    struct TSAssembler;
-
-    sp<ANetworkSession> mNetSession;
-    sp<AMessage> mNotify;
-    uint32_t mFlags;
-    TransportMode mRTPMode;
-    TransportMode mRTCPMode;
-    int32_t mRTPSessionID;
-    int32_t mRTCPSessionID;
-    bool mRTPConnected;
-    bool mRTCPConnected;
-
-    int32_t mRTPClientSessionID;  // in TRANSPORT_TCP mode.
-    int32_t mRTCPClientSessionID;  // in TRANSPORT_TCP mode.
-
-    KeyedVector<uint8_t, PacketizationMode> mPacketTypes;
-    KeyedVector<uint32_t, sp<Source> > mSources;
-
-    void onNetNotify(bool isRTP, const sp<AMessage> &msg);
-    status_t onRTPData(const sp<ABuffer> &data);
-    status_t onRTCPData(const sp<ABuffer> &data);
-    void onSendRR();
-
-    void scheduleSendRR();
-    void addSDES(const sp<ABuffer> &buffer);
-
-    void notifyInitDone(status_t err);
-    void notifyError(status_t err);
-    void notifyPacketLost();
-
-    sp<Assembler> makeAssembler(uint8_t packetType);
-
-    void requestRetransmission(uint32_t senderSSRC, int32_t extSeqNo);
-
-    DISALLOW_EVIL_CONSTRUCTORS(RTPReceiver);
-};
-
-}  // namespace android
-
-#endif  // RTP_RECEIVER_H_
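
Tying the interface above together, a rough usage sketch. It assumes the calling code is itself an AHandler on a running ALooper with a started ANetworkSession; the notify constant and the MP2T payload type 33 are illustrative choices, not mandated by the class:

// Inside the caller's AHandler, with 'netSession' already started:
sp<AMessage> notify = new AMessage(kWhatReceiverNotify, id());
sp<RTPReceiver> receiver =
    new RTPReceiver(netSession, notify, RTPReceiver::FLAG_AUTO_CONNECT);
looper()->registerHandler(receiver);

// Map the RTP payload type we expect to a packetization scheme.
receiver->registerPacketType(33, RTPBase::PACKETIZATION_TRANSPORT_STREAM);

int32_t localRTPPort;
CHECK_EQ((status_t)OK,
         receiver->initAsync(
             RTPBase::TRANSPORT_UDP,
             RTPBase::TRANSPORT_UDP,
             &localRTPPort));

// Either call connect() once the remote ports are known, or rely on
// FLAG_AUTO_CONNECT to latch onto the first incoming packet's source.
// Results arrive on 'notify' as kWhatInitDone, kWhatAccessUnit (carrying an
// "accessUnit" buffer) and kWhatPacketLost messages.
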
diff --git a/media/libstagefright/wifi-display/rtptest.cpp b/media/libstagefright/wifi-display/rtptest.cpp
deleted file mode 100644
index b902f29..0000000
--- a/media/libstagefright/wifi-display/rtptest.cpp
+++ /dev/null
@@ -1,565 +0,0 @@
-/*
- * Copyright 2013, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "rtptest"
-#include <utils/Log.h>
-
-#include "rtp/RTPSender.h"
-#include "rtp/RTPReceiver.h"
-#include "TimeSyncer.h"
-
-#include <binder/ProcessState.h>
-#include <media/stagefright/foundation/ABuffer.h>
-#include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/foundation/AHandler.h>
-#include <media/stagefright/foundation/ALooper.h>
-#include <media/stagefright/foundation/AMessage.h>
-#include <media/stagefright/foundation/ANetworkSession.h>
-#include <media/stagefright/foundation/hexdump.h>
-#include <media/stagefright/DataSource.h>
-#include <media/stagefright/MediaDefs.h>
-#include <media/stagefright/NuMediaExtractor.h>
-#include <media/stagefright/Utils.h>
-
-#define MEDIA_FILENAME "/sdcard/Frame Counter HD 30FPS_1080p.mp4"
-
-namespace android {
-
-struct PacketSource : public RefBase {
-    PacketSource() {}
-
-    virtual sp<ABuffer> getNextAccessUnit() = 0;
-
-protected:
-    virtual ~PacketSource() {}
-
-private:
-    DISALLOW_EVIL_CONSTRUCTORS(PacketSource);
-};
-
-struct MediaPacketSource : public PacketSource {
-    MediaPacketSource()
-        : mMaxSampleSize(1024 * 1024) {
-        mExtractor = new NuMediaExtractor;
-        CHECK_EQ((status_t)OK,
-                 mExtractor->setDataSource(MEDIA_FILENAME));
-
-        bool haveVideo = false;
-        for (size_t i = 0; i < mExtractor->countTracks(); ++i) {
-            sp<AMessage> format;
-            CHECK_EQ((status_t)OK, mExtractor->getTrackFormat(i, &format));
-
-            AString mime;
-            CHECK(format->findString("mime", &mime));
-
-            if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_AVC, mime.c_str())) {
-                mExtractor->selectTrack(i);
-                haveVideo = true;
-                break;
-            }
-        }
-
-        CHECK(haveVideo);
-    }
-
-    virtual sp<ABuffer> getNextAccessUnit() {
-        int64_t timeUs;
-        status_t err = mExtractor->getSampleTime(&timeUs);
-
-        if (err != OK) {
-            return NULL;
-        }
-
-        sp<ABuffer> accessUnit = new ABuffer(mMaxSampleSize);
-        CHECK_EQ((status_t)OK, mExtractor->readSampleData(accessUnit));
-
-        accessUnit->meta()->setInt64("timeUs", timeUs);
-
-        CHECK_EQ((status_t)OK, mExtractor->advance());
-
-        return accessUnit;
-    }
-
-protected:
-    virtual ~MediaPacketSource() {
-    }
-
-private:
-    sp<NuMediaExtractor> mExtractor;
-    size_t mMaxSampleSize;
-
-    DISALLOW_EVIL_CONSTRUCTORS(MediaPacketSource);
-};
-
-struct SimplePacketSource : public PacketSource {
-    SimplePacketSource()
-        : mCounter(0) {
-    }
-
-    virtual sp<ABuffer> getNextAccessUnit() {
-        sp<ABuffer> buffer = new ABuffer(4);
-        uint8_t *dst = buffer->data();
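-        // Pack the 4-byte counter in big-endian (network) byte order.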
-        dst[0] = mCounter >> 24;
-        dst[1] = (mCounter >> 16) & 0xff;
-        dst[2] = (mCounter >> 8) & 0xff;
-        dst[3] = mCounter & 0xff;
-
-        buffer->meta()->setInt64("timeUs", mCounter * 1000000ll / kFrameRate);
-
-        ++mCounter;
-
-        return buffer;
-    }
-
-protected:
-    virtual ~SimplePacketSource() {
-    }
-
-private:
-    enum {
-        kFrameRate = 30
-    };
-
-    uint32_t mCounter;
-
-    DISALLOW_EVIL_CONSTRUCTORS(SimplePacketSource);
-};
-
-struct TestHandler : public AHandler {
-    TestHandler(const sp<ANetworkSession> &netSession);
-
-    void listen();
-    void connect(const char *host, int32_t port);
-
-protected:
-    virtual ~TestHandler();
-    virtual void onMessageReceived(const sp<AMessage> &msg);
-
-private:
-    enum {
-        kWhatListen,
-        kWhatConnect,
-        kWhatReceiverNotify,
-        kWhatSenderNotify,
-        kWhatSendMore,
-        kWhatStop,
-        kWhatTimeSyncerNotify,
-    };
-
-#if 1
-    static const RTPBase::TransportMode kRTPMode = RTPBase::TRANSPORT_UDP;
-    static const RTPBase::TransportMode kRTCPMode = RTPBase::TRANSPORT_UDP;
-#else
-    static const RTPBase::TransportMode kRTPMode = RTPBase::TRANSPORT_TCP;
-    static const RTPBase::TransportMode kRTCPMode = RTPBase::TRANSPORT_NONE;
-#endif
-
-#if 1
-    static const RTPBase::PacketizationMode kPacketizationMode
-        = RTPBase::PACKETIZATION_H264;
-#else
-    static const RTPBase::PacketizationMode kPacketizationMode
-        = RTPBase::PACKETIZATION_NONE;
-#endif
-
-    sp<ANetworkSession> mNetSession;
-    sp<PacketSource> mSource;
-    sp<RTPSender> mSender;
-    sp<RTPReceiver> mReceiver;
-
-    sp<TimeSyncer> mTimeSyncer;
-    bool mTimeSyncerStarted;
-
-    int64_t mFirstTimeRealUs;
-    int64_t mFirstTimeMediaUs;
-
-    int64_t mTimeOffsetUs;
-    bool mTimeOffsetValid;
-
-    status_t readMore();
-
-    DISALLOW_EVIL_CONSTRUCTORS(TestHandler);
-};
-
-TestHandler::TestHandler(const sp<ANetworkSession> &netSession)
-    : mNetSession(netSession),
-      mTimeSyncerStarted(false),
-      mFirstTimeRealUs(-1ll),
-      mFirstTimeMediaUs(-1ll),
-      mTimeOffsetUs(-1ll),
-      mTimeOffsetValid(false) {
-}
-
-TestHandler::~TestHandler() {
-}
-
-void TestHandler::listen() {
-    sp<AMessage> msg = new AMessage(kWhatListen, id());
-    msg->post();
-}
-
-void TestHandler::connect(const char *host, int32_t port) {
-    sp<AMessage> msg = new AMessage(kWhatConnect, id());
-    msg->setString("host", host);
-    msg->setInt32("port", port);
-    msg->post();
-}
-
-static void dumpDelay(int64_t delayMs) {
-    static const int64_t kMinDelayMs = 0;
-    static const int64_t kMaxDelayMs = 300;
-
-    const char *kPattern = "########################################";
-    size_t kPatternSize = strlen(kPattern);
-
-    int n = (kPatternSize * (delayMs - kMinDelayMs))
-                / (kMaxDelayMs - kMinDelayMs);
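-    // n is the number of '#' characters to print: the delay scaled linearly
-    // onto the pattern, then clamped to [0, kPatternSize] below.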
-
-    if (n < 0) {
-        n = 0;
-    } else if ((size_t)n > kPatternSize) {
-        n = kPatternSize;
-    }
-
-    ALOGI("(%4lld ms) %s\n",
-          delayMs,
-          kPattern + kPatternSize - n);
-}
-
-void TestHandler::onMessageReceived(const sp<AMessage> &msg) {
-    switch (msg->what()) {
-        case kWhatListen:
-        {
-            sp<AMessage> notify = new AMessage(kWhatTimeSyncerNotify, id());
-            mTimeSyncer = new TimeSyncer(mNetSession, notify);
-            looper()->registerHandler(mTimeSyncer);
-
-            notify = new AMessage(kWhatReceiverNotify, id());
-            mReceiver = new RTPReceiver(
-                    mNetSession, notify, RTPReceiver::FLAG_AUTO_CONNECT);
-            looper()->registerHandler(mReceiver);
-
-            CHECK_EQ((status_t)OK,
-                     mReceiver->registerPacketType(33, kPacketizationMode));
-
-            int32_t receiverRTPPort;
-            CHECK_EQ((status_t)OK,
-                     mReceiver->initAsync(
-                         kRTPMode,
-                         kRTCPMode,
-                         &receiverRTPPort));
-
-            printf("picked receiverRTPPort %d\n", receiverRTPPort);
-
-#if 0
-            CHECK_EQ((status_t)OK,
-                     mReceiver->connect(
-                         "127.0.0.1", senderRTPPort, senderRTPPort + 1));
-#endif
-            break;
-        }
-
-        case kWhatConnect:
-        {
-            AString host;
-            CHECK(msg->findString("host", &host));
-
-            sp<AMessage> notify = new AMessage(kWhatTimeSyncerNotify, id());
-            mTimeSyncer = new TimeSyncer(mNetSession, notify);
-            looper()->registerHandler(mTimeSyncer);
-            mTimeSyncer->startServer(8123);
-
-            int32_t receiverRTPPort;
-            CHECK(msg->findInt32("port", &receiverRTPPort));
-
-#if 1
-            mSource = new MediaPacketSource;
-#else
-            mSource = new SimplePacketSource;
-#endif
-
-            notify = new AMessage(kWhatSenderNotify, id());
-            mSender = new RTPSender(mNetSession, notify);
-
-            looper()->registerHandler(mSender);
-
-            int32_t senderRTPPort;
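-            // By convention RTCP uses the port just above RTP; pass -1 to
-            // disable the RTCP channel when kRTCPMode is TRANSPORT_NONE.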
-            CHECK_EQ((status_t)OK,
-                     mSender->initAsync(
-                         host.c_str(),
-                         receiverRTPPort,
-                         kRTPMode,
-                         kRTCPMode == RTPBase::TRANSPORT_NONE
-                            ? -1 : receiverRTPPort + 1,
-                         kRTCPMode,
-                         &senderRTPPort));
-
-            printf("picked senderRTPPort %d\n", senderRTPPort);
-            break;
-        }
-
-        case kWhatSenderNotify:
-        {
-            ALOGI("kWhatSenderNotify");
-
-            int32_t what;
-            CHECK(msg->findInt32("what", &what));
-
-            switch (what) {
-                case RTPSender::kWhatInitDone:
-                {
-                    int32_t err;
-                    CHECK(msg->findInt32("err", &err));
-
-                    ALOGI("RTPSender::initAsync completed w/ err %d", err);
-
-                    if (err == OK) {
-                        err = readMore();
-
-                        if (err != OK) {
-                            (new AMessage(kWhatStop, id()))->post();
-                        }
-                    }
-                    break;
-                }
-
-                case RTPSender::kWhatError:
-                    break;
-            }
-            break;
-        }
-
-        case kWhatReceiverNotify:
-        {
-            ALOGV("kWhatReceiverNotify");
-
-            int32_t what;
-            CHECK(msg->findInt32("what", &what));
-
-            switch (what) {
-                case RTPReceiver::kWhatInitDone:
-                {
-                    int32_t err;
-                    CHECK(msg->findInt32("err", &err));
-
-                    ALOGI("RTPReceiver::initAsync completed w/ err %d", err);
-                    break;
-                }
-
-                case RTPReceiver::kWhatError:
-                    break;
-
-                case RTPReceiver::kWhatAccessUnit:
-                {
-#if 0
-                    if (!mTimeSyncerStarted) {
-                        mTimeSyncer->startClient("172.18.41.216", 8123);
-                        mTimeSyncerStarted = true;
-                    }
-
-                    sp<ABuffer> accessUnit;
-                    CHECK(msg->findBuffer("accessUnit", &accessUnit));
-
-                    int64_t timeUs;
-                    CHECK(accessUnit->meta()->findInt64("timeUs", &timeUs));
-
-                    if (mTimeOffsetValid) {
-                        timeUs -= mTimeOffsetUs;
-                        int64_t nowUs = ALooper::GetNowUs();
-                        int64_t delayMs = (nowUs - timeUs) / 1000ll;
-
-                        dumpDelay(delayMs);
-                    }
-#endif
-                    break;
-                }
-
-                case RTPReceiver::kWhatPacketLost:
-                    ALOGV("kWhatPacketLost");
-                    break;
-
-                default:
-                    TRESPASS();
-            }
-            break;
-        }
-
-        case kWhatSendMore:
-        {
-            sp<ABuffer> accessUnit;
-            CHECK(msg->findBuffer("accessUnit", &accessUnit));
-
-            CHECK_EQ((status_t)OK,
-                     mSender->queueBuffer(
-                         accessUnit,
-                         33,
-                         kPacketizationMode));
-
-            status_t err = readMore();
-
-            if (err != OK) {
-                (new AMessage(kWhatStop, id()))->post();
-            }
-            break;
-        }
-
-        case kWhatStop:
-        {
-            if (mReceiver != NULL) {
-                looper()->unregisterHandler(mReceiver->id());
-                mReceiver.clear();
-            }
-
-            if (mSender != NULL) {
-                looper()->unregisterHandler(mSender->id());
-                mSender.clear();
-            }
-
-            mSource.clear();
-
-            looper()->stop();
-            break;
-        }
-
-        case kWhatTimeSyncerNotify:
-        {
-            CHECK(msg->findInt64("offset", &mTimeOffsetUs));
-            mTimeOffsetValid = true;
-            break;
-        }
-
-        default:
-            TRESPASS();
-    }
-}
-
-status_t TestHandler::readMore() {
-    sp<ABuffer> accessUnit = mSource->getNextAccessUnit();
-
-    if (accessUnit == NULL) {
-        return ERROR_END_OF_STREAM;
-    }
-
-    int64_t timeUs;
-    CHECK(accessUnit->meta()->findInt64("timeUs", &timeUs));
-
-    int64_t nowUs = ALooper::GetNowUs();
-    int64_t whenUs;
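-    // Map the sample's media timestamp onto the wall clock: the first unit is
-    // sent immediately, later units at mFirstTimeRealUs + (timeUs -
-    // mFirstTimeMediaUs), i.e. paced in real time.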
-
-    if (mFirstTimeRealUs < 0ll) {
-        mFirstTimeRealUs = whenUs = nowUs;
-        mFirstTimeMediaUs = timeUs;
-    } else {
-        whenUs = mFirstTimeRealUs + timeUs - mFirstTimeMediaUs;
-    }
-
-    accessUnit->meta()->setInt64("timeUs", whenUs);
-
-    sp<AMessage> msg = new AMessage(kWhatSendMore, id());
-    msg->setBuffer("accessUnit", accessUnit);
-    msg->post(whenUs - nowUs);
-
-    return OK;
-}
-
-}  // namespace android
-
-static void usage(const char *me) {
-    fprintf(stderr,
-            "usage: %s -c host:port\tconnect to remote host\n"
-            "               -l       \tlisten\n",
-            me);
-}
-
-int main(int argc, char **argv) {
-    using namespace android;
-
-    // srand(time(NULL));
-
-    ProcessState::self()->startThreadPool();
-
-    DataSource::RegisterDefaultSniffers();
-
-    bool listen = false;
-    int32_t connectToPort = -1;
-    AString connectToHost;
-
-    int res;
-    while ((res = getopt(argc, argv, "hc:l")) >= 0) {
-        switch (res) {
-            case 'c':
-            {
-                const char *colonPos = strrchr(optarg, ':');
-
-                if (colonPos == NULL) {
-                    usage(argv[0]);
-                    exit(1);
-                }
-
-                connectToHost.setTo(optarg, colonPos - optarg);
-
-                char *end;
-                connectToPort = strtol(colonPos + 1, &end, 10);
-
-                if (*end != '\0' || end == colonPos + 1
-                        || connectToPort < 1 || connectToPort > 65535) {
-                    fprintf(stderr, "Illegal port specified.\n");
-                    exit(1);
-                }
-                break;
-            }
-
-            case 'l':
-            {
-                listen = true;
-                break;
-            }
-
-            case '?':
-            case 'h':
-                usage(argv[0]);
-                exit(1);
-        }
-    }
-
-    if (!listen && connectToPort < 0) {
-        fprintf(stderr,
-                "You need to select either client or server mode.\n");
-        exit(1);
-    }
-
-    sp<ANetworkSession> netSession = new ANetworkSession;
-    netSession->start();
-
-    sp<ALooper> looper = new ALooper;
-
-    sp<TestHandler> handler = new TestHandler(netSession);
-    looper->registerHandler(handler);
-
-    if (listen) {
-        handler->listen();
-    }
-
-    if (connectToPort >= 0) {
-        handler->connect(connectToHost.c_str(), connectToPort);
-    }
-
-    looper->start(true /* runOnCallingThread */);
-
-    return 0;
-}
-
diff --git a/media/libstagefright/wifi-display/sink/DirectRenderer.cpp b/media/libstagefright/wifi-display/sink/DirectRenderer.cpp
deleted file mode 100644
index cdb2267..0000000
--- a/media/libstagefright/wifi-display/sink/DirectRenderer.cpp
+++ /dev/null
@@ -1,653 +0,0 @@
-/*
- * Copyright 2012, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "DirectRenderer"
-#include <utils/Log.h>
-
-#include "DirectRenderer.h"
-
-#include <gui/SurfaceComposerClient.h>
-#include <gui/Surface.h>
-#include <media/AudioTrack.h>
-#include <media/ICrypto.h>
-#include <media/stagefright/foundation/ABuffer.h>
-#include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/foundation/AMessage.h>
-#include <media/stagefright/foundation/hexdump.h>
-#include <media/stagefright/MediaCodec.h>
-#include <media/stagefright/MediaDefs.h>
-#include <media/stagefright/MediaErrors.h>
-
-namespace android {
-
-/*
-   Drives the decoding process using a MediaCodec instance. Input buffers
-   queued by calls to "queueInputBuffer" are fed to the decoder as soon
-   as the decoder is ready for them; the client is notified about output
-   buffers as the decoder spits them out.
-*/
-struct DirectRenderer::DecoderContext : public AHandler {
-    enum {
-        kWhatOutputBufferReady,
-    };
-    DecoderContext(const sp<AMessage> &notify);
-
-    status_t init(
-            const sp<AMessage> &format,
-            const sp<IGraphicBufferProducer> &surfaceTex);
-
-    void queueInputBuffer(const sp<ABuffer> &accessUnit);
-
-    status_t renderOutputBufferAndRelease(size_t index);
-    status_t releaseOutputBuffer(size_t index);
-
-protected:
-    virtual ~DecoderContext();
-
-    virtual void onMessageReceived(const sp<AMessage> &msg);
-
-private:
-    enum {
-        kWhatDecoderNotify,
-    };
-
-    sp<AMessage> mNotify;
-    sp<ALooper> mDecoderLooper;
-    sp<MediaCodec> mDecoder;
-    Vector<sp<ABuffer> > mDecoderInputBuffers;
-    Vector<sp<ABuffer> > mDecoderOutputBuffers;
-    List<size_t> mDecoderInputBuffersAvailable;
-    bool mDecoderNotificationPending;
-
-    List<sp<ABuffer> > mAccessUnits;
-
-    void onDecoderNotify();
-    void scheduleDecoderNotification();
-    void queueDecoderInputBuffers();
-
-    void queueOutputBuffer(
-            size_t index, int64_t timeUs, const sp<ABuffer> &buffer);
-
-    DISALLOW_EVIL_CONSTRUCTORS(DecoderContext);
-};
-
-////////////////////////////////////////////////////////////////////////////////
-
-/*
-   A "push" audio renderer. The primary function of this renderer is to use
-   an AudioTrack in push mode and making sure not to block the event loop
-   be ensuring that calls to AudioTrack::write never block. This is done by
-   estimating an upper bound of data that can be written to the AudioTrack
-   buffer without delay.
-*/
-struct DirectRenderer::AudioRenderer : public AHandler {
-    AudioRenderer(const sp<DecoderContext> &decoderContext);
-
-    void queueInputBuffer(
-            size_t index, int64_t timeUs, const sp<ABuffer> &buffer);
-
-protected:
-    virtual ~AudioRenderer();
-    virtual void onMessageReceived(const sp<AMessage> &msg);
-
-private:
-    enum {
-        kWhatPushAudio,
-    };
-
-    struct BufferInfo {
-        size_t mIndex;
-        int64_t mTimeUs;
-        sp<ABuffer> mBuffer;
-    };
-
-    sp<DecoderContext> mDecoderContext;
-    sp<AudioTrack> mAudioTrack;
-
-    List<BufferInfo> mInputBuffers;
-    bool mPushPending;
-
-    size_t mNumFramesWritten;
-
-    void schedulePushIfNecessary();
-    void onPushAudio();
-
-    ssize_t writeNonBlocking(const uint8_t *data, size_t size);
-
-    DISALLOW_EVIL_CONSTRUCTORS(AudioRenderer);
-};
-
-////////////////////////////////////////////////////////////////////////////////
-
-DirectRenderer::DecoderContext::DecoderContext(const sp<AMessage> &notify)
-    : mNotify(notify),
-      mDecoderNotificationPending(false) {
-}
-
-DirectRenderer::DecoderContext::~DecoderContext() {
-    if (mDecoder != NULL) {
-        mDecoder->release();
-        mDecoder.clear();
-
-        mDecoderLooper->stop();
-        mDecoderLooper.clear();
-    }
-}
-
-status_t DirectRenderer::DecoderContext::init(
-        const sp<AMessage> &format,
-        const sp<IGraphicBufferProducer> &surfaceTex) {
-    CHECK(mDecoder == NULL);
-
-    AString mime;
-    CHECK(format->findString("mime", &mime));
-
-    mDecoderLooper = new ALooper;
-    mDecoderLooper->setName("video codec looper");
-
-    mDecoderLooper->start(
-            false /* runOnCallingThread */,
-            false /* canCallJava */,
-            PRIORITY_DEFAULT);
-
-    mDecoder = MediaCodec::CreateByType(
-            mDecoderLooper, mime.c_str(), false /* encoder */);
-
-    CHECK(mDecoder != NULL);
-
-    status_t err = mDecoder->configure(
-            format,
-            surfaceTex == NULL
-                ? NULL : new Surface(surfaceTex),
-            NULL /* crypto */,
-            0 /* flags */);
-    CHECK_EQ(err, (status_t)OK);
-
-    err = mDecoder->start();
-    CHECK_EQ(err, (status_t)OK);
-
-    err = mDecoder->getInputBuffers(
-            &mDecoderInputBuffers);
-    CHECK_EQ(err, (status_t)OK);
-
-    err = mDecoder->getOutputBuffers(
-            &mDecoderOutputBuffers);
-    CHECK_EQ(err, (status_t)OK);
-
-    scheduleDecoderNotification();
-
-    return OK;
-}
-
-void DirectRenderer::DecoderContext::queueInputBuffer(
-        const sp<ABuffer> &accessUnit) {
-    CHECK(mDecoder != NULL);
-
-    mAccessUnits.push_back(accessUnit);
-    queueDecoderInputBuffers();
-}
-
-status_t DirectRenderer::DecoderContext::renderOutputBufferAndRelease(
-        size_t index) {
-    return mDecoder->renderOutputBufferAndRelease(index);
-}
-
-status_t DirectRenderer::DecoderContext::releaseOutputBuffer(size_t index) {
-    return mDecoder->releaseOutputBuffer(index);
-}
-
-void DirectRenderer::DecoderContext::queueDecoderInputBuffers() {
-    if (mDecoder == NULL) {
-        return;
-    }
-
-    bool submittedMore = false;
-
-    while (!mAccessUnits.empty()
-            && !mDecoderInputBuffersAvailable.empty()) {
-        size_t index = *mDecoderInputBuffersAvailable.begin();
-
-        mDecoderInputBuffersAvailable.erase(
-                mDecoderInputBuffersAvailable.begin());
-
-        sp<ABuffer> srcBuffer = *mAccessUnits.begin();
-        mAccessUnits.erase(mAccessUnits.begin());
-
-        const sp<ABuffer> &dstBuffer =
-            mDecoderInputBuffers.itemAt(index);
-
-        memcpy(dstBuffer->data(), srcBuffer->data(), srcBuffer->size());
-
-        int64_t timeUs;
-        CHECK(srcBuffer->meta()->findInt64("timeUs", &timeUs));
-
-        status_t err = mDecoder->queueInputBuffer(
-                index,
-                0 /* offset */,
-                srcBuffer->size(),
-                timeUs,
-                0 /* flags */);
-        CHECK_EQ(err, (status_t)OK);
-
-        submittedMore = true;
-    }
-
-    if (submittedMore) {
-        scheduleDecoderNotification();
-    }
-}
-
-void DirectRenderer::DecoderContext::onMessageReceived(
-        const sp<AMessage> &msg) {
-    switch (msg->what()) {
-        case kWhatDecoderNotify:
-        {
-            onDecoderNotify();
-            break;
-        }
-
-        default:
-            TRESPASS();
-    }
-}
-
-void DirectRenderer::DecoderContext::onDecoderNotify() {
-    mDecoderNotificationPending = false;
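-    // Drain every input buffer index the codec has made available, feed it
-    // pending access units, then drain all ready output buffers before
-    // re-arming the activity notification.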
-
-    for (;;) {
-        size_t index;
-        status_t err = mDecoder->dequeueInputBuffer(&index);
-
-        if (err == OK) {
-            mDecoderInputBuffersAvailable.push_back(index);
-        } else if (err == -EAGAIN) {
-            break;
-        } else {
-            TRESPASS();
-        }
-    }
-
-    queueDecoderInputBuffers();
-
-    for (;;) {
-        size_t index;
-        size_t offset;
-        size_t size;
-        int64_t timeUs;
-        uint32_t flags;
-        status_t err = mDecoder->dequeueOutputBuffer(
-                &index,
-                &offset,
-                &size,
-                &timeUs,
-                &flags);
-
-        if (err == OK) {
-            queueOutputBuffer(
-                    index, timeUs, mDecoderOutputBuffers.itemAt(index));
-        } else if (err == INFO_OUTPUT_BUFFERS_CHANGED) {
-            err = mDecoder->getOutputBuffers(
-                    &mDecoderOutputBuffers);
-            CHECK_EQ(err, (status_t)OK);
-        } else if (err == INFO_FORMAT_CHANGED) {
-            // We don't care.
-        } else if (err == -EAGAIN) {
-            break;
-        } else {
-            TRESPASS();
-        }
-    }
-
-    scheduleDecoderNotification();
-}
-
-void DirectRenderer::DecoderContext::scheduleDecoderNotification() {
-    if (mDecoderNotificationPending) {
-        return;
-    }
-
-    sp<AMessage> notify =
-        new AMessage(kWhatDecoderNotify, id());
-
-    mDecoder->requestActivityNotification(notify);
-    mDecoderNotificationPending = true;
-}
-
-void DirectRenderer::DecoderContext::queueOutputBuffer(
-        size_t index, int64_t timeUs, const sp<ABuffer> &buffer) {
-    sp<AMessage> msg = mNotify->dup();
-    msg->setInt32("what", kWhatOutputBufferReady);
-    msg->setSize("index", index);
-    msg->setInt64("timeUs", timeUs);
-    msg->setBuffer("buffer", buffer);
-    msg->post();
-}
-
-////////////////////////////////////////////////////////////////////////////////
-
-DirectRenderer::AudioRenderer::AudioRenderer(
-        const sp<DecoderContext> &decoderContext)
-    : mDecoderContext(decoderContext),
-      mPushPending(false),
-      mNumFramesWritten(0) {
-    mAudioTrack = new AudioTrack(
-            AUDIO_STREAM_DEFAULT,
-            48000.0f,
-            AUDIO_FORMAT_PCM,
-            AUDIO_CHANNEL_OUT_STEREO,
-            (int)0 /* frameCount */);
-
-    CHECK_EQ((status_t)OK, mAudioTrack->initCheck());
-
-    mAudioTrack->start();
-}
-
-DirectRenderer::AudioRenderer::~AudioRenderer() {
-}
-
-void DirectRenderer::AudioRenderer::queueInputBuffer(
-        size_t index, int64_t timeUs, const sp<ABuffer> &buffer) {
-    BufferInfo info;
-    info.mIndex = index;
-    info.mTimeUs = timeUs;
-    info.mBuffer = buffer;
-
-    mInputBuffers.push_back(info);
-    schedulePushIfNecessary();
-}
-
-void DirectRenderer::AudioRenderer::onMessageReceived(
-        const sp<AMessage> &msg) {
-    switch (msg->what()) {
-        case kWhatPushAudio:
-        {
-            onPushAudio();
-            break;
-        }
-
-        default:
-            break;
-    }
-}
-
-void DirectRenderer::AudioRenderer::schedulePushIfNecessary() {
-    if (mPushPending || mInputBuffers.empty()) {
-        return;
-    }
-
-    mPushPending = true;
-
-    uint32_t numFramesPlayed;
-    CHECK_EQ(mAudioTrack->getPosition(&numFramesPlayed),
-             (status_t)OK);
-
-    uint32_t numFramesPendingPlayout = mNumFramesWritten - numFramesPlayed;
-
-    // This is how long the audio sink will have data to
-    // play back.
-    const float msecsPerFrame = 1000.0f / mAudioTrack->getSampleRate();
-
-    int64_t delayUs =
-        msecsPerFrame * numFramesPendingPlayout * 1000ll;
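-    // msecsPerFrame is in milliseconds per frame, so the product times 1000
-    // is the buffered playout time in microseconds.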
-
-    // Let's give it more data after about half that time
-    // has elapsed.
-    (new AMessage(kWhatPushAudio, id()))->post(delayUs / 2);
-}
-
-void DirectRenderer::AudioRenderer::onPushAudio() {
-    mPushPending = false;
-
-    while (!mInputBuffers.empty()) {
-        const BufferInfo &info = *mInputBuffers.begin();
-
-        ssize_t n = writeNonBlocking(
-                info.mBuffer->data(), info.mBuffer->size());
-
-        if (n < (ssize_t)info.mBuffer->size()) {
-            CHECK_GE(n, 0);
-
-            info.mBuffer->setRange(
-                    info.mBuffer->offset() + n, info.mBuffer->size() - n);
-            break;
-        }
-
-        mDecoderContext->releaseOutputBuffer(info.mIndex);
-
-        mInputBuffers.erase(mInputBuffers.begin());
-    }
-
-    schedulePushIfNecessary();
-}
-
-ssize_t DirectRenderer::AudioRenderer::writeNonBlocking(
-        const uint8_t *data, size_t size) {
-    uint32_t numFramesPlayed;
-    status_t err = mAudioTrack->getPosition(&numFramesPlayed);
-    if (err != OK) {
-        return err;
-    }
-
-    ssize_t numFramesAvailableToWrite =
-        mAudioTrack->frameCount() - (mNumFramesWritten - numFramesPlayed);
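-    // (mNumFramesWritten - numFramesPlayed) frames are still queued in the
-    // track buffer; whatever remains of frameCount() can be written without
-    // blocking.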
-
-    size_t numBytesAvailableToWrite =
-        numFramesAvailableToWrite * mAudioTrack->frameSize();
-
-    if (size > numBytesAvailableToWrite) {
-        size = numBytesAvailableToWrite;
-    }
-
-    CHECK_EQ(mAudioTrack->write(data, size), (ssize_t)size);
-
-    size_t numFramesWritten = size / mAudioTrack->frameSize();
-    mNumFramesWritten += numFramesWritten;
-
-    return size;
-}
-
-////////////////////////////////////////////////////////////////////////////////
-
-DirectRenderer::DirectRenderer(
-        const sp<IGraphicBufferProducer> &bufferProducer)
-    : mSurfaceTex(bufferProducer),
-      mVideoRenderPending(false),
-      mNumFramesLate(0),
-      mNumFrames(0) {
-}
-
-DirectRenderer::~DirectRenderer() {
-}
-
-void DirectRenderer::onMessageReceived(const sp<AMessage> &msg) {
-    switch (msg->what()) {
-        case kWhatDecoderNotify:
-        {
-            onDecoderNotify(msg);
-            break;
-        }
-
-        case kWhatRenderVideo:
-        {
-            onRenderVideo();
-            break;
-        }
-
-        case kWhatQueueAccessUnit:
-            onQueueAccessUnit(msg);
-            break;
-
-        case kWhatSetFormat:
-            onSetFormat(msg);
-            break;
-
-        default:
-            TRESPASS();
-    }
-}
-
-void DirectRenderer::setFormat(size_t trackIndex, const sp<AMessage> &format) {
-    sp<AMessage> msg = new AMessage(kWhatSetFormat, id());
-    msg->setSize("trackIndex", trackIndex);
-    msg->setMessage("format", format);
-    msg->post();
-}
-
-void DirectRenderer::onSetFormat(const sp<AMessage> &msg) {
-    size_t trackIndex;
-    CHECK(msg->findSize("trackIndex", &trackIndex));
-
-    sp<AMessage> format;
-    CHECK(msg->findMessage("format", &format));
-
-    internalSetFormat(trackIndex, format);
-}
-
-void DirectRenderer::internalSetFormat(
-        size_t trackIndex, const sp<AMessage> &format) {
-    CHECK_LT(trackIndex, 2u);
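-    // Track 0 is video (decoded to the output surface), track 1 is audio.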
-
-    CHECK(mDecoderContext[trackIndex] == NULL);
-
-    sp<AMessage> notify = new AMessage(kWhatDecoderNotify, id());
-    notify->setSize("trackIndex", trackIndex);
-
-    mDecoderContext[trackIndex] = new DecoderContext(notify);
-    looper()->registerHandler(mDecoderContext[trackIndex]);
-
-    CHECK_EQ((status_t)OK,
-             mDecoderContext[trackIndex]->init(
-                 format, trackIndex == 0 ? mSurfaceTex : NULL));
-
-    if (trackIndex == 1) {
-        // Audio
-        mAudioRenderer = new AudioRenderer(mDecoderContext[1]);
-        looper()->registerHandler(mAudioRenderer);
-    }
-}
-
-void DirectRenderer::queueAccessUnit(
-        size_t trackIndex, const sp<ABuffer> &accessUnit) {
-    sp<AMessage> msg = new AMessage(kWhatQueueAccessUnit, id());
-    msg->setSize("trackIndex", trackIndex);
-    msg->setBuffer("accessUnit", accessUnit);
-    msg->post();
-}
-
-void DirectRenderer::onQueueAccessUnit(const sp<AMessage> &msg) {
-    size_t trackIndex;
-    CHECK(msg->findSize("trackIndex", &trackIndex));
-
-    sp<ABuffer> accessUnit;
-    CHECK(msg->findBuffer("accessUnit", &accessUnit));
-
-    CHECK_LT(trackIndex, 2u);
-    CHECK(mDecoderContext[trackIndex] != NULL);
-
-    mDecoderContext[trackIndex]->queueInputBuffer(accessUnit);
-}
-
-void DirectRenderer::onDecoderNotify(const sp<AMessage> &msg) {
-    size_t trackIndex;
-    CHECK(msg->findSize("trackIndex", &trackIndex));
-
-    int32_t what;
-    CHECK(msg->findInt32("what", &what));
-
-    switch (what) {
-        case DecoderContext::kWhatOutputBufferReady:
-        {
-            size_t index;
-            CHECK(msg->findSize("index", &index));
-
-            int64_t timeUs;
-            CHECK(msg->findInt64("timeUs", &timeUs));
-
-            sp<ABuffer> buffer;
-            CHECK(msg->findBuffer("buffer", &buffer));
-
-            queueOutputBuffer(trackIndex, index, timeUs, buffer);
-            break;
-        }
-
-        default:
-            TRESPASS();
-    }
-}
-
-void DirectRenderer::queueOutputBuffer(
-        size_t trackIndex,
-        size_t index, int64_t timeUs, const sp<ABuffer> &buffer) {
-    if (trackIndex == 1) {
-        // Audio
-        mAudioRenderer->queueInputBuffer(index, timeUs, buffer);
-        return;
-    }
-
-    OutputInfo info;
-    info.mIndex = index;
-    info.mTimeUs = timeUs;
-    info.mBuffer = buffer;
-    mVideoOutputBuffers.push_back(info);
-
-    scheduleVideoRenderIfNecessary();
-}
-
-void DirectRenderer::scheduleVideoRenderIfNecessary() {
-    if (mVideoRenderPending || mVideoOutputBuffers.empty()) {
-        return;
-    }
-
-    mVideoRenderPending = true;
-
-    int64_t timeUs = (*mVideoOutputBuffers.begin()).mTimeUs;
-    int64_t nowUs = ALooper::GetNowUs();
-
-    int64_t delayUs = timeUs - nowUs;
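-    // delayUs may be zero or negative if the earliest buffer is already due;
-    // the render message is then handled as soon as possible.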
-
-    (new AMessage(kWhatRenderVideo, id()))->post(delayUs);
-}
-
-void DirectRenderer::onRenderVideo() {
-    mVideoRenderPending = false;
-
-    int64_t nowUs = ALooper::GetNowUs();
-
-    while (!mVideoOutputBuffers.empty()) {
-        const OutputInfo &info = *mVideoOutputBuffers.begin();
-
-        if (info.mTimeUs > nowUs) {
-            break;
-        }
-
-        if (info.mTimeUs + 15000ll < nowUs) {
-            ++mNumFramesLate;
-        }
-        ++mNumFrames;
-
-        status_t err =
-            mDecoderContext[0]->renderOutputBufferAndRelease(info.mIndex);
-        CHECK_EQ(err, (status_t)OK);
-
-        mVideoOutputBuffers.erase(mVideoOutputBuffers.begin());
-    }
-
-    scheduleVideoRenderIfNecessary();
-}
-
-}  // namespace android
-
diff --git a/media/libstagefright/wifi-display/sink/DirectRenderer.h b/media/libstagefright/wifi-display/sink/DirectRenderer.h
deleted file mode 100644
index 07c2170..0000000
--- a/media/libstagefright/wifi-display/sink/DirectRenderer.h
+++ /dev/null
@@ -1,87 +0,0 @@
-/*
- * Copyright 2012, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef DIRECT_RENDERER_H_
-
-#define DIRECT_RENDERER_H_
-
-#include <media/stagefright/foundation/AHandler.h>
-
-namespace android {
-
-struct ABuffer;
-struct IGraphicBufferProducer;
-
-// Renders audio and video data queued by calls to "queueAccessUnit".
-struct DirectRenderer : public AHandler {
-    DirectRenderer(const sp<IGraphicBufferProducer> &bufferProducer);
-
-    void setFormat(size_t trackIndex, const sp<AMessage> &format);
-    void queueAccessUnit(size_t trackIndex, const sp<ABuffer> &accessUnit);
-
-protected:
-    virtual void onMessageReceived(const sp<AMessage> &msg);
-    virtual ~DirectRenderer();
-
-private:
-    struct DecoderContext;
-    struct AudioRenderer;
-
-    enum {
-        kWhatDecoderNotify,
-        kWhatRenderVideo,
-        kWhatQueueAccessUnit,
-        kWhatSetFormat,
-    };
-
-    struct OutputInfo {
-        size_t mIndex;
-        int64_t mTimeUs;
-        sp<ABuffer> mBuffer;
-    };
-
-    sp<IGraphicBufferProducer> mSurfaceTex;
-
-    sp<DecoderContext> mDecoderContext[2];
-    List<OutputInfo> mVideoOutputBuffers;
-
-    bool mVideoRenderPending;
-
-    sp<AudioRenderer> mAudioRenderer;
-
-    int32_t mNumFramesLate;
-    int32_t mNumFrames;
-
-    void onDecoderNotify(const sp<AMessage> &msg);
-
-    void queueOutputBuffer(
-            size_t trackIndex,
-            size_t index, int64_t timeUs, const sp<ABuffer> &buffer);
-
-    void scheduleVideoRenderIfNecessary();
-    void onRenderVideo();
-
-    void onSetFormat(const sp<AMessage> &msg);
-    void onQueueAccessUnit(const sp<AMessage> &msg);
-
-    void internalSetFormat(size_t trackIndex, const sp<AMessage> &format);
-
-    DISALLOW_EVIL_CONSTRUCTORS(DirectRenderer);
-};
-
-}  // namespace android
-
-#endif  // DIRECT_RENDERER_H_
diff --git a/media/libstagefright/wifi-display/sink/WifiDisplaySink.cpp b/media/libstagefright/wifi-display/sink/WifiDisplaySink.cpp
deleted file mode 100644
index bc88f1e..0000000
--- a/media/libstagefright/wifi-display/sink/WifiDisplaySink.cpp
+++ /dev/null
@@ -1,917 +0,0 @@
-/*
- * Copyright 2012, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "WifiDisplaySink"
-#include <utils/Log.h>
-
-#include "WifiDisplaySink.h"
-
-#include "DirectRenderer.h"
-#include "MediaReceiver.h"
-#include "TimeSyncer.h"
-
-#include <cutils/properties.h>
-#include <media/stagefright/foundation/ABuffer.h>
-#include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/foundation/AMessage.h>
-#include <media/stagefright/foundation/ParsedMessage.h>
-#include <media/stagefright/MediaErrors.h>
-#include <media/stagefright/Utils.h>
-
-namespace android {
-
-// static
-const AString WifiDisplaySink::sUserAgent = MakeUserAgent();
-
-WifiDisplaySink::WifiDisplaySink(
-        uint32_t flags,
-        const sp<ANetworkSession> &netSession,
-        const sp<IGraphicBufferProducer> &bufferProducer,
-        const sp<AMessage> &notify)
-    : mState(UNDEFINED),
-      mFlags(flags),
-      mNetSession(netSession),
-      mSurfaceTex(bufferProducer),
-      mNotify(notify),
-      mUsingTCPTransport(false),
-      mUsingTCPInterleaving(false),
-      mSessionID(0),
-      mNextCSeq(1),
-      mIDRFrameRequestPending(false),
-      mTimeOffsetUs(0ll),
-      mTimeOffsetValid(false),
-      mSetupDeferred(false),
-      mLatencyCount(0),
-      mLatencySumUs(0ll),
-      mLatencyMaxUs(0ll),
-      mMaxDelayMs(-1ll) {
-    // We support any and all resolutions, but prefer 720p30
-    mSinkSupportedVideoFormats.setNativeResolution(
-            VideoFormats::RESOLUTION_CEA, 5);  // 1280 x 720 p30
-
-    mSinkSupportedVideoFormats.enableAll();
-}
-
-WifiDisplaySink::~WifiDisplaySink() {
-}
-
-void WifiDisplaySink::start(const char *sourceHost, int32_t sourcePort) {
-    sp<AMessage> msg = new AMessage(kWhatStart, id());
-    msg->setString("sourceHost", sourceHost);
-    msg->setInt32("sourcePort", sourcePort);
-    msg->post();
-}
-
-void WifiDisplaySink::start(const char *uri) {
-    sp<AMessage> msg = new AMessage(kWhatStart, id());
-    msg->setString("setupURI", uri);
-    msg->post();
-}
-
-// static
-bool WifiDisplaySink::ParseURL(
-        const char *url, AString *host, int32_t *port, AString *path,
-        AString *user, AString *pass) {
-    host->clear();
-    *port = 0;
-    path->clear();
-    user->clear();
-    pass->clear();
-
-    if (strncasecmp("rtsp://", url, 7)) {
-        return false;
-    }
-
-    const char *slashPos = strchr(&url[7], '/');
-
-    if (slashPos == NULL) {
-        host->setTo(&url[7]);
-        path->setTo("/");
-    } else {
-        host->setTo(&url[7], slashPos - &url[7]);
-        path->setTo(slashPos);
-    }
-
-    ssize_t atPos = host->find("@");
-
-    if (atPos >= 0) {
-        // Split off the user:pass@ portion from the hostname.
-
-        AString userPass(*host, 0, atPos);
-        host->erase(0, atPos + 1);
-
-        ssize_t colonPos = userPass.find(":");
-
-        if (colonPos < 0) {
-            *user = userPass;
-        } else {
-            user->setTo(userPass, 0, colonPos);
-            pass->setTo(userPass, colonPos + 1, userPass.size() - colonPos - 1);
-        }
-    }
-
-    const char *colonPos = strchr(host->c_str(), ':');
-
-    if (colonPos != NULL) {
-        char *end;
-        unsigned long x = strtoul(colonPos + 1, &end, 10);
-
-        if (end == colonPos + 1 || *end != '\0' || x >= 65536) {
-            return false;
-        }
-
-        *port = x;
-
-        size_t colonOffset = colonPos - host->c_str();
-        size_t trailing = host->size() - colonOffset;
-        host->erase(colonOffset, trailing);
-    } else {
-        *port = 554;
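-        // No explicit port in the URL, default to the standard RTSP port.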
-    }
-
-    return true;
-}
-
-void WifiDisplaySink::onMessageReceived(const sp<AMessage> &msg) {
-    switch (msg->what()) {
-        case kWhatStart:
-        {
-            sleep(2);  // XXX
-
-            int32_t sourcePort;
-            CHECK(msg->findString("sourceHost", &mRTSPHost));
-            CHECK(msg->findInt32("sourcePort", &sourcePort));
-
-            sp<AMessage> notify = new AMessage(kWhatRTSPNotify, id());
-
-            status_t err = mNetSession->createRTSPClient(
-                    mRTSPHost.c_str(), sourcePort, notify, &mSessionID);
-            CHECK_EQ(err, (status_t)OK);
-
-            mState = CONNECTING;
-            break;
-        }
-
-        case kWhatRTSPNotify:
-        {
-            int32_t reason;
-            CHECK(msg->findInt32("reason", &reason));
-
-            switch (reason) {
-                case ANetworkSession::kWhatError:
-                {
-                    int32_t sessionID;
-                    CHECK(msg->findInt32("sessionID", &sessionID));
-
-                    int32_t err;
-                    CHECK(msg->findInt32("err", &err));
-
-                    AString detail;
-                    CHECK(msg->findString("detail", &detail));
-
-                    ALOGE("An error occurred in session %d (%d, '%s/%s').",
-                          sessionID,
-                          err,
-                          detail.c_str(),
-                          strerror(-err));
-
-                    if (sessionID == mSessionID) {
-                        ALOGI("Lost control connection.");
-
-                        // The control connection is dead now.
-                        mNetSession->destroySession(mSessionID);
-                        mSessionID = 0;
-
-                        if (mNotify == NULL) {
-                            looper()->stop();
-                        } else {
-                            sp<AMessage> notify = mNotify->dup();
-                            notify->setInt32("what", kWhatDisconnected);
-                            notify->post();
-                        }
-                    }
-                    break;
-                }
-
-                case ANetworkSession::kWhatConnected:
-                {
-                    ALOGI("We're now connected.");
-                    mState = CONNECTED;
-
-                    if (mFlags & FLAG_SPECIAL_MODE) {
-                        sp<AMessage> notify = new AMessage(
-                                kWhatTimeSyncerNotify, id());
-
-                        mTimeSyncer = new TimeSyncer(mNetSession, notify);
-                        looper()->registerHandler(mTimeSyncer);
-
-                        mTimeSyncer->startClient(mRTSPHost.c_str(), 8123);
-                    }
-                    break;
-                }
-
-                case ANetworkSession::kWhatData:
-                {
-                    onReceiveClientData(msg);
-                    break;
-                }
-
-                default:
-                    TRESPASS();
-            }
-            break;
-        }
-
-        case kWhatStop:
-        {
-            looper()->stop();
-            break;
-        }
-
-        case kWhatMediaReceiverNotify:
-        {
-            onMediaReceiverNotify(msg);
-            break;
-        }
-
-        case kWhatTimeSyncerNotify:
-        {
-            int32_t what;
-            CHECK(msg->findInt32("what", &what));
-
-            if (what == TimeSyncer::kWhatTimeOffset) {
-                CHECK(msg->findInt64("offset", &mTimeOffsetUs));
-                mTimeOffsetValid = true;
-
-                if (mSetupDeferred) {
-                    CHECK_EQ((status_t)OK,
-                             sendSetup(
-                                mSessionID,
-                                "rtsp://x.x.x.x:x/wfd1.0/streamid=0"));
-
-                    mSetupDeferred = false;
-                }
-            }
-            break;
-        }
-
-        case kWhatReportLateness:
-        {
-            if (mLatencyCount > 0) {
-                int64_t avgLatencyUs = mLatencySumUs / mLatencyCount;
-
-                ALOGV("avg. latency = %lld ms (max %lld ms)",
-                      avgLatencyUs / 1000ll,
-                      mLatencyMaxUs / 1000ll);
-
-                sp<AMessage> params = new AMessage;
-                params->setInt64("avgLatencyUs", avgLatencyUs);
-                params->setInt64("maxLatencyUs", mLatencyMaxUs);
-                mMediaReceiver->informSender(0 /* trackIndex */, params);
-            }
-
-            mLatencyCount = 0;
-            mLatencySumUs = 0ll;
-            mLatencyMaxUs = 0ll;
-
-            msg->post(kReportLatenessEveryUs);
-            break;
-        }
-
-        default:
-            TRESPASS();
-    }
-}
-
-void WifiDisplaySink::dumpDelay(size_t trackIndex, int64_t timeUs) {
-    int64_t delayMs = (ALooper::GetNowUs() - timeUs) / 1000ll;
-
-    if (delayMs > mMaxDelayMs) {
-        mMaxDelayMs = delayMs;
-    }
-
-    static const int64_t kMinDelayMs = 0;
-    static const int64_t kMaxDelayMs = 300;
-
-    const char *kPattern = "########################################";
-    size_t kPatternSize = strlen(kPattern);
-
-    int n = (kPatternSize * (delayMs - kMinDelayMs))
-                / (kMaxDelayMs - kMinDelayMs);
-
-    if (n < 0) {
-        n = 0;
-    } else if ((size_t)n > kPatternSize) {
-        n = kPatternSize;
-    }
-
-    ALOGI("[%lld]: (%4lld ms / %4lld ms) %s",
-          timeUs / 1000,
-          delayMs,
-          mMaxDelayMs,
-          kPattern + kPatternSize - n);
-}
-
-void WifiDisplaySink::onMediaReceiverNotify(const sp<AMessage> &msg) {
-    int32_t what;
-    CHECK(msg->findInt32("what", &what));
-
-    switch (what) {
-        case MediaReceiver::kWhatInitDone:
-        {
-            status_t err;
-            CHECK(msg->findInt32("err", &err));
-
-            ALOGI("MediaReceiver initialization completed w/ err %d", err);
-            break;
-        }
-
-        case MediaReceiver::kWhatError:
-        {
-            status_t err;
-            CHECK(msg->findInt32("err", &err));
-
-            ALOGE("MediaReceiver signaled error %d", err);
-            break;
-        }
-
-        case MediaReceiver::kWhatAccessUnit:
-        {
-            if (mRenderer == NULL) {
-                mRenderer = new DirectRenderer(mSurfaceTex);
-                looper()->registerHandler(mRenderer);
-            }
-
-            sp<ABuffer> accessUnit;
-            CHECK(msg->findBuffer("accessUnit", &accessUnit));
-
-            int64_t timeUs;
-            CHECK(accessUnit->meta()->findInt64("timeUs", &timeUs));
-
-            if (!mTimeOffsetValid && !(mFlags & FLAG_SPECIAL_MODE)) {
-                mTimeOffsetUs = timeUs - ALooper::GetNowUs();
-                mTimeOffsetValid = true;
-            }
-
-            CHECK(mTimeOffsetValid);
-
-            // We are the timesync _client_:
-            // client time = server time - time offset.
-            timeUs -= mTimeOffsetUs;
-
-            size_t trackIndex;
-            CHECK(msg->findSize("trackIndex", &trackIndex));
-
-            int64_t nowUs = ALooper::GetNowUs();
-            int64_t delayUs = nowUs - timeUs;
-
-            mLatencySumUs += delayUs;
-            if (mLatencyCount == 0 || delayUs > mLatencyMaxUs) {
-                mLatencyMaxUs = delayUs;
-            }
-            ++mLatencyCount;
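-            // These running latency stats are reset and reported to the
-            // source every kReportLatenessEveryUs (see kWhatReportLateness).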
-
-            // dumpDelay(trackIndex, timeUs);
-
-            timeUs += 220000ll;  // Assume 220 ms of latency
-            accessUnit->meta()->setInt64("timeUs", timeUs);
-
-            sp<AMessage> format;
-            if (msg->findMessage("format", &format)) {
-                mRenderer->setFormat(trackIndex, format);
-            }
-
-            mRenderer->queueAccessUnit(trackIndex, accessUnit);
-            break;
-        }
-
-        case MediaReceiver::kWhatPacketLost:
-        {
-#if 0
-            if (!mIDRFrameRequestPending) {
-                ALOGI("requesting IDR frame");
-
-                sendIDRFrameRequest(mSessionID);
-            }
-#endif
-            break;
-        }
-
-        default:
-            TRESPASS();
-    }
-}
-
-void WifiDisplaySink::registerResponseHandler(
-        int32_t sessionID, int32_t cseq, HandleRTSPResponseFunc func) {
-    ResponseID id;
-    id.mSessionID = sessionID;
-    id.mCSeq = cseq;
-    mResponseHandlers.add(id, func);
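-    // Responses are later matched back to their request by (sessionID, CSeq)
-    // in onReceiveClientData().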
-}
-
-status_t WifiDisplaySink::sendM2(int32_t sessionID) {
-    AString request = "OPTIONS * RTSP/1.0\r\n";
-    AppendCommonResponse(&request, mNextCSeq);
-
-    request.append(
-            "Require: org.wfa.wfd1.0\r\n"
-            "\r\n");
-
-    status_t err =
-        mNetSession->sendRequest(sessionID, request.c_str(), request.size());
-
-    if (err != OK) {
-        return err;
-    }
-
-    registerResponseHandler(
-            sessionID, mNextCSeq, &WifiDisplaySink::onReceiveM2Response);
-
-    ++mNextCSeq;
-
-    return OK;
-}
-
-status_t WifiDisplaySink::onReceiveM2Response(
-        int32_t sessionID, const sp<ParsedMessage> &msg) {
-    int32_t statusCode;
-    if (!msg->getStatusCode(&statusCode)) {
-        return ERROR_MALFORMED;
-    }
-
-    if (statusCode != 200) {
-        return ERROR_UNSUPPORTED;
-    }
-
-    return OK;
-}
-
-status_t WifiDisplaySink::onReceiveSetupResponse(
-        int32_t sessionID, const sp<ParsedMessage> &msg) {
-    int32_t statusCode;
-    if (!msg->getStatusCode(&statusCode)) {
-        return ERROR_MALFORMED;
-    }
-
-    if (statusCode != 200) {
-        return ERROR_UNSUPPORTED;
-    }
-
-    if (!msg->findString("session", &mPlaybackSessionID)) {
-        return ERROR_MALFORMED;
-    }
-
-    if (!ParsedMessage::GetInt32Attribute(
-                mPlaybackSessionID.c_str(),
-                "timeout",
-                &mPlaybackSessionTimeoutSecs)) {
-        mPlaybackSessionTimeoutSecs = -1;
-    }
-
-    ssize_t colonPos = mPlaybackSessionID.find(";");
-    if (colonPos >= 0) {
-        // Strip any options from the returned session id.
-        mPlaybackSessionID.erase(
-                colonPos, mPlaybackSessionID.size() - colonPos);
-    }
-
-    status_t err = configureTransport(msg);
-
-    if (err != OK) {
-        return err;
-    }
-
-    mState = PAUSED;
-
-    return sendPlay(
-            sessionID,
-            "rtsp://x.x.x.x:x/wfd1.0/streamid=0");
-}
-
-status_t WifiDisplaySink::configureTransport(const sp<ParsedMessage> &msg) {
-    if (mUsingTCPTransport && !(mFlags & FLAG_SPECIAL_MODE)) {
-        // In "special" mode we still use a UDP RTCP back-channel that
-        // needs connecting.
-        return OK;
-    }
-
-    AString transport;
-    if (!msg->findString("transport", &transport)) {
-        ALOGE("Missing 'transport' field in SETUP response.");
-        return ERROR_MALFORMED;
-    }
-
-    AString sourceHost;
-    if (!ParsedMessage::GetAttribute(
-                transport.c_str(), "source", &sourceHost)) {
-        sourceHost = mRTSPHost;
-    }
-
-    AString serverPortStr;
-    if (!ParsedMessage::GetAttribute(
-                transport.c_str(), "server_port", &serverPortStr)) {
-        ALOGE("Missing 'server_port' in Transport field.");
-        return ERROR_MALFORMED;
-    }
-
-    int rtpPort, rtcpPort;
-    if (sscanf(serverPortStr.c_str(), "%d-%d", &rtpPort, &rtcpPort) != 2
-            || rtpPort <= 0 || rtpPort > 65535
-            || rtcpPort <= 0 || rtcpPort > 65535
-            || rtcpPort != rtpPort + 1) {
-        ALOGE("Invalid server_port description '%s'.",
-                serverPortStr.c_str());
-
-        return ERROR_MALFORMED;
-    }
-
-    if (rtpPort & 1) {
-        ALOGW("Server picked an odd numbered RTP port.");
-    }
-
-    return mMediaReceiver->connectTrack(
-            0 /* trackIndex */, sourceHost.c_str(), rtpPort, rtcpPort);
-}
-
-status_t WifiDisplaySink::onReceivePlayResponse(
-        int32_t sessionID, const sp<ParsedMessage> &msg) {
-    int32_t statusCode;
-    if (!msg->getStatusCode(&statusCode)) {
-        return ERROR_MALFORMED;
-    }
-
-    if (statusCode != 200) {
-        return ERROR_UNSUPPORTED;
-    }
-
-    mState = PLAYING;
-
-    (new AMessage(kWhatReportLateness, id()))->post(kReportLatenessEveryUs);
-
-    return OK;
-}
-
-status_t WifiDisplaySink::onReceiveIDRFrameRequestResponse(
-        int32_t sessionID, const sp<ParsedMessage> &msg) {
-    CHECK(mIDRFrameRequestPending);
-    mIDRFrameRequestPending = false;
-
-    return OK;
-}
-
-void WifiDisplaySink::onReceiveClientData(const sp<AMessage> &msg) {
-    int32_t sessionID;
-    CHECK(msg->findInt32("sessionID", &sessionID));
-
-    sp<RefBase> obj;
-    CHECK(msg->findObject("data", &obj));
-
-    sp<ParsedMessage> data =
-        static_cast<ParsedMessage *>(obj.get());
-
-    ALOGV("session %d received '%s'",
-          sessionID, data->debugString().c_str());
-
-    AString method;
-    AString uri;
-    data->getRequestField(0, &method);
-
-    int32_t cseq;
-    if (!data->findInt32("cseq", &cseq)) {
-        sendErrorResponse(sessionID, "400 Bad Request", -1 /* cseq */);
-        return;
-    }
-
-    if (method.startsWith("RTSP/")) {
-        // This is a response.
-
-        ResponseID id;
-        id.mSessionID = sessionID;
-        id.mCSeq = cseq;
-
-        ssize_t index = mResponseHandlers.indexOfKey(id);
-
-        if (index < 0) {
-            ALOGW("Received unsolicited server response, cseq %d", cseq);
-            return;
-        }
-
-        HandleRTSPResponseFunc func = mResponseHandlers.valueAt(index);
-        mResponseHandlers.removeItemsAt(index);
-
-        status_t err = (this->*func)(sessionID, data);
-        CHECK_EQ(err, (status_t)OK);
-    } else {
-        AString version;
-        data->getRequestField(2, &version);
-        if (!(version == AString("RTSP/1.0"))) {
-            sendErrorResponse(sessionID, "505 RTSP Version not supported", cseq);
-            return;
-        }
-
-        if (method == "OPTIONS") {
-            onOptionsRequest(sessionID, cseq, data);
-        } else if (method == "GET_PARAMETER") {
-            onGetParameterRequest(sessionID, cseq, data);
-        } else if (method == "SET_PARAMETER") {
-            onSetParameterRequest(sessionID, cseq, data);
-        } else {
-            sendErrorResponse(sessionID, "405 Method Not Allowed", cseq);
-        }
-    }
-}
-
-void WifiDisplaySink::onOptionsRequest(
-        int32_t sessionID,
-        int32_t cseq,
-        const sp<ParsedMessage> &data) {
-    AString response = "RTSP/1.0 200 OK\r\n";
-    AppendCommonResponse(&response, cseq);
-    response.append("Public: org.wfa.wfd1.0, GET_PARAMETER, SET_PARAMETER\r\n");
-    response.append("\r\n");
-
-    status_t err = mNetSession->sendRequest(sessionID, response.c_str());
-    CHECK_EQ(err, (status_t)OK);
-
-    err = sendM2(sessionID);
-    CHECK_EQ(err, (status_t)OK);
-}
-
-void WifiDisplaySink::onGetParameterRequest(
-        int32_t sessionID,
-        int32_t cseq,
-        const sp<ParsedMessage> &data) {
-    AString body;
-
-    if (mState == CONNECTED) {
-        mUsingTCPTransport = false;
-        mUsingTCPInterleaving = false;
-
-        char val[PROPERTY_VALUE_MAX];
-        if (property_get("media.wfd-sink.tcp-mode", val, NULL)) {
-            if (!strcasecmp("true", val) || !strcmp("1", val)) {
-                ALOGI("Using TCP unicast transport.");
-                mUsingTCPTransport = true;
-                mUsingTCPInterleaving = false;
-            } else if (!strcasecmp("interleaved", val)) {
-                ALOGI("Using TCP interleaved transport.");
-                mUsingTCPTransport = true;
-                mUsingTCPInterleaving = true;
-            }
-        } else if (mFlags & FLAG_SPECIAL_MODE) {
-            mUsingTCPTransport = true;
-        }
-
-        body = "wfd_video_formats: ";
-        body.append(mSinkSupportedVideoFormats.getFormatSpec());
-
-        body.append(
-                "\r\nwfd_audio_codecs: AAC 0000000F 00\r\n"
-                "wfd_client_rtp_ports: RTP/AVP/");
-
-        if (mUsingTCPTransport) {
-            body.append("TCP;");
-            if (mUsingTCPInterleaving) {
-                body.append("interleaved");
-            } else {
-                body.append("unicast 19000 0");
-            }
-        } else {
-            body.append("UDP;unicast 19000 0");
-        }
-
-        body.append(" mode=play\r\n");
-    }
-
-    AString response = "RTSP/1.0 200 OK\r\n";
-    AppendCommonResponse(&response, cseq);
-    response.append("Content-Type: text/parameters\r\n");
-    response.append(StringPrintf("Content-Length: %d\r\n", body.size()));
-    response.append("\r\n");
-    response.append(body);
-
-    status_t err = mNetSession->sendRequest(sessionID, response.c_str());
-    CHECK_EQ(err, (status_t)OK);
-}
-
-status_t WifiDisplaySink::sendSetup(int32_t sessionID, const char *uri) {
-    sp<AMessage> notify = new AMessage(kWhatMediaReceiverNotify, id());
-
-    mMediaReceiverLooper = new ALooper;
-    mMediaReceiverLooper->setName("media_receiver");
-
-    mMediaReceiverLooper->start(
-            false /* runOnCallingThread */,
-            false /* canCallJava */,
-            PRIORITY_AUDIO);
-
-    mMediaReceiver = new MediaReceiver(mNetSession, notify);
-    mMediaReceiverLooper->registerHandler(mMediaReceiver);
-
-    RTPReceiver::TransportMode rtpMode = RTPReceiver::TRANSPORT_UDP;
-    if (mUsingTCPTransport) {
-        if (mUsingTCPInterleaving) {
-            rtpMode = RTPReceiver::TRANSPORT_TCP_INTERLEAVED;
-        } else {
-            rtpMode = RTPReceiver::TRANSPORT_TCP;
-        }
-    }
-
-    int32_t localRTPPort;
-    status_t err = mMediaReceiver->addTrack(
-            rtpMode, RTPReceiver::TRANSPORT_UDP /* rtcpMode */, &localRTPPort);
-
-    if (err == OK) {
-        err = mMediaReceiver->initAsync(MediaReceiver::MODE_TRANSPORT_STREAM);
-    }
-
-    if (err != OK) {
-        mMediaReceiverLooper->unregisterHandler(mMediaReceiver->id());
-        mMediaReceiver.clear();
-
-        mMediaReceiverLooper->stop();
-        mMediaReceiverLooper.clear();
-
-        return err;
-    }
-
-    AString request = StringPrintf("SETUP %s RTSP/1.0\r\n", uri);
-
-    AppendCommonResponse(&request, mNextCSeq);
-
-    if (rtpMode == RTPReceiver::TRANSPORT_TCP_INTERLEAVED) {
-        request.append("Transport: RTP/AVP/TCP;interleaved=0-1\r\n");
-    } else if (rtpMode == RTPReceiver::TRANSPORT_TCP) {
-        if (mFlags & FLAG_SPECIAL_MODE) {
-            // This isn't quite true, since the RTP connection is through TCP
-            // and the RTCP connection through UDP...
-            request.append(
-                    StringPrintf(
-                        "Transport: RTP/AVP/TCP;unicast;client_port=%d-%d\r\n",
-                        localRTPPort, localRTPPort + 1));
-        } else {
-            request.append(
-                    StringPrintf(
-                        "Transport: RTP/AVP/TCP;unicast;client_port=%d\r\n",
-                        localRTPPort));
-        }
-    } else {
-        request.append(
-                StringPrintf(
-                    "Transport: RTP/AVP/UDP;unicast;client_port=%d-%d\r\n",
-                    localRTPPort,
-                    localRTPPort + 1));
-    }
-
-    request.append("\r\n");
-
-    ALOGV("request = '%s'", request.c_str());
-
-    err = mNetSession->sendRequest(sessionID, request.c_str(), request.size());
-
-    if (err != OK) {
-        return err;
-    }
-
-    registerResponseHandler(
-            sessionID, mNextCSeq, &WifiDisplaySink::onReceiveSetupResponse);
-
-    ++mNextCSeq;
-
-    return OK;
-}
-
-status_t WifiDisplaySink::sendPlay(int32_t sessionID, const char *uri) {
-    AString request = StringPrintf("PLAY %s RTSP/1.0\r\n", uri);
-
-    AppendCommonResponse(&request, mNextCSeq);
-
-    request.append(StringPrintf("Session: %s\r\n", mPlaybackSessionID.c_str()));
-    request.append("\r\n");
-
-    status_t err =
-        mNetSession->sendRequest(sessionID, request.c_str(), request.size());
-
-    if (err != OK) {
-        return err;
-    }
-
-    registerResponseHandler(
-            sessionID, mNextCSeq, &WifiDisplaySink::onReceivePlayResponse);
-
-    ++mNextCSeq;
-
-    return OK;
-}
-
-status_t WifiDisplaySink::sendIDRFrameRequest(int32_t sessionID) {
-    CHECK(!mIDRFrameRequestPending);
-
-    AString request = "SET_PARAMETER rtsp://localhost/wfd1.0 RTSP/1.0\r\n";
-
-    AppendCommonResponse(&request, mNextCSeq);
-
-    AString content = "wfd_idr_request\r\n";
-
-    request.append(StringPrintf("Session: %s\r\n", mPlaybackSessionID.c_str()));
-    request.append(StringPrintf("Content-Length: %d\r\n", content.size()));
-    request.append("\r\n");
-    request.append(content);
-
-    status_t err =
-        mNetSession->sendRequest(sessionID, request.c_str(), request.size());
-
-    if (err != OK) {
-        return err;
-    }
-
-    registerResponseHandler(
-            sessionID,
-            mNextCSeq,
-            &WifiDisplaySink::onReceiveIDRFrameRequestResponse);
-
-    ++mNextCSeq;
-
-    mIDRFrameRequestPending = true;
-
-    return OK;
-}
-
-void WifiDisplaySink::onSetParameterRequest(
-        int32_t sessionID,
-        int32_t cseq,
-        const sp<ParsedMessage> &data) {
-    const char *content = data->getContent();
-
-    if (strstr(content, "wfd_trigger_method: SETUP\r\n") != NULL) {
-        if ((mFlags & FLAG_SPECIAL_MODE) && !mTimeOffsetValid) {
-            mSetupDeferred = true;
-        } else {
-            status_t err =
-                sendSetup(
-                        sessionID,
-                        "rtsp://x.x.x.x:x/wfd1.0/streamid=0");
-
-            CHECK_EQ(err, (status_t)OK);
-        }
-    }
-
-    AString response = "RTSP/1.0 200 OK\r\n";
-    AppendCommonResponse(&response, cseq);
-    response.append("\r\n");
-
-    status_t err = mNetSession->sendRequest(sessionID, response.c_str());
-    CHECK_EQ(err, (status_t)OK);
-}
-
-void WifiDisplaySink::sendErrorResponse(
-        int32_t sessionID,
-        const char *errorDetail,
-        int32_t cseq) {
-    AString response;
-    response.append("RTSP/1.0 ");
-    response.append(errorDetail);
-    response.append("\r\n");
-
-    AppendCommonResponse(&response, cseq);
-
-    response.append("\r\n");
-
-    status_t err = mNetSession->sendRequest(sessionID, response.c_str());
-    CHECK_EQ(err, (status_t)OK);
-}
-
-// static
-void WifiDisplaySink::AppendCommonResponse(AString *response, int32_t cseq) {
-    time_t now = time(NULL);
-    struct tm *now2 = gmtime(&now);
-    char buf[128];
-    strftime(buf, sizeof(buf), "%a, %d %b %Y %H:%M:%S %z", now2);
-
-    response->append("Date: ");
-    response->append(buf);
-    response->append("\r\n");
-
-    response->append(StringPrintf("User-Agent: %s\r\n", sUserAgent.c_str()));
-
-    if (cseq >= 0) {
-        response->append(StringPrintf("CSeq: %d\r\n", cseq));
-    }
-}
-
-}  // namespace android
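
Note: the deleted sink code above assembles each RTSP reply by appending a common Date / User-Agent / CSeq header block (AppendCommonResponse). A minimal standalone sketch of that header-building pattern, using only the standard library; the function name and the User-Agent value are placeholders, not the Android AString/WifiDisplaySink API.

#include <cstdio>
#include <ctime>
#include <string>

// Append the common response headers (Date, User-Agent, optional CSeq),
// mirroring the AppendCommonResponse() pattern removed above.
static void appendCommonHeaders(std::string *out, int cseq) {
    char buf[128];
    std::time_t now = std::time(nullptr);
    std::strftime(buf, sizeof(buf), "%a, %d %b %Y %H:%M:%S %z", std::gmtime(&now));

    out->append("Date: ").append(buf).append("\r\n");
    out->append("User-Agent: example-wfd-sink/1.0\r\n");   // placeholder value
    if (cseq >= 0) {
        char cseqLine[32];
        std::snprintf(cseqLine, sizeof(cseqLine), "CSeq: %d\r\n", cseq);
        out->append(cseqLine);
    }
}

int main() {
    std::string response = "RTSP/1.0 200 OK\r\n";
    appendCommonHeaders(&response, 3);
    response.append("Public: org.wfa.wfd1.0, GET_PARAMETER, SET_PARAMETER\r\n\r\n");
    std::printf("%s", response.c_str());
    return 0;
}
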
diff --git a/media/libstagefright/wifi-display/sink/WifiDisplaySink.h b/media/libstagefright/wifi-display/sink/WifiDisplaySink.h
deleted file mode 100644
index dc1fc32..0000000
--- a/media/libstagefright/wifi-display/sink/WifiDisplaySink.h
+++ /dev/null
@@ -1,195 +0,0 @@
-/*
- * Copyright 2012, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef WIFI_DISPLAY_SINK_H_
-
-#define WIFI_DISPLAY_SINK_H_
-
-#include "VideoFormats.h"
-
-#include <gui/Surface.h>
-#include <media/stagefright/foundation/AHandler.h>
-#include <media/stagefright/foundation/ANetworkSession.h>
-
-namespace android {
-
-struct AMessage;
-struct DirectRenderer;
-struct MediaReceiver;
-struct ParsedMessage;
-struct TimeSyncer;
-
-// Represents the RTSP client acting as a wifi display sink.
-// Connects to a wifi display source and renders the incoming
-// transport stream using a MediaPlayer instance.
-struct WifiDisplaySink : public AHandler {
-    enum {
-        kWhatDisconnected,
-    };
-
-    enum Flags {
-        FLAG_SPECIAL_MODE = 1,
-    };
-
-    // If no notification message is specified (notify == NULL)
-    // the sink will stop its looper() once the session ends,
-    // otherwise it will post an appropriate notification but leave
-    // the looper() running.
-    WifiDisplaySink(
-            uint32_t flags,
-            const sp<ANetworkSession> &netSession,
-            const sp<IGraphicBufferProducer> &bufferProducer = NULL,
-            const sp<AMessage> &notify = NULL);
-
-    void start(const char *sourceHost, int32_t sourcePort);
-    void start(const char *uri);
-
-protected:
-    virtual ~WifiDisplaySink();
-    virtual void onMessageReceived(const sp<AMessage> &msg);
-
-private:
-    enum State {
-        UNDEFINED,
-        CONNECTING,
-        CONNECTED,
-        PAUSED,
-        PLAYING,
-    };
-
-    enum {
-        kWhatStart,
-        kWhatRTSPNotify,
-        kWhatStop,
-        kWhatMediaReceiverNotify,
-        kWhatTimeSyncerNotify,
-        kWhatReportLateness,
-    };
-
-    struct ResponseID {
-        int32_t mSessionID;
-        int32_t mCSeq;
-
-        bool operator<(const ResponseID &other) const {
-            return mSessionID < other.mSessionID
-                || (mSessionID == other.mSessionID
-                        && mCSeq < other.mCSeq);
-        }
-    };
-
-    typedef status_t (WifiDisplaySink::*HandleRTSPResponseFunc)(
-            int32_t sessionID, const sp<ParsedMessage> &msg);
-
-    static const int64_t kReportLatenessEveryUs = 1000000ll;
-
-    static const AString sUserAgent;
-
-    State mState;
-    uint32_t mFlags;
-    VideoFormats mSinkSupportedVideoFormats;
-    sp<ANetworkSession> mNetSession;
-    sp<IGraphicBufferProducer> mSurfaceTex;
-    sp<AMessage> mNotify;
-    sp<TimeSyncer> mTimeSyncer;
-    bool mUsingTCPTransport;
-    bool mUsingTCPInterleaving;
-    AString mRTSPHost;
-    int32_t mSessionID;
-
-    int32_t mNextCSeq;
-
-    KeyedVector<ResponseID, HandleRTSPResponseFunc> mResponseHandlers;
-
-    sp<ALooper> mMediaReceiverLooper;
-    sp<MediaReceiver> mMediaReceiver;
-    sp<DirectRenderer> mRenderer;
-
-    AString mPlaybackSessionID;
-    int32_t mPlaybackSessionTimeoutSecs;
-
-    bool mIDRFrameRequestPending;
-
-    int64_t mTimeOffsetUs;
-    bool mTimeOffsetValid;
-
-    bool mSetupDeferred;
-
-    size_t mLatencyCount;
-    int64_t mLatencySumUs;
-    int64_t mLatencyMaxUs;
-
-    int64_t mMaxDelayMs;
-
-    status_t sendM2(int32_t sessionID);
-    status_t sendSetup(int32_t sessionID, const char *uri);
-    status_t sendPlay(int32_t sessionID, const char *uri);
-    status_t sendIDRFrameRequest(int32_t sessionID);
-
-    status_t onReceiveM2Response(
-            int32_t sessionID, const sp<ParsedMessage> &msg);
-
-    status_t onReceiveSetupResponse(
-            int32_t sessionID, const sp<ParsedMessage> &msg);
-
-    status_t configureTransport(const sp<ParsedMessage> &msg);
-
-    status_t onReceivePlayResponse(
-            int32_t sessionID, const sp<ParsedMessage> &msg);
-
-    status_t onReceiveIDRFrameRequestResponse(
-            int32_t sessionID, const sp<ParsedMessage> &msg);
-
-    void registerResponseHandler(
-            int32_t sessionID, int32_t cseq, HandleRTSPResponseFunc func);
-
-    void onReceiveClientData(const sp<AMessage> &msg);
-
-    void onOptionsRequest(
-            int32_t sessionID,
-            int32_t cseq,
-            const sp<ParsedMessage> &data);
-
-    void onGetParameterRequest(
-            int32_t sessionID,
-            int32_t cseq,
-            const sp<ParsedMessage> &data);
-
-    void onSetParameterRequest(
-            int32_t sessionID,
-            int32_t cseq,
-            const sp<ParsedMessage> &data);
-
-    void onMediaReceiverNotify(const sp<AMessage> &msg);
-
-    void sendErrorResponse(
-            int32_t sessionID,
-            const char *errorDetail,
-            int32_t cseq);
-
-    static void AppendCommonResponse(AString *response, int32_t cseq);
-
-    bool ParseURL(
-            const char *url, AString *host, int32_t *port, AString *path,
-            AString *user, AString *pass);
-
-    void dumpDelay(size_t trackIndex, int64_t timeUs);
-
-    DISALLOW_EVIL_CONSTRUCTORS(WifiDisplaySink);
-};
-
-}  // namespace android
-
-#endif  // WIFI_DISPLAY_SINK_H_
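
Note: the deleted header pairs each outstanding RTSP request with a member-function handler keyed by (sessionID, CSeq), which is how responses are routed back when they arrive. A rough equivalent with std::map and an invented RtspClient type; nothing below is the Android API.

#include <cstdio>
#include <map>
#include <string>
#include <utility>

// Illustrative client: routes responses by (sessionID, cseq), like the
// ResponseID/KeyedVector pairing in the deleted header. Names are hypothetical.
struct RtspClient {
    using Handler = int (RtspClient::*)(int sessionID, const std::string &msg);

    // std::pair already provides the lexicographic ordering that ResponseID
    // implemented by hand with operator<.
    std::map<std::pair<int, int>, Handler> handlers;

    void registerHandler(int sessionID, int cseq, Handler h) {
        handlers[{sessionID, cseq}] = h;
    }

    int dispatch(int sessionID, int cseq, const std::string &msg) {
        auto it = handlers.find({sessionID, cseq});
        if (it == handlers.end()) return -1;       // unsolicited response
        Handler h = it->second;
        handlers.erase(it);                        // one-shot, as in the sink
        return (this->*h)(sessionID, msg);
    }

    int onSetupResponse(int sessionID, const std::string &msg) {
        std::printf("SETUP response on session %d: %s\n", sessionID, msg.c_str());
        return 0;
    }
};

int main() {
    RtspClient client;
    client.registerHandler(/*sessionID=*/1, /*cseq=*/2, &RtspClient::onSetupResponse);
    client.dispatch(1, 2, "RTSP/1.0 200 OK");
    return 0;
}
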
diff --git a/media/libstagefright/wifi-display/source/Converter.cpp b/media/libstagefright/wifi-display/source/Converter.cpp
index 6f23854..753b3ec 100644
--- a/media/libstagefright/wifi-display/source/Converter.cpp
+++ b/media/libstagefright/wifi-display/source/Converter.cpp
@@ -833,7 +833,7 @@
 void Converter::setVideoBitrate(int32_t bitRate) {
     if (mIsVideo && mEncoder != NULL && bitRate != mPrevVideoBitrate) {
         sp<AMessage> params = new AMessage;
-        params->setInt32("videoBitrate", bitRate);
+        params->setInt32("video-bitrate", bitRate);
 
         mEncoder->setParameters(params);
 
diff --git a/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp b/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp
index d72349d..05e4018 100644
--- a/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp
+++ b/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp
@@ -22,7 +22,6 @@
 #include "PlaybackSession.h"
 #include "Parameters.h"
 #include "rtp/RTPSender.h"
-#include "TimeSyncer.h"
 
 #include <binder/IServiceManager.h>
 #include <gui/IGraphicBufferProducer.h>
@@ -173,15 +172,7 @@
                 }
             }
 
-            if (err == OK) {
-                sp<AMessage> notify = new AMessage(kWhatTimeSyncerNotify, id());
-                mTimeSyncer = new TimeSyncer(mNetSession, notify);
-                looper()->registerHandler(mTimeSyncer);
-
-                mTimeSyncer->startServer(8123);
-
-                mState = AWAITING_CLIENT_CONNECTION;
-            }
+            mState = AWAITING_CLIENT_CONNECTION;
 
             sp<AMessage> response = new AMessage;
             response->setInt32("err", err);
@@ -556,11 +547,6 @@
             break;
         }
 
-        case kWhatTimeSyncerNotify:
-        {
-            break;
-        }
-
         default:
             TRESPASS();
     }
diff --git a/media/libstagefright/wifi-display/source/WifiDisplaySource.h b/media/libstagefright/wifi-display/source/WifiDisplaySource.h
index 4f11712..750265f 100644
--- a/media/libstagefright/wifi-display/source/WifiDisplaySource.h
+++ b/media/libstagefright/wifi-display/source/WifiDisplaySource.h
@@ -30,7 +30,6 @@
 struct IHDCP;
 struct IRemoteDisplayClient;
 struct ParsedMessage;
-struct TimeSyncer;
 
 // Represents the RTSP server acting as a wifi display source.
 // Manages incoming connections, sets up Playback sessions as necessary.
@@ -83,7 +82,6 @@
         kWhatHDCPNotify,
         kWhatFinishStop2,
         kWhatTeardownTriggerTimedOut,
-        kWhatTimeSyncerNotify,
     };
 
     struct ResponseID {
@@ -120,7 +118,6 @@
     sp<ANetworkSession> mNetSession;
     sp<IRemoteDisplayClient> mClient;
     AString mMediaPath;
-    sp<TimeSyncer> mTimeSyncer;
     struct in_addr mInterfaceAddr;
     int32_t mSessionID;
 
diff --git a/media/libstagefright/wifi-display/udptest.cpp b/media/libstagefright/wifi-display/udptest.cpp
deleted file mode 100644
index 61eb9f9..0000000
--- a/media/libstagefright/wifi-display/udptest.cpp
+++ /dev/null
@@ -1,116 +0,0 @@
-/*
- * Copyright 2012, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "udptest"
-#include <utils/Log.h>
-
-#include "TimeSyncer.h"
-
-#include <binder/ProcessState.h>
-#include <media/stagefright/foundation/AMessage.h>
-#include <media/stagefright/foundation/ANetworkSession.h>
-
-namespace android {
-
-}  // namespace android
-
-static void usage(const char *me) {
-    fprintf(stderr,
-            "usage: %s -c host[:port]\tconnect to test server\n"
-            "           -l            \tcreate a test server\n",
-            me);
-}
-
-int main(int argc, char **argv) {
-    using namespace android;
-
-    ProcessState::self()->startThreadPool();
-
-    int32_t localPort = -1;
-    int32_t connectToPort = -1;
-    AString connectToHost;
-
-    int res;
-    while ((res = getopt(argc, argv, "hc:l:")) >= 0) {
-        switch (res) {
-            case 'c':
-            {
-                const char *colonPos = strrchr(optarg, ':');
-
-                if (colonPos == NULL) {
-                    connectToHost = optarg;
-                    connectToPort = 49152;
-                } else {
-                    connectToHost.setTo(optarg, colonPos - optarg);
-
-                    char *end;
-                    connectToPort = strtol(colonPos + 1, &end, 10);
-
-                    if (*end != '\0' || end == colonPos + 1
-                            || connectToPort < 1 || connectToPort > 65535) {
-                        fprintf(stderr, "Illegal port specified.\n");
-                        exit(1);
-                    }
-                }
-                break;
-            }
-
-            case 'l':
-            {
-                char *end;
-                localPort = strtol(optarg, &end, 10);
-
-                if (*end != '\0' || end == optarg
-                        || localPort < 1 || localPort > 65535) {
-                    fprintf(stderr, "Illegal port specified.\n");
-                    exit(1);
-                }
-                break;
-            }
-
-            case '?':
-            case 'h':
-                usage(argv[0]);
-                exit(1);
-        }
-    }
-
-    if (localPort < 0 && connectToPort < 0) {
-        fprintf(stderr,
-                "You need to select either client or server mode.\n");
-        exit(1);
-    }
-
-    sp<ANetworkSession> netSession = new ANetworkSession;
-    netSession->start();
-
-    sp<ALooper> looper = new ALooper;
-
-    sp<TimeSyncer> handler = new TimeSyncer(netSession, NULL /* notify */);
-    looper->registerHandler(handler);
-
-    if (localPort >= 0) {
-        handler->startServer(localPort);
-    } else {
-        handler->startClient(connectToHost.c_str(), connectToPort);
-    }
-
-    looper->start(true /* runOnCallingThread */);
-
-    return 0;
-}
-
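
Note: both deleted test tools (udptest above, wfd below) parse a host[:port] argument by splitting on the last ':' and validating the port with strtol. A self-contained sketch of that parsing; the default port 7236 is only an example value.

#include <cstdio>
#include <cstdlib>
#include <cstring>
#include <string>

// Parse "host" or "host:port" the way the deleted tools did: split on the
// last ':' and accept only fully numeric ports in [1, 65535].
static bool parseHostPort(const char *arg, int defaultPort,
                          std::string *host, int *port) {
    const char *colon = std::strrchr(arg, ':');
    if (colon == nullptr) {
        *host = arg;
        *port = defaultPort;
        return true;
    }
    host->assign(arg, colon - arg);

    char *end;
    long p = std::strtol(colon + 1, &end, 10);
    if (*end != '\0' || end == colon + 1 || p < 1 || p > 65535) {
        return false;   // trailing junk, empty port, or out of range
    }
    *port = static_cast<int>(p);
    return true;
}

int main(int argc, char **argv) {
    std::string host;
    int port;
    const char *arg = (argc > 1) ? argv[1] : "192.168.1.10:7236";
    if (!parseHostPort(arg, /*defaultPort=*/7236, &host, &port)) {
        std::fprintf(stderr, "Illegal port specified.\n");
        return 1;
    }
    std::printf("host=%s port=%d\n", host.c_str(), port);
    return 0;
}
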
diff --git a/media/libstagefright/wifi-display/wfd.cpp b/media/libstagefright/wifi-display/wfd.cpp
deleted file mode 100644
index 52e4e26..0000000
--- a/media/libstagefright/wifi-display/wfd.cpp
+++ /dev/null
@@ -1,363 +0,0 @@
-/*
- * Copyright 2012, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "wfd"
-#include <utils/Log.h>
-
-#include "sink/WifiDisplaySink.h"
-#include "source/WifiDisplaySource.h"
-
-#include <binder/ProcessState.h>
-#include <binder/IServiceManager.h>
-#include <gui/ISurfaceComposer.h>
-#include <gui/SurfaceComposerClient.h>
-#include <media/AudioSystem.h>
-#include <media/IMediaPlayerService.h>
-#include <media/IRemoteDisplay.h>
-#include <media/IRemoteDisplayClient.h>
-#include <media/stagefright/DataSource.h>
-#include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/foundation/AMessage.h>
-#include <ui/DisplayInfo.h>
-
-namespace android {
-
-static void usage(const char *me) {
-    fprintf(stderr,
-            "usage:\n"
-            "           %s -c host[:port]\tconnect to wifi source\n"
-            "               -u uri        \tconnect to an rtsp uri\n"
-            "               -l ip[:port] \tlisten on the specified port "
-            "               -f(ilename)  \tstream media "
-            "(create a sink)\n"
-            "               -s(pecial)   \trun in 'special' mode\n",
-            me);
-}
-
-struct RemoteDisplayClient : public BnRemoteDisplayClient {
-    RemoteDisplayClient();
-
-    virtual void onDisplayConnected(
-            const sp<IGraphicBufferProducer> &bufferProducer,
-            uint32_t width,
-            uint32_t height,
-            uint32_t flags,
-            uint32_t session);
-
-    virtual void onDisplayDisconnected();
-    virtual void onDisplayError(int32_t error);
-
-    void waitUntilDone();
-
-protected:
-    virtual ~RemoteDisplayClient();
-
-private:
-    Mutex mLock;
-    Condition mCondition;
-
-    bool mDone;
-
-    sp<SurfaceComposerClient> mComposerClient;
-    sp<IGraphicBufferProducer> mSurfaceTexture;
-    sp<IBinder> mDisplayBinder;
-
-    DISALLOW_EVIL_CONSTRUCTORS(RemoteDisplayClient);
-};
-
-RemoteDisplayClient::RemoteDisplayClient()
-    : mDone(false) {
-    mComposerClient = new SurfaceComposerClient;
-    CHECK_EQ(mComposerClient->initCheck(), (status_t)OK);
-}
-
-RemoteDisplayClient::~RemoteDisplayClient() {
-}
-
-void RemoteDisplayClient::onDisplayConnected(
-        const sp<IGraphicBufferProducer> &bufferProducer,
-        uint32_t width,
-        uint32_t height,
-        uint32_t flags,
-        uint32_t session) {
-    ALOGI("onDisplayConnected width=%u, height=%u, flags = 0x%08x, session = %d",
-          width, height, flags, session);
-
-    if (bufferProducer != NULL) {
-        mSurfaceTexture = bufferProducer;
-        mDisplayBinder = mComposerClient->createDisplay(
-                String8("foo"), false /* secure */);
-
-        SurfaceComposerClient::openGlobalTransaction();
-        mComposerClient->setDisplaySurface(mDisplayBinder, mSurfaceTexture);
-
-        Rect layerStackRect(1280, 720);  // XXX fix this.
-        Rect displayRect(1280, 720);
-
-        mComposerClient->setDisplayProjection(
-                mDisplayBinder, 0 /* 0 degree rotation */,
-                layerStackRect,
-                displayRect);
-
-        SurfaceComposerClient::closeGlobalTransaction();
-    }
-}
-
-void RemoteDisplayClient::onDisplayDisconnected() {
-    ALOGI("onDisplayDisconnected");
-
-    Mutex::Autolock autoLock(mLock);
-    mDone = true;
-    mCondition.broadcast();
-}
-
-void RemoteDisplayClient::onDisplayError(int32_t error) {
-    ALOGI("onDisplayError error=%d", error);
-
-    Mutex::Autolock autoLock(mLock);
-    mDone = true;
-    mCondition.broadcast();
-}
-
-void RemoteDisplayClient::waitUntilDone() {
-    Mutex::Autolock autoLock(mLock);
-    while (!mDone) {
-        mCondition.wait(mLock);
-    }
-}
-
-static void createSource(const AString &addr, int32_t port) {
-    sp<IServiceManager> sm = defaultServiceManager();
-    sp<IBinder> binder = sm->getService(String16("media.player"));
-    sp<IMediaPlayerService> service =
-        interface_cast<IMediaPlayerService>(binder);
-
-    CHECK(service.get() != NULL);
-
-    String8 iface;
-    iface.append(addr.c_str());
-    iface.append(StringPrintf(":%d", port).c_str());
-
-    sp<RemoteDisplayClient> client = new RemoteDisplayClient;
-    sp<IRemoteDisplay> display =
-        service->listenForRemoteDisplay(client, iface);
-
-    client->waitUntilDone();
-
-    display->dispose();
-    display.clear();
-}
-
-static void createFileSource(
-        const AString &addr, int32_t port, const char *path) {
-    sp<ANetworkSession> session = new ANetworkSession;
-    session->start();
-
-    sp<ALooper> looper = new ALooper;
-    looper->start();
-
-    sp<RemoteDisplayClient> client = new RemoteDisplayClient;
-    sp<WifiDisplaySource> source = new WifiDisplaySource(session, client, path);
-    looper->registerHandler(source);
-
-    AString iface = StringPrintf("%s:%d", addr.c_str(), port);
-    CHECK_EQ((status_t)OK, source->start(iface.c_str()));
-
-    client->waitUntilDone();
-
-    source->stop();
-}
-
-}  // namespace android
-
-int main(int argc, char **argv) {
-    using namespace android;
-
-    ProcessState::self()->startThreadPool();
-
-    DataSource::RegisterDefaultSniffers();
-
-    AString connectToHost;
-    int32_t connectToPort = -1;
-    AString uri;
-
-    AString listenOnAddr;
-    int32_t listenOnPort = -1;
-
-    AString path;
-
-    bool specialMode = false;
-
-    int res;
-    while ((res = getopt(argc, argv, "hc:l:u:f:s")) >= 0) {
-        switch (res) {
-            case 'c':
-            {
-                const char *colonPos = strrchr(optarg, ':');
-
-                if (colonPos == NULL) {
-                    connectToHost = optarg;
-                    connectToPort = WifiDisplaySource::kWifiDisplayDefaultPort;
-                } else {
-                    connectToHost.setTo(optarg, colonPos - optarg);
-
-                    char *end;
-                    connectToPort = strtol(colonPos + 1, &end, 10);
-
-                    if (*end != '\0' || end == colonPos + 1
-                            || connectToPort < 1 || connectToPort > 65535) {
-                        fprintf(stderr, "Illegal port specified.\n");
-                        exit(1);
-                    }
-                }
-                break;
-            }
-
-            case 'u':
-            {
-                uri = optarg;
-                break;
-            }
-
-            case 'f':
-            {
-                path = optarg;
-                break;
-            }
-
-            case 'l':
-            {
-                const char *colonPos = strrchr(optarg, ':');
-
-                if (colonPos == NULL) {
-                    listenOnAddr = optarg;
-                    listenOnPort = WifiDisplaySource::kWifiDisplayDefaultPort;
-                } else {
-                    listenOnAddr.setTo(optarg, colonPos - optarg);
-
-                    char *end;
-                    listenOnPort = strtol(colonPos + 1, &end, 10);
-
-                    if (*end != '\0' || end == colonPos + 1
-                            || listenOnPort < 1 || listenOnPort > 65535) {
-                        fprintf(stderr, "Illegal port specified.\n");
-                        exit(1);
-                    }
-                }
-                break;
-            }
-
-            case 's':
-            {
-                specialMode = true;
-                break;
-            }
-
-            case '?':
-            case 'h':
-            default:
-                usage(argv[0]);
-                exit(1);
-        }
-    }
-
-    if (connectToPort >= 0 && listenOnPort >= 0) {
-        fprintf(stderr,
-                "You can connect to a source or create one, "
-                "but not both at the same time.\n");
-        exit(1);
-    }
-
-    if (listenOnPort >= 0) {
-        if (path.empty()) {
-            createSource(listenOnAddr, listenOnPort);
-        } else {
-            createFileSource(listenOnAddr, listenOnPort, path.c_str());
-        }
-
-        exit(0);
-    }
-
-    if (connectToPort < 0 && uri.empty()) {
-        fprintf(stderr,
-                "You need to select either source host or uri.\n");
-
-        exit(1);
-    }
-
-    if (connectToPort >= 0 && !uri.empty()) {
-        fprintf(stderr,
-                "You need to either connect to a wfd host or an rtsp url, "
-                "not both.\n");
-        exit(1);
-    }
-
-    sp<SurfaceComposerClient> composerClient = new SurfaceComposerClient;
-    CHECK_EQ(composerClient->initCheck(), (status_t)OK);
-
-    sp<IBinder> display(SurfaceComposerClient::getBuiltInDisplay(
-            ISurfaceComposer::eDisplayIdMain));
-    DisplayInfo info;
-    SurfaceComposerClient::getDisplayInfo(display, &info);
-    ssize_t displayWidth = info.w;
-    ssize_t displayHeight = info.h;
-
-    ALOGV("display is %d x %d\n", displayWidth, displayHeight);
-
-    sp<SurfaceControl> control =
-        composerClient->createSurface(
-                String8("A Surface"),
-                displayWidth,
-                displayHeight,
-                PIXEL_FORMAT_RGB_565,
-                0);
-
-    CHECK(control != NULL);
-    CHECK(control->isValid());
-
-    SurfaceComposerClient::openGlobalTransaction();
-    CHECK_EQ(control->setLayer(INT_MAX), (status_t)OK);
-    CHECK_EQ(control->show(), (status_t)OK);
-    SurfaceComposerClient::closeGlobalTransaction();
-
-    sp<Surface> surface = control->getSurface();
-    CHECK(surface != NULL);
-
-    sp<ANetworkSession> session = new ANetworkSession;
-    session->start();
-
-    sp<ALooper> looper = new ALooper;
-
-    sp<WifiDisplaySink> sink = new WifiDisplaySink(
-            specialMode ? WifiDisplaySink::FLAG_SPECIAL_MODE : 0 /* flags */,
-            session,
-            surface->getIGraphicBufferProducer());
-
-    looper->registerHandler(sink);
-
-    if (connectToPort >= 0) {
-        sink->start(connectToHost.c_str(), connectToPort);
-    } else {
-        sink->start(uri.c_str());
-    }
-
-    looper->start(true /* runOnCallingThread */);
-
-    composerClient->dispose();
-
-    return 0;
-}
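
Note: the deleted wfd tool parks its main thread in RemoteDisplayClient::waitUntilDone() until onDisplayDisconnected()/onDisplayError() flips mDone under the lock. A portable sketch of that latch using std::mutex and std::condition_variable rather than the Android Mutex/Condition types.

#include <chrono>
#include <condition_variable>
#include <cstdio>
#include <mutex>
#include <thread>

// A "done" latch in the spirit of RemoteDisplayClient: callbacks set the flag
// and notify, the main thread sleeps until it is set.
class DoneLatch {
public:
    void signalDone() {                    // e.g. the disconnect/error callback
        std::lock_guard<std::mutex> lock(mLock);
        mDone = true;
        mCond.notify_all();
    }

    void waitUntilDone() {
        std::unique_lock<std::mutex> lock(mLock);
        mCond.wait(lock, [this] { return mDone; });
    }

private:
    std::mutex mLock;
    std::condition_variable mCond;
    bool mDone = false;
};

int main() {
    DoneLatch latch;
    std::thread session([&latch] {
        std::this_thread::sleep_for(std::chrono::milliseconds(100));
        latch.signalDone();                // simulate the disconnect callback
    });
    latch.waitUntilDone();
    session.join();
    std::printf("session ended\n");
    return 0;
}
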
diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp
index 626b5c2..a9c9b56 100644
--- a/services/audioflinger/AudioFlinger.cpp
+++ b/services/audioflinger/AudioFlinger.cpp
@@ -98,7 +98,6 @@
 size_t AudioFlinger::mTeeSinkTrackFrames = kTeeSinkTrackFramesDefault;
 #endif
 
-//TODO: remove when effect offload is implemented
 // In order to avoid invalidating offloaded tracks each time a Visualizer is turned on and off
 // we define a minimum time during which a global effect is considered enabled.
 static const nsecs_t kMinGlobalEffectEnabletimeNs = seconds(7200);
@@ -265,6 +264,12 @@
         }
     }
 
+    result.append("Notification Clients:\n");
+    for (size_t i = 0; i < mNotificationClients.size(); ++i) {
+        snprintf(buffer, SIZE, "  pid: %d\n", mNotificationClients.keyAt(i));
+        result.append(buffer);
+    }
+
     result.append("Global session refs:\n");
     result.append(" session pid count\n");
     for (size_t i = 0; i < mAudioSessionRefs.size(); i++) {
@@ -1237,6 +1242,7 @@
 
     // check calling permissions
     if (!recordingAllowed()) {
+        ALOGE("openRecord() permission denied: recording not allowed");
         lStatus = PERMISSION_DENIED;
         goto Exit;
     }
@@ -1252,12 +1258,14 @@
         Mutex::Autolock _l(mLock);
         thread = checkRecordThread_l(input);
         if (thread == NULL) {
+            ALOGE("openRecord() checkRecordThread_l failed");
             lStatus = BAD_VALUE;
             goto Exit;
         }
 
         if (deviceRequiresCaptureAudioOutputPermission(thread->inDevice())
                 && !captureAudioOutputAllowed()) {
+            ALOGE("openRecord() permission denied: capture not allowed");
             lStatus = PERMISSION_DENIED;
             goto Exit;
         }
@@ -1278,6 +1286,7 @@
         // The record track uses one track in mHardwareMixerThread by convention.
         recordTrack = thread->createRecordTrack_l(client, sampleRate, format, channelMask,
                                                   frameCount, lSessionId, flags, tid, &lStatus);
+        LOG_ALWAYS_FATAL_IF((recordTrack != 0) != (lStatus == NO_ERROR));
     }
     if (lStatus != NO_ERROR) {
         // remove local strong reference to Client before deleting the RecordTrack so that the
@@ -1851,6 +1860,16 @@
     Mutex::Autolock _l(mLock);
     pid_t caller = IPCThreadState::self()->getCallingPid();
     ALOGV("acquiring %d from %d", audioSession, caller);
+
+    // Ignore requests received from processes not known as notification client. The request
+    // is likely proxied by mediaserver (e.g CameraService) and releaseAudioSessionId() can be
+    // called from a different pid leaving a stale session reference.  Also we don't know how
+    // to clear this reference if the client process dies.
+    if (mNotificationClients.indexOfKey(caller) < 0) {
+        ALOGV("acquireAudioSessionId() unknown client %d for session %d", caller, audioSession);
+        return;
+    }
+
     size_t num = mAudioSessionRefs.size();
     for (size_t i = 0; i< num; i++) {
         AudioSessionRef *ref = mAudioSessionRefs.editItemAt(i);
@@ -1883,7 +1902,9 @@
             return;
         }
     }
-    ALOGW("session id %d not found for pid %d", audioSession, caller);
+    // If the caller is mediaserver it is likely that the session being released was acquired
+    // on behalf of a process not in notification clients and we ignore the warning.
+    ALOGW_IF(caller != getpid_cached, "session id %d not found for pid %d", audioSession, caller);
 }
 
 void AudioFlinger::purgeStaleEffects_l() {
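
Note: the acquireAudioSessionId() change above drops requests from processes that never registered as notification clients, because such a reference could not be cleaned up if the (proxied) caller dies. A reduced sketch of that policy with ordinary containers; the class and member names are invented, and the real code keys references by session with the caller pid stored alongside, not by (pid, session) as here.

#include <cstdio>
#include <map>
#include <set>
#include <utility>

// Per-(pid, session) reference counts, accepted only from callers that
// previously registered themselves (the analogue of mNotificationClients).
class SessionRefs {
public:
    void registerClient(int pid) { mKnownClients.insert(pid); }

    void acquire(int pid, int session) {
        if (mKnownClients.count(pid) == 0) {
            std::printf("ignoring acquire from unknown client %d\n", pid);
            return;                 // could never be cleaned up if pid dies
        }
        ++mRefs[{pid, session}];
    }

    void release(int pid, int session) {
        auto it = mRefs.find({pid, session});
        if (it == mRefs.end()) {
            std::printf("session %d not found for pid %d\n", session, pid);
            return;
        }
        if (--it->second == 0) mRefs.erase(it);
    }

private:
    std::set<int> mKnownClients;
    std::map<std::pair<int, int>, int> mRefs;
};

int main() {
    SessionRefs refs;
    refs.registerClient(1001);
    refs.acquire(1001, 42);   // counted
    refs.acquire(2002, 42);   // ignored: 2002 never registered
    refs.release(1001, 42);
    return 0;
}
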
@@ -2084,24 +2105,7 @@
         goto Exit;
     }
 
-    if (io == 0) {
-        if (sessionId == AUDIO_SESSION_OUTPUT_STAGE) {
-            // output must be specified by AudioPolicyManager when using session
-            // AUDIO_SESSION_OUTPUT_STAGE
-            lStatus = BAD_VALUE;
-            goto Exit;
-        } else if (sessionId == AUDIO_SESSION_OUTPUT_MIX) {
-            // if the output returned by getOutputForEffect() is removed before we lock the
-            // mutex below, the call to checkPlaybackThread_l(io) below will detect it
-            // and we will exit safely
-            io = AudioSystem::getOutputForEffect(&desc);
-        }
-    }
-
     {
-        Mutex::Autolock _l(mLock);
-
-
         if (!EffectIsNullUuid(&pDesc->uuid)) {
             // if uuid is specified, request effect descriptor
             lStatus = EffectGetDescriptor(&pDesc->uuid, &desc);
@@ -2174,6 +2178,15 @@
 
         // return effect descriptor
         *pDesc = desc;
+        if (io == 0 && sessionId == AUDIO_SESSION_OUTPUT_MIX) {
+            // if the output returned by getOutputForEffect() is removed before we lock the
+            // mutex below, the call to checkPlaybackThread_l(io) below will detect it
+            // and we will exit safely
+            io = AudioSystem::getOutputForEffect(&desc);
+            ALOGV("createEffect got output %d", io);
+        }
+
+        Mutex::Autolock _l(mLock);
 
         // If output is not specified try to find a matching audio session ID in one of the
         // output threads.
@@ -2181,6 +2194,12 @@
         // because of code checking output when entering the function.
         // Note: io is never 0 when creating an effect on an input
         if (io == 0) {
+            if (sessionId == AUDIO_SESSION_OUTPUT_STAGE) {
+                // output must be specified by AudioPolicyManager when using session
+                // AUDIO_SESSION_OUTPUT_STAGE
+                lStatus = BAD_VALUE;
+                goto Exit;
+            }
             // look for the thread where the specified audio session is present
             for (size_t i = 0; i < mPlaybackThreads.size(); i++) {
                 if (mPlaybackThreads.valueAt(i)->hasAudioSession(sessionId) != 0) {
@@ -2254,9 +2273,7 @@
 
     Mutex::Autolock _dl(dstThread->mLock);
     Mutex::Autolock _sl(srcThread->mLock);
-    moveEffectChain_l(sessionId, srcThread, dstThread, false);
-
-    return NO_ERROR;
+    return moveEffectChain_l(sessionId, srcThread, dstThread, false);
 }
 
 // moveEffectChain_l must be called with both srcThread and dstThread mLocks held
@@ -2283,13 +2300,18 @@
 
     // transfer all effects one by one so that new effect chain is created on new thread with
     // correct buffer sizes and audio parameters and effect engines reconfigured accordingly
-    audio_io_handle_t dstOutput = dstThread->id();
     sp<EffectChain> dstChain;
     uint32_t strategy = 0; // prevent compiler warning
     sp<EffectModule> effect = chain->getEffectFromId_l(0);
+    Vector< sp<EffectModule> > removed;
+    status_t status = NO_ERROR;
     while (effect != 0) {
         srcThread->removeEffect_l(effect);
-        dstThread->addEffect_l(effect);
+        removed.add(effect);
+        status = dstThread->addEffect_l(effect);
+        if (status != NO_ERROR) {
+            break;
+        }
         // removeEffect_l() has stopped the effect if it was active so it must be restarted
         if (effect->state() == EffectModule::ACTIVE ||
                 effect->state() == EffectModule::STOPPING) {
@@ -2301,15 +2323,15 @@
             dstChain = effect->chain().promote();
             if (dstChain == 0) {
                 ALOGW("moveEffectChain_l() cannot get chain from effect %p", effect.get());
-                srcThread->addEffect_l(effect);
-                return NO_INIT;
+                status = NO_INIT;
+                break;
             }
             strategy = dstChain->strategy();
         }
         if (reRegister) {
             AudioSystem::unregisterEffect(effect->id());
             AudioSystem::registerEffect(&effect->desc(),
-                                        dstOutput,
+                                        dstThread->id(),
                                         strategy,
                                         sessionId,
                                         effect->id());
@@ -2317,10 +2339,24 @@
         effect = chain->getEffectFromId_l(0);
     }
 
-    return NO_ERROR;
+    if (status != NO_ERROR) {
+        for (size_t i = 0; i < removed.size(); i++) {
+            srcThread->addEffect_l(removed[i]);
+            if (dstChain != 0 && reRegister) {
+                AudioSystem::unregisterEffect(removed[i]->id());
+                AudioSystem::registerEffect(&removed[i]->desc(),
+                                            srcThread->id(),
+                                            strategy,
+                                            sessionId,
+                                            removed[i]->id());
+            }
+        }
+    }
+
+    return status;
 }
 
-bool AudioFlinger::isGlobalEffectEnabled_l()
+bool AudioFlinger::isNonOffloadableGlobalEffectEnabled_l()
 {
     if (mGlobalEffectEnableTime != 0 &&
             ((systemTime() - mGlobalEffectEnableTime) < kMinGlobalEffectEnabletimeNs)) {
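
Note: moveEffectChain_l() now remembers every effect already pulled off the source thread so that a failing addEffect_l() on the destination can be unwound instead of returning with effects stranded. A generic move-with-rollback sketch over plain std::vector; the capacity check merely stands in for whatever makes the destination refuse an item.

#include <cstdio>
#include <string>
#include <vector>

// Move items from src to dst one by one; if dst rejects one, put everything
// that was already moved back on src and report the failure.
static bool moveAllWithRollback(std::vector<std::string> &src,
                                std::vector<std::string> &dst,
                                size_t dstCapacity) {
    std::vector<std::string> moved;
    bool ok = true;
    while (!src.empty()) {
        std::string item = src.front();
        src.erase(src.begin());
        if (dst.size() >= dstCapacity) {   // destination refused the item
            src.insert(src.begin(), item); // the failing item itself goes back
            ok = false;
            break;
        }
        dst.push_back(item);
        moved.push_back(item);
    }
    if (!ok) {
        // Roll back the items that did make it across, newest first.
        for (auto it = moved.rbegin(); it != moved.rend(); ++it) {
            dst.pop_back();
            src.push_back(*it);
        }
    }
    return ok;
}

int main() {
    std::vector<std::string> src = {"eq", "visualizer", "reverb"};
    std::vector<std::string> dst;
    bool ok = moveAllWithRollback(src, dst, /*dstCapacity=*/1);
    std::printf("moved=%s src left=%zu dst=%zu\n", ok ? "yes" : "no",
                src.size(), dst.size());
    return 0;
}
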
@@ -2330,14 +2366,14 @@
     for (size_t i = 0; i < mPlaybackThreads.size(); i++) {
         sp<EffectChain> ec =
                 mPlaybackThreads.valueAt(i)->getEffectChain_l(AUDIO_SESSION_OUTPUT_MIX);
-        if (ec != 0 && ec->isEnabled()) {
+        if (ec != 0 && ec->isNonOffloadableEnabled()) {
             return true;
         }
     }
     return false;
 }
 
-void AudioFlinger::onGlobalEffectEnable()
+void AudioFlinger::onNonOffloadableGlobalEffectEnable()
 {
     Mutex::Autolock _l(mLock);
 
diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h
index 0992308..2aeb263 100644
--- a/services/audioflinger/AudioFlinger.h
+++ b/services/audioflinger/AudioFlinger.h
@@ -412,6 +412,7 @@
                                                   int target);
         virtual status_t    setParameters(const String8& keyValuePairs);
         virtual status_t    getTimestamp(AudioTimestamp& timestamp);
+        virtual void        signal(); // signal playback thread for a change in control block
 
         virtual status_t onTransact(
             uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags);
@@ -466,9 +467,8 @@
                 void        removeClient_l(pid_t pid);
                 void        removeNotificationClient(pid_t pid);
 
-                //TODO: remove when effect offload is implemented
-                bool isGlobalEffectEnabled_l();
-                void onGlobalEffectEnable();
+                bool isNonOffloadableGlobalEffectEnabled_l();
+                void onNonOffloadableGlobalEffectEnable();
 
     class AudioHwDevice {
     public:
@@ -645,7 +645,6 @@
 private:
     bool    mIsLowRamDevice;
     bool    mIsDeviceTypeKnown;
-    //TODO: remove when effect offload is implemented
     nsecs_t mGlobalEffectEnableTime;  // when a global effect was last enabled
 };
 
diff --git a/services/audioflinger/AudioPolicyService.cpp b/services/audioflinger/AudioPolicyService.cpp
index 4be292f..35e816b 100644
--- a/services/audioflinger/AudioPolicyService.cpp
+++ b/services/audioflinger/AudioPolicyService.cpp
@@ -975,7 +975,6 @@
 {
     ssize_t i;  // not size_t because i will count down to -1
     Vector <AudioCommand *> removedCommands;
-    nsecs_t time = 0;
     command->mTime = systemTime() + milliseconds(delayMs);
 
     // acquire wake lock to make sure delayed commands are processed
@@ -1021,7 +1020,10 @@
             } else {
                 data2->mKeyValuePairs = param2.toString();
             }
-            time = command2->mTime;
+            command->mTime = command2->mTime;
+            // force delayMs to non 0 so that code below does not request to wait for
+            // command status as the command is now delayed
+            delayMs = 1;
         } break;
 
         case SET_VOLUME: {
@@ -1032,7 +1034,10 @@
             ALOGV("Filtering out volume command on output %d for stream %d",
                     data->mIO, data->mStream);
             removedCommands.add(command2);
-            time = command2->mTime;
+            command->mTime = command2->mTime;
+            // force delayMs to non 0 so that code below does not request to wait for
+            // command status as the command is now delayed
+            delayMs = 1;
         } break;
         case START_TONE:
         case STOP_TONE:
@@ -1054,16 +1059,12 @@
     }
     removedCommands.clear();
 
-    // wait for status only if delay is 0 and command time was not modified above
-    if (delayMs == 0 && time == 0) {
+    // wait for status only if delay is 0
+    if (delayMs == 0) {
         command->mWaitStatus = true;
     } else {
         command->mWaitStatus = false;
     }
-    // update command time if modified above
-    if (time != 0) {
-        command->mTime = time;
-    }
 
     // insert command at the right place according to its time stamp
     ALOGV("inserting command: %d at index %d, num commands %d",
diff --git a/services/audioflinger/Effects.cpp b/services/audioflinger/Effects.cpp
index 86671a9..a8a5169 100644
--- a/services/audioflinger/Effects.cpp
+++ b/services/audioflinger/Effects.cpp
@@ -764,6 +764,46 @@
     return enabled;
 }
 
+status_t AudioFlinger::EffectModule::setOffloaded(bool offloaded, audio_io_handle_t io)
+{
+    Mutex::Autolock _l(mLock);
+    if (mStatus != NO_ERROR) {
+        return mStatus;
+    }
+    status_t status = NO_ERROR;
+    if ((mDescriptor.flags & EFFECT_FLAG_OFFLOAD_SUPPORTED) != 0) {
+        status_t cmdStatus;
+        uint32_t size = sizeof(status_t);
+        effect_offload_param_t cmd;
+
+        cmd.isOffload = offloaded;
+        cmd.ioHandle = io;
+        status = (*mEffectInterface)->command(mEffectInterface,
+                                              EFFECT_CMD_OFFLOAD,
+                                              sizeof(effect_offload_param_t),
+                                              &cmd,
+                                              &size,
+                                              &cmdStatus);
+        if (status == NO_ERROR) {
+            status = cmdStatus;
+        }
+        mOffloaded = (status == NO_ERROR) ? offloaded : false;
+    } else {
+        if (offloaded) {
+            status = INVALID_OPERATION;
+        }
+        mOffloaded = false;
+    }
+    ALOGV("setOffloaded() offloaded %d io %d status %d", offloaded, io, status);
+    return status;
+}
+
+bool AudioFlinger::EffectModule::isOffloaded() const
+{
+    Mutex::Autolock _l(mLock);
+    return mOffloaded;
+}
+
 void AudioFlinger::EffectModule::dump(int fd, const Vector<String16>& args)
 {
     const size_t SIZE = 256;
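
Note: setOffloaded() above only issues the offload command to engines whose descriptor carries EFFECT_FLAG_OFFLOAD_SUPPORTED and records whether the engine accepted it; asking a non-offloadable effect to run offloaded is rejected. A stripped-down sketch of that negotiation with the effect engine replaced by a callable; none of these names are the real effect HAL types.

#include <cstdio>
#include <functional>

enum { FLAG_OFFLOAD_SUPPORTED = 1 << 0 };

// Minimal stand-in for an effect module: the engine command is modeled as a
// callable returning 0 on success (the EFFECT_CMD_OFFLOAD analogue).
class Effect {
public:
    Effect(unsigned flags, std::function<int(bool, int)> sendOffloadCmd)
        : mFlags(flags), mSendOffloadCmd(std::move(sendOffloadCmd)) {}

    // Returns 0 on success, negative on error, mirroring setOffloaded().
    int setOffloaded(bool offloaded, int ioHandle) {
        if (mFlags & FLAG_OFFLOAD_SUPPORTED) {
            int status = mSendOffloadCmd(offloaded, ioHandle);
            mOffloaded = (status == 0) ? offloaded : false;
            return status;
        }
        mOffloaded = false;
        return offloaded ? -1 /* INVALID_OPERATION analogue */ : 0;
    }

    bool isOffloaded() const { return mOffloaded; }

private:
    unsigned mFlags;
    std::function<int(bool, int)> mSendOffloadCmd;
    bool mOffloaded = false;
};

int main() {
    Effect offloadable(FLAG_OFFLOAD_SUPPORTED,
                       [](bool on, int io) { std::printf("cmd offload=%d io=%d\n", (int)on, io); return 0; });
    Effect pcmOnly(0, [](bool, int) { return 0; });

    offloadable.setOffloaded(true, 13);
    std::printf("offloadable now offloaded: %d\n", offloadable.isOffloaded());  // 1
    std::printf("pcm-only refuses: %d\n", pcmOnly.setOffloaded(true, 13));      // -1
    return 0;
}
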
@@ -932,14 +972,20 @@
         }
         mEnabled = false;
     } else {
-        //TODO: remove when effect offload is implemented
         if (thread != 0) {
-            if ((thread->type() == ThreadBase::OFFLOAD)) {
+            if (thread->type() == ThreadBase::OFFLOAD) {
                 PlaybackThread *t = (PlaybackThread *)thread.get();
-                t->invalidateTracks(AUDIO_STREAM_MUSIC);
+                Mutex::Autolock _l(t->mLock);
+                t->broadcast_l();
             }
-            if (mEffect->sessionId() == AUDIO_SESSION_OUTPUT_MIX) {
-                thread->mAudioFlinger->onGlobalEffectEnable();
+            if (!mEffect->isOffloadable()) {
+                if (thread->type() == ThreadBase::OFFLOAD) {
+                    PlaybackThread *t = (PlaybackThread *)thread.get();
+                    t->invalidateTracks(AUDIO_STREAM_MUSIC);
+                }
+                if (mEffect->sessionId() == AUDIO_SESSION_OUTPUT_MIX) {
+                    thread->mAudioFlinger->onNonOffloadableGlobalEffectEnable();
+                }
             }
         }
     }
@@ -970,6 +1016,11 @@
     sp<ThreadBase> thread = mEffect->thread().promote();
     if (thread != 0) {
         thread->checkSuspendOnEffectEnabled(mEffect, false, mEffect->sessionId());
+        if (thread->type() == ThreadBase::OFFLOAD) {
+            PlaybackThread *t = (PlaybackThread *)thread.get();
+            Mutex::Autolock _l(t->mLock);
+            t->broadcast_l();
+        }
     }
 
     return status;
@@ -1240,9 +1291,10 @@
     }
     bool isGlobalSession = (mSessionId == AUDIO_SESSION_OUTPUT_MIX) ||
             (mSessionId == AUDIO_SESSION_OUTPUT_STAGE);
-    // always process effects unless no more tracks are on the session and the effect tail
-    // has been rendered
-    bool doProcess = true;
+    // never process effects when:
+    // - on an OFFLOAD thread
+    // - no more tracks are on the session and the effect tail has been rendered
+    bool doProcess = (thread->type() != ThreadBase::OFFLOAD);
     if (!isGlobalSession) {
         bool tracksOnSession = (trackCnt() != 0);
 
@@ -1728,12 +1780,12 @@
     }
 }
 
-bool AudioFlinger::EffectChain::isEnabled()
+bool AudioFlinger::EffectChain::isNonOffloadableEnabled()
 {
     Mutex::Autolock _l(mLock);
     size_t size = mEffects.size();
     for (size_t i = 0; i < size; i++) {
-        if (mEffects[i]->isEnabled()) {
+        if (mEffects[i]->isEnabled() && !mEffects[i]->isOffloadable()) {
             return true;
         }
     }
diff --git a/services/audioflinger/Effects.h b/services/audioflinger/Effects.h
index bac50f2..b717857 100644
--- a/services/audioflinger/Effects.h
+++ b/services/audioflinger/Effects.h
@@ -25,6 +25,10 @@
 // state changes or resource modifications. Always respect the following order
 // if multiple mutexes must be acquired to avoid cross deadlock:
 // AudioFlinger -> ThreadBase -> EffectChain -> EffectModule
+// In addition, methods that lock the AudioPolicyService mutex (getOutputForEffect(),
+// startOutput()...) should never be called with AudioFlinger or Threadbase mutex locked
+// to avoid cross deadlock with other clients calling AudioPolicyService methods that in turn
+// call AudioFlinger thus locking the same mutexes in the reverse order.
 
 // The EffectModule class is a wrapper object controlling the effect engine implementation
 // in the effect library. It prevents concurrent calls to process() and command() functions
@@ -111,6 +115,10 @@
     bool             purgeHandles();
     void             lock() { mLock.lock(); }
     void             unlock() { mLock.unlock(); }
+    bool             isOffloadable() const
+                        { return (mDescriptor.flags & EFFECT_FLAG_OFFLOAD_SUPPORTED) != 0; }
+    status_t         setOffloaded(bool offloaded, audio_io_handle_t io);
+    bool             isOffloaded() const;
 
     void             dump(int fd, const Vector<String16>& args);
 
@@ -144,6 +152,7 @@
                                     // sending disable command.
     uint32_t mDisableWaitCnt;       // current process() calls count during disable period.
     bool     mSuspended;            // effect is suspended: temporarily disabled by framework
+    bool     mOffloaded;            // effect is currently offloaded to the audio DSP
 };
 
 // The EffectHandle class implements the IEffect interface. It provides resources
@@ -303,8 +312,8 @@
 
     void clearInputBuffer();
 
-    // At least one effect in the chain is enabled
-    bool isEnabled();
+    // At least one non offloadable effect in the chain is enabled
+    bool isNonOffloadableEnabled();
 
 
     void dump(int fd, const Vector<String16>& args);
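
Note: the comment added to Effects.h fixes the mutex hierarchy (AudioFlinger -> ThreadBase -> EffectChain -> EffectModule) and forbids calling into AudioPolicyService while those locks are held. The general rule it encodes, sketched with two plain std::mutex objects: every path that needs both locks takes them in the same global order.

#include <cstdio>
#include <mutex>
#include <thread>

// Two locks with a fixed global order: the "service" lock is always taken
// before the "thread" lock. As long as every path that needs both respects
// this order, the cross deadlock described above cannot happen.
std::mutex gServiceLock;   // stands in for the policy/flinger side
std::mutex gThreadLock;    // stands in for a ThreadBase mutex

void clientPath() {
    std::lock_guard<std::mutex> outer(gServiceLock);
    std::lock_guard<std::mutex> inner(gThreadLock);
    std::printf("client path: service -> thread\n");
}

void callbackPath() {
    // The buggy pattern would be to take gThreadLock first and then call back
    // into code that takes gServiceLock; instead both paths use one order.
    std::lock_guard<std::mutex> outer(gServiceLock);
    std::lock_guard<std::mutex> inner(gThreadLock);
    std::printf("callback path: service -> thread\n");
}

int main() {
    std::thread a(clientPath);
    std::thread b(callbackPath);
    a.join();
    b.join();
    return 0;
}
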
diff --git a/services/audioflinger/PlaybackTracks.h b/services/audioflinger/PlaybackTracks.h
index 0308b99..a2e2511 100644
--- a/services/audioflinger/PlaybackTracks.h
+++ b/services/audioflinger/PlaybackTracks.h
@@ -60,6 +60,7 @@
             int16_t     *mainBuffer() const { return mMainBuffer; }
             int         auxEffectId() const { return mAuxEffectId; }
     virtual status_t    getTimestamp(AudioTimestamp& timestamp);
+            void        signal();
 
 // implement FastMixerState::VolumeProvider interface
     virtual uint32_t    getVolumeLR();
@@ -117,7 +118,10 @@
     enum {FS_INVALID, FS_FILLING, FS_FILLED, FS_ACTIVE};
     mutable uint8_t     mFillingUpStatus;
     int8_t              mRetryCount;
-    const sp<IMemory>   mSharedBuffer;
+
+    // see comment at AudioFlinger::PlaybackThread::Track::~Track for why this can't be const
+    sp<IMemory>         mSharedBuffer;
+
     bool                mResetDone;
     const audio_stream_type_t mStreamType;
     int                 mName;      // track name on the normal mixer,
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index 1b5a9a9..c46242d 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -109,6 +109,9 @@
 // maximum normal mix buffer size
 static const uint32_t kMaxNormalMixBufferSizeMs = 24;
 
+// Offloaded output thread standby delay: allows track transition without going to standby
+static const nsecs_t kOffloadStandbyDelayNs = seconds(1);
+
 // Whether to use fast mixer
 static const enum {
     FastMixer_Never,    // never initialize or use: for debugging only
@@ -473,13 +476,13 @@
     }
 }
 
-void AudioFlinger::ThreadBase::acquireWakeLock()
+void AudioFlinger::ThreadBase::acquireWakeLock(int uid)
 {
     Mutex::Autolock _l(mLock);
-    acquireWakeLock_l();
+    acquireWakeLock_l(uid);
 }
 
-void AudioFlinger::ThreadBase::acquireWakeLock_l()
+void AudioFlinger::ThreadBase::acquireWakeLock_l(int uid)
 {
     if (mPowerManager == 0) {
         // use checkService() to avoid blocking if power service is not up yet
@@ -494,10 +497,19 @@
     }
     if (mPowerManager != 0) {
         sp<IBinder> binder = new BBinder();
-        status_t status = mPowerManager->acquireWakeLock(POWERMANAGER_PARTIAL_WAKE_LOCK,
-                                                         binder,
-                                                         String16(mName),
-                                                         String16("media"));
+        status_t status;
+        if (uid >= 0) {
+            status = mPowerManager->acquireWakeLockWithUid(POWERMANAGER_PARTIAL_WAKE_LOCK,
+                    binder,
+                    String16(mName),
+                    String16("media"),
+                    uid);
+        } else {
+            status = mPowerManager->acquireWakeLock(POWERMANAGER_PARTIAL_WAKE_LOCK,
+                    binder,
+                    String16(mName),
+                    String16("media"));
+        }
         if (status == NO_ERROR) {
             mWakeLockToken = binder;
         }
@@ -699,14 +711,22 @@
         goto Exit;
     }
 
-    // Do not allow effects with session ID 0 on direct output or duplicating threads
-    // TODO: add rule for hw accelerated effects on direct outputs with non PCM format
-    if (sessionId == AUDIO_SESSION_OUTPUT_MIX && mType != MIXER) {
-        ALOGW("createEffect_l() Cannot add auxiliary effect %s to session %d",
-                desc->name, sessionId);
-        lStatus = BAD_VALUE;
-        goto Exit;
+    // Allow global effects only on offloaded and mixer threads
+    if (sessionId == AUDIO_SESSION_OUTPUT_MIX) {
+        switch (mType) {
+        case MIXER:
+        case OFFLOAD:
+            break;
+        case DIRECT:
+        case DUPLICATING:
+        case RECORD:
+        default:
+            ALOGW("createEffect_l() Cannot add global effect %s on thread %s", desc->name, mName);
+            lStatus = BAD_VALUE;
+            goto Exit;
+        }
     }
+
     // Only Pre processor effects are allowed on input threads and only on input threads
     if ((mType == RECORD) != ((desc->flags & EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_PRE_PROC)) {
         ALOGW("createEffect_l() effect %s (flags %08x) created on wrong thread type %d",
@@ -749,6 +769,8 @@
             if (lStatus != NO_ERROR) {
                 goto Exit;
             }
+            effect->setOffloaded(mType == OFFLOAD, mId);
+
             lStatus = chain->addEffect_l(effect);
             if (lStatus != NO_ERROR) {
                 goto Exit;
@@ -810,6 +832,10 @@
     sp<EffectChain> chain = getEffectChain_l(sessionId);
     bool chainCreated = false;
 
+    ALOGD_IF((mType == OFFLOAD) && !effect->isOffloadable(),
+             "addEffect_l() on offloaded thread %p: effect %s does not support offload flags %x",
+                    this, effect->desc().name, effect->desc().flags);
+
     if (chain == 0) {
         // create a new chain for this session
         ALOGV("addEffect_l() new effect chain for session %d", sessionId);
@@ -826,6 +852,8 @@
         return BAD_VALUE;
     }
 
+    effect->setOffloaded(mType == OFFLOAD, mId);
+
     status_t status = chain->addEffect_l(effect);
     if (status != NO_ERROR) {
         if (chainCreated) {
@@ -941,6 +969,7 @@
         mUseAsyncWrite(false),
         mWriteAckSequence(0),
         mDrainSequence(0),
+        mSignalPending(false),
         mScreenState(AudioFlinger::mScreenState),
         // index 0 is reserved for normal mixer's submix
         mFastTrackAvailMask(((1 << FastMixerState::kMaxFastTracks) - 1) & ~1),
@@ -1329,14 +1358,14 @@
 {
     Mutex::Autolock _l(mLock);
     mStreamTypes[stream].volume = value;
-    signal_l();
+    broadcast_l();
 }
 
 void AudioFlinger::PlaybackThread::setStreamMute(audio_stream_type_t stream, bool muted)
 {
     Mutex::Autolock _l(mLock);
     mStreamTypes[stream].mute = muted;
-    signal_l();
+    broadcast_l();
 }
 
 float AudioFlinger::PlaybackThread::streamVolume(audio_stream_type_t stream) const
@@ -1394,8 +1423,8 @@
         status = NO_ERROR;
     }
 
-    ALOGV("mWaitWorkCV.broadcast");
-    mWaitWorkCV.broadcast();
+    ALOGV("signal playback thread");
+    broadcast_l();
 
     return status;
 }
@@ -1436,14 +1465,14 @@
     }
 }
 
-void AudioFlinger::PlaybackThread::signal_l()
+void AudioFlinger::PlaybackThread::broadcast_l()
 {
     // Thread could be blocked waiting for async
     // so signal it to handle state changes immediately
     // If threadLoop is currently unlocked a signal of mWaitWorkCV will
     // be lost so we also flag to prevent it blocking on mWaitWorkCV
     mSignalPending = true;
-    mWaitWorkCV.signal();
+    mWaitWorkCV.broadcast();
 }
 
 String8 AudioFlinger::PlaybackThread::getParameters(const String8& keys)
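
Note: signal_l() becomes broadcast_l() above: it wakes all waiters and also sets mSignalPending, so a thread that is not parked on mWaitWorkCV when the signal arrives still notices it on its next loop iteration. A standalone sketch of that pending-flag-plus-broadcast pattern.

#include <chrono>
#include <condition_variable>
#include <cstdio>
#include <mutex>
#include <thread>

std::mutex gLock;
std::condition_variable gWorkCV;
bool gSignalPending = false;   // survives signals sent while the loop is busy
bool gExit = false;

// Wake the worker whether it is sleeping on the CV or busy elsewhere.
// Caller must hold gLock.
void broadcastLocked() {
    gSignalPending = true;
    gWorkCV.notify_all();
}

void workerLoop() {
    std::unique_lock<std::mutex> lock(gLock);
    while (!gExit) {
        if (!gSignalPending) {
            // Only sleep when nothing was signalled since the last iteration.
            gWorkCV.wait_for(lock, std::chrono::milliseconds(100));
        }
        if (gSignalPending) {
            gSignalPending = false;
            std::printf("worker: handling state change\n");
        }
    }
}

int main() {
    std::thread worker(workerLoop);
    {
        std::lock_guard<std::mutex> lock(gLock);
        broadcastLocked();
    }
    std::this_thread::sleep_for(std::chrono::milliseconds(50));
    {
        std::lock_guard<std::mutex> lock(gLock);
        gExit = true;
        gWorkCV.notify_all();
    }
    worker.join();
    return 0;
}
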
@@ -1574,6 +1603,7 @@
         if (mOutput->stream->set_callback(mOutput->stream,
                                       AudioFlinger::PlaybackThread::asyncCallback, this) == 0) {
             mUseAsyncWrite = true;
+            mCallbackThread = new AudioFlinger::AsyncCallbackThread(this);
         }
     }
 
@@ -2095,6 +2125,8 @@
     // and then that string will be logged at the next convenient opportunity.
     const char *logString = NULL;
 
+    checkSilentMode_l();
+
     while (!exitPending())
     {
         cpuStats.sample(myName);
@@ -2124,7 +2156,6 @@
             }
 
             saveOutputTracks();
-
             if (mSignalPending) {
                 // A signal was raised while we were unlocked
                 mSignalPending = false;
@@ -2137,14 +2168,12 @@
                 mWaitWorkCV.wait(mLock);
                 ALOGV("async completion/wake");
                 acquireWakeLock_l();
-                if (exitPending()) {
-                    break;
-                }
-                if (!mActiveTracks.size() && (systemTime() > standbyTime)) {
-                    continue;
-                }
+                standbyTime = systemTime() + standbyDelay;
                 sleepTime = 0;
-            } else if ((!mActiveTracks.size() && systemTime() > standbyTime) ||
+
+                continue;
+            }
+            if ((!mActiveTracks.size() && systemTime() > standbyTime) ||
                                    isSuspended()) {
                 // put audio hardware into standby after short delay
                 if (shouldStandby_l()) {
@@ -2186,7 +2215,6 @@
                     continue;
                 }
             }
-
             // mMixerStatusIgnoringFastTracks is also updated internally
             mMixerStatus = prepareTracks_l(&tracksToRemove);
 
@@ -2219,12 +2247,21 @@
             }
 
             // only process effects if we're going to write
-            if (sleepTime == 0) {
+            if (sleepTime == 0 && mType != OFFLOAD) {
                 for (size_t i = 0; i < effectChains.size(); i ++) {
                     effectChains[i]->process_l();
                 }
             }
         }
+        // Process effect chains for offloaded thread even if no audio
+        // was read from audio track: process only updates effect state
+        // and thus does have to be synchronized with audio writes but may have
+        // to be called while waiting for async write callback
+        if (mType == OFFLOAD) {
+            for (size_t i = 0; i < effectChains.size(); i ++) {
+                effectChains[i]->process_l();
+            }
+        }
 
         // enable changes in effect chain
         unlockEffectChains(effectChains);
@@ -2323,6 +2360,22 @@
 
 }
 
+status_t AudioFlinger::PlaybackThread::getTimestamp_l(AudioTimestamp& timestamp)
+{
+    if (mNormalSink != 0) {
+        return mNormalSink->getTimestamp(timestamp);
+    }
+    if (mType == OFFLOAD && mOutput->stream->get_presentation_position) {
+        uint64_t position64;
+        int ret = mOutput->stream->get_presentation_position(
+                                                mOutput->stream, &position64, &timestamp.mTime);
+        if (ret == 0) {
+            timestamp.mPosition = (uint32_t)position64;
+            return NO_ERROR;
+        }
+    }
+    return INVALID_OPERATION;
+}
 // ----------------------------------------------------------------------------
 
 AudioFlinger::MixerThread::MixerThread(const sp<AudioFlinger>& audioFlinger, AudioStreamOut* output,
@@ -3509,7 +3562,8 @@
 
             if (track->mFillingUpStatus == Track::FS_FILLED) {
                 track->mFillingUpStatus = Track::FS_ACTIVE;
-                mLeftVolFloat = mRightVolFloat = 0;
+                // make sure processVolume_l() will apply new volume even if 0
+                mLeftVolFloat = mRightVolFloat = -1.0;
                 if (track->mState == TrackBase::RESUMING) {
                     track->mState = TrackBase::ACTIVE;
                 }
@@ -3701,15 +3755,19 @@
 
     // use shorter standby delay as on normal output to release
     // hardware resources as soon as possible
-    standbyDelay = microseconds(activeSleepTime*2);
+    if (audio_is_linear_pcm(mFormat)) {
+        standbyDelay = microseconds(activeSleepTime*2);
+    } else {
+        standbyDelay = kOffloadStandbyDelayNs;
+    }
 }
 
 // ----------------------------------------------------------------------------
 
 AudioFlinger::AsyncCallbackThread::AsyncCallbackThread(
-        const sp<AudioFlinger::OffloadThread>& offloadThread)
+        const wp<AudioFlinger::PlaybackThread>& playbackThread)
     :   Thread(false /*canCallJava*/),
-        mOffloadThread(offloadThread),
+        mPlaybackThread(playbackThread),
         mWriteAckSequence(0),
         mDrainSequence(0)
 {
@@ -3744,13 +3802,13 @@
             mDrainSequence &= ~1;
         }
         {
-            sp<AudioFlinger::OffloadThread> offloadThread = mOffloadThread.promote();
-            if (offloadThread != 0) {
+            sp<AudioFlinger::PlaybackThread> playbackThread = mPlaybackThread.promote();
+            if (playbackThread != 0) {
                 if (writeAckSequence & 1) {
-                    offloadThread->resetWriteBlocked(writeAckSequence >> 1);
+                    playbackThread->resetWriteBlocked(writeAckSequence >> 1);
                 }
                 if (drainSequence & 1) {
-                    offloadThread->resetDraining(drainSequence >> 1);
+                    playbackThread->resetDraining(drainSequence >> 1);
                 }
             }
         }
@@ -3806,9 +3864,9 @@
         AudioStreamOut* output, audio_io_handle_t id, uint32_t device)
     :   DirectOutputThread(audioFlinger, output, id, device, OFFLOAD),
         mHwPaused(false),
+        mFlushPending(false),
         mPausedBytesRemaining(0)
 {
-    mCallbackThread = new AudioFlinger::AsyncCallbackThread(this);
 }
 
 AudioFlinger::OffloadThread::~OffloadThread()
@@ -3833,10 +3891,14 @@
     Vector< sp<Track> > *tracksToRemove
 )
 {
-    ALOGV("OffloadThread::prepareTracks_l");
     size_t count = mActiveTracks.size();
 
     mixer_state mixerStatus = MIXER_IDLE;
+    bool doHwPause = false;
+    bool doHwResume = false;
+
+    ALOGV("OffloadThread::prepareTracks_l active tracks %d", count);
+
     // find out which tracks need to be processed
     for (size_t i = 0; i < count; i++) {
         sp<Track> t = mActiveTracks[i].promote();
@@ -3868,7 +3930,7 @@
             track->setPaused();
             if (last) {
                 if (!mHwPaused) {
-                    mOutput->stream->pause(mOutput->stream);
+                    doHwPause = true;
                     mHwPaused = true;
                 }
                 // If we were part way through writing the mixbuffer to
@@ -3887,25 +3949,30 @@
             ALOGVV("OffloadThread: track %d s=%08x [OK]", track->name(), cblk->mServer);
             if (track->mFillingUpStatus == Track::FS_FILLED) {
                 track->mFillingUpStatus = Track::FS_ACTIVE;
-                mLeftVolFloat = mRightVolFloat = 0;
+                // make sure processVolume_l() will apply new volume even if 0
+                mLeftVolFloat = mRightVolFloat = -1.0;
                 if (track->mState == TrackBase::RESUMING) {
-                    if (mPausedBytesRemaining) {
-                        // Need to continue write that was interrupted
-                        mCurrentWriteLength = mPausedWriteLength;
-                        mBytesRemaining = mPausedBytesRemaining;
-                        mPausedBytesRemaining = 0;
-                    }
                     track->mState = TrackBase::ACTIVE;
+                    if (last) {
+                        if (mPausedBytesRemaining) {
+                            // Need to continue write that was interrupted
+                            mCurrentWriteLength = mPausedWriteLength;
+                            mBytesRemaining = mPausedBytesRemaining;
+                            mPausedBytesRemaining = 0;
+                        }
+                        if (mHwPaused) {
+                            doHwResume = true;
+                            mHwPaused = false;
+                            // threadLoop_mix() will handle the case that we need to
+                            // resume an interrupted write
+                        }
+                        // enable write to audio HAL
+                        sleepTime = 0;
+                    }
                 }
             }
 
             if (last) {
-                if (mHwPaused) {
-                    mOutput->stream->resume(mOutput->stream);
-                    mHwPaused = false;
-                    // threadLoop_mix() will handle the case that we need to
-                    // resume an interrupted write
-                }
                 // reset retry count
                 track->mRetryCount = kMaxTrackRetriesOffload;
                 mActiveTrack = t;
@@ -3922,9 +3989,9 @@
                     // has been written
                     ALOGV("OffloadThread: underrun and STOPPING_1 -> draining, STOPPING_2");
                     track->mState = TrackBase::STOPPING_2; // so presentation completes after drain
-                    sleepTime = 0;
-                    standbyTime = systemTime() + standbyDelay;
                     if (last) {
+                        sleepTime = 0;
+                        standbyTime = systemTime() + standbyDelay;
                         mixerStatus = MIXER_DRAIN_TRACK;
                         mDrainSequence += 2;
                         if (mHwPaused) {
@@ -3963,10 +4030,23 @@
         processVolume_l(track, last);
     }
 
+    // make sure the pause/flush/resume sequence is executed in the right order.
+    // If a flush is pending and a track is active but the HW is not paused, force a HW pause
+    // before flush and then resume HW. This can happen in case of pause/flush/resume
+    // if resume is received before pause is executed.
+    if (doHwPause || (mFlushPending && !mHwPaused && (count != 0))) {
+        mOutput->stream->pause(mOutput->stream);
+        if (!doHwPause) {
+            doHwResume = true;
+        }
+    }
     if (mFlushPending) {
         flushHw_l();
         mFlushPending = false;
     }
+    if (doHwResume) {
+        mOutput->stream->resume(mOutput->stream);
+    }
 
     // remove all the tracks that need to be...
     removeTracks_l(*tracksToRemove);
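The pause/flush/resume ordering enforced just above can be condensed into a hypothetical free function over the same flags (illustrative sketch only; flushHw_l() is reduced to the HAL flush entry point and <hardware/audio.h> is assumed for audio_stream_out_t):

    // A pending flush on an active, not-yet-paused stream forces
    // pause -> flush -> resume so the HAL never flushes a running stream.
    void applyHwSequence(audio_stream_out_t* s, bool doHwPause, bool doHwResume,
                         bool flushPending, bool hwPaused, size_t activeCount) {
        if (doHwPause || (flushPending && !hwPaused && activeCount != 0)) {
            s->pause(s);
            if (!doHwPause) {
                doHwResume = true;      // resume again once the forced pause/flush is done
            }
        }
        if (flushPending) {
            s->flush(s);
        }
        if (doHwResume) {
            s->resume(s);
        }
    }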
@@ -4219,7 +4299,7 @@
     snprintf(mName, kNameLength, "AudioIn_%X", id);
 
     readInputParameters();
-
+    mClientUid = IPCThreadState::self()->getCallingUid();
 }
 
 
@@ -4251,7 +4331,7 @@
     nsecs_t lastWarning = 0;
 
     inputStandBy();
-    acquireWakeLock();
+    acquireWakeLock(mClientUid);
 
     // used to verify we've read at least once before evaluating how many bytes were read
     bool readOnce = false;
@@ -4276,7 +4356,7 @@
                 // go to sleep
                 mWaitWorkCV.wait(mLock);
                 ALOGV("RecordThread: loop starting");
-                acquireWakeLock_l();
+                acquireWakeLock_l(mClientUid);
                 continue;
             }
             if (mActiveTrack != 0) {
@@ -4304,6 +4384,7 @@
                     mStandby = false;
                 }
             }
+
             lockEffectChains_l(effectChains);
         }
 
@@ -4494,10 +4575,9 @@
 
     lStatus = initCheck();
     if (lStatus != NO_ERROR) {
-        ALOGE("Audio driver not initialized.");
+        ALOGE("createRecordTrack_l() audio driver not initialized");
         goto Exit;
     }
-
     // client expresses a preference for FAST, but we get the final say
     if (*flags & IAudioFlinger::TRACK_FAST) {
       if (
@@ -4558,7 +4638,9 @@
                       format, channelMask, frameCount, sessionId);
 
         if (track->getCblk() == 0) {
+            ALOGE("createRecordTrack_l() no control block");
             lStatus = NO_MEMORY;
+            track.clear();
             goto Exit;
         }
         mTracks.add(track);
diff --git a/services/audioflinger/Threads.h b/services/audioflinger/Threads.h
index 3fe470c..0cb3ef7 100644
--- a/services/audioflinger/Threads.h
+++ b/services/audioflinger/Threads.h
@@ -237,8 +237,8 @@
                     effect_uuid_t mType;    // effect type UUID
                 };
 
-                void        acquireWakeLock();
-                void        acquireWakeLock_l();
+                void        acquireWakeLock(int uid = -1);
+                void        acquireWakeLock_l(int uid = -1);
                 void        releaseWakeLock();
                 void        releaseWakeLock_l();
                 void setEffectSuspended_l(const effect_uuid_t *type,
@@ -466,6 +466,8 @@
                 // Returns the HAL's frame count, i.e. fast mixer buffer size.
                 size_t      frameCountHAL() const { return mFrameCount; }
 
+                status_t         getTimestamp_l(AudioTimestamp& timestamp);
+
 protected:
     // updated by readOutputParameters()
     size_t                          mNormalFrameCount;  // normal mixer and effects
@@ -526,7 +528,7 @@
     status_t    addTrack_l(const sp<Track>& track);
     bool        destroyTrack_l(const sp<Track>& track);
     void        removeTrack_l(const sp<Track>& track);
-    void        signal_l();
+    void        broadcast_l();
 
     void        readOutputParameters();
 
@@ -590,6 +592,8 @@
     // Bit 0 is reset by the async callback thread calling resetDraining(). Out of sequence
     // callbacks are ignored.
     uint32_t                        mDrainSequence;
+    // A condition that must be evaluated by prepareTracks_l() has changed and we must not wait
+    // for async write callback in the thread loop before evaluating it
     bool                            mSignalPending;
     sp<AsyncCallbackThread>         mCallbackThread;
 
@@ -755,7 +759,7 @@
 class AsyncCallbackThread : public Thread {
 public:
 
-    AsyncCallbackThread(const sp<OffloadThread>& offloadThread);
+    AsyncCallbackThread(const wp<PlaybackThread>& playbackThread);
 
     virtual             ~AsyncCallbackThread();
 
@@ -772,17 +776,17 @@
             void        resetDraining();
 
 private:
-    wp<OffloadThread>   mOffloadThread;
+    const wp<PlaybackThread>   mPlaybackThread;
     // mWriteAckSequence corresponds to the last write sequence passed by the offload thread via
     // setWriteBlocked(). The sequence is shifted one bit to the left and the lsb is used
     // to indicate that the callback has been received via resetWriteBlocked()
-    uint32_t            mWriteAckSequence;
+    uint32_t                   mWriteAckSequence;
     // mDrainSequence corresponds to the last drain sequence passed by the offload thread via
     // setDraining(). The sequence is shifted one bit to the left and the lsb is used
     // to indicate that the callback has been received via resetDraining()
-    uint32_t            mDrainSequence;
-    Condition           mWaitWorkCV;
-    Mutex               mLock;
+    uint32_t                   mDrainSequence;
+    Condition                  mWaitWorkCV;
+    Mutex                      mLock;
 };
 
 class DuplicatingThread : public MixerThread {
@@ -947,4 +951,5 @@
 
             // For dumpsys
             const sp<NBAIO_Sink>                mTeeSink;
+            int                                 mClientUid;
 };
diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp
index 821e4a5..9c6e724 100644
--- a/services/audioflinger/Tracks.cpp
+++ b/services/audioflinger/Tracks.cpp
@@ -288,6 +288,12 @@
     return mTrack->getTimestamp(timestamp);
 }
 
+
+void AudioFlinger::TrackHandle::signal()
+{
+    return mTrack->signal();
+}
+
 status_t AudioFlinger::TrackHandle::onTransact(
     uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags)
 {
@@ -364,6 +370,16 @@
 AudioFlinger::PlaybackThread::Track::~Track()
 {
     ALOGV("PlaybackThread::Track destructor");
+
+    // The destructor would clear mSharedBuffer,
+    // but it will not push the decremented reference count,
+    // leaving the client's IMemory dangling indefinitely.
+    // This prevents that leak.
+    if (mSharedBuffer != 0) {
+        mSharedBuffer.clear();
+        // flush the binder command buffer
+        IPCThreadState::self()->flushCommands();
+    }
 }
 
 void AudioFlinger::PlaybackThread::Track::destroy()
@@ -392,7 +408,7 @@
 
 /*static*/ void AudioFlinger::PlaybackThread::Track::appendDumpHeader(String8& result)
 {
-    result.append("   Name Client Type Fmt Chn mask Session fCount S F SRate  "
+    result.append("   Name Client Type      Fmt Chn mask Session fCount S F SRate  "
                   "L dB  R dB    Server Main buf  Aux Buf Flags UndFrmCnt\n");
 }
 
@@ -457,7 +473,7 @@
         nowInUnderrun = '?';
         break;
     }
-    snprintf(&buffer[7], size-7, " %6u %4u %3u %08X %7u %6u %1c %1d %5u %5.2g %5.2g  "
+    snprintf(&buffer[7], size-7, " %6u %4u %08X %08X %7u %6u %1c %1d %5u %5.2g %5.2g  "
                                  "%08X %08X %08X 0x%03X %9u%c\n",
             (mClient == 0) ? getpid_cached : mClient->pid(),
             mStreamType,
@@ -543,12 +559,12 @@
 
     sp<ThreadBase> thread = mThread.promote();
     if (thread != 0) {
-        //TODO: remove when effect offload is implemented
         if (isOffloaded()) {
             Mutex::Autolock _laf(thread->mAudioFlinger->mLock);
             Mutex::Autolock _lth(thread->mLock);
             sp<EffectChain> ec = thread->getEffectChain_l(mSessionId);
-            if (thread->mAudioFlinger->isGlobalEffectEnabled_l() || (ec != 0 && ec->isEnabled())) {
+            if (thread->mAudioFlinger->isNonOffloadableGlobalEffectEnabled_l() ||
+                    (ec != 0 && ec->isNonOffloadableEnabled())) {
                 invalidate();
                 return PERMISSION_DENIED;
             }
@@ -641,7 +657,7 @@
         case RESUMING:
             mState = PAUSING;
             ALOGV("ACTIVE/RESUMING => PAUSING (%d) on thread %p", mName, thread.get());
-            playbackThread->signal_l();
+            playbackThread->broadcast_l();
             break;
 
         default:
@@ -701,7 +717,7 @@
         // before mixer thread can run. This is important when offloading
         // because the hardware buffer could hold a large amount of audio
         playbackThread->flushOutput_l();
-        playbackThread->signal_l();
+        playbackThread->broadcast_l();
     }
 }
 
@@ -747,19 +763,23 @@
     }
     Mutex::Autolock _l(thread->mLock);
     PlaybackThread *playbackThread = (PlaybackThread *)thread.get();
-    if (!playbackThread->mLatchQValid) {
-        return INVALID_OPERATION;
+    if (!isOffloaded()) {
+        if (!playbackThread->mLatchQValid) {
+            return INVALID_OPERATION;
+        }
+        uint32_t unpresentedFrames =
+                ((int64_t) playbackThread->mLatchQ.mUnpresentedFrames * mSampleRate) /
+                playbackThread->mSampleRate;
+        uint32_t framesWritten = mAudioTrackServerProxy->framesReleased();
+        if (framesWritten < unpresentedFrames) {
+            return INVALID_OPERATION;
+        }
+        timestamp.mPosition = framesWritten - unpresentedFrames;
+        timestamp.mTime = playbackThread->mLatchQ.mTimestamp.mTime;
+        return NO_ERROR;
     }
-    uint32_t unpresentedFrames =
-            ((int64_t) playbackThread->mLatchQ.mUnpresentedFrames * mSampleRate) /
-            playbackThread->mSampleRate;
-    uint32_t framesWritten = mAudioTrackServerProxy->framesReleased();
-    if (framesWritten < unpresentedFrames) {
-        return INVALID_OPERATION;
-    }
-    timestamp.mPosition = framesWritten - unpresentedFrames;
-    timestamp.mTime = playbackThread->mLatchQ.mTimestamp.mTime;
-    return NO_ERROR;
+
+    return playbackThread->getTimestamp_l(timestamp);
 }
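For the non-offloaded branch above, the reported position is the number of frames the server proxy has released minus the not-yet-presented frames, rescaled from the thread's sample rate to the track's. A short worked example with assumed values:

    // Illustrative numbers only, not taken from the patch.
    uint64_t threadUnpresented = 960;     // mLatchQ.mUnpresentedFrames, at the thread rate
    uint32_t threadRate = 48000;          // playbackThread->mSampleRate
    uint32_t trackRate  = 44100;          // mSampleRate of the track
    uint32_t unpresented   = (uint32_t)((threadUnpresented * trackRate) / threadRate);  // 882
    uint32_t framesWritten = 100000;      // mAudioTrackServerProxy->framesReleased()
    uint32_t position      = framesWritten - unpresented;   // 99118 -> timestamp.mPosition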
 
 status_t AudioFlinger::PlaybackThread::Track::attachAuxEffect(int EffectId)
@@ -787,7 +807,11 @@
                 return INVALID_OPERATION;
             }
             srcThread->removeEffect_l(effect);
-            playbackThread->addEffect_l(effect);
+            status = playbackThread->addEffect_l(effect);
+            if (status != NO_ERROR) {
+                srcThread->addEffect_l(effect);
+                return INVALID_OPERATION;
+            }
             // removeEffect_l() has stopped the effect if it was active so it must be restarted
             if (effect->state() == EffectModule::ACTIVE ||
                     effect->state() == EffectModule::STOPPING) {
@@ -908,6 +932,16 @@
     mIsInvalid = true;
 }
 
+void AudioFlinger::PlaybackThread::Track::signal()
+{
+    sp<ThreadBase> thread = mThread.promote();
+    if (thread != 0) {
+        PlaybackThread *t = (PlaybackThread *)thread.get();
+        Mutex::Autolock _l(t->mLock);
+        t->broadcast_l();
+    }
+}
+
 // ----------------------------------------------------------------------------
 
 sp<AudioFlinger::PlaybackThread::TimedTrack>
diff --git a/services/camera/libcameraservice/Android.mk b/services/camera/libcameraservice/Android.mk
index d659ebb..d23f8b9 100644
--- a/services/camera/libcameraservice/Android.mk
+++ b/services/camera/libcameraservice/Android.mk
@@ -33,6 +33,7 @@
     device3/Camera3InputStream.cpp \
     device3/Camera3OutputStream.cpp \
     device3/Camera3ZslStream.cpp \
+    device3/StatusTracker.cpp \
     gui/RingBufferConsumer.cpp \
 
 LOCAL_SHARED_LIBRARIES:= \
diff --git a/services/camera/libcameraservice/CameraService.cpp b/services/camera/libcameraservice/CameraService.cpp
index fe16314..34a5b15 100644
--- a/services/camera/libcameraservice/CameraService.cpp
+++ b/services/camera/libcameraservice/CameraService.cpp
@@ -225,6 +225,49 @@
     return rc;
 }
 
+status_t CameraService::getCameraCharacteristics(int cameraId,
+                                                CameraMetadata* cameraInfo) {
+    if (!cameraInfo) {
+        ALOGE("%s: cameraInfo is NULL", __FUNCTION__);
+        return BAD_VALUE;
+    }
+
+    if (!mModule) {
+        ALOGE("%s: camera hardware module doesn't exist", __FUNCTION__);
+        return -ENODEV;
+    }
+
+    if (mModule->common.module_api_version < CAMERA_MODULE_API_VERSION_2_0) {
+        // TODO: Remove this check once HAL1 shim is in place.
+        ALOGE("%s: Only HAL module version V2 or higher supports static metadata", __FUNCTION__);
+        return BAD_VALUE;
+    }
+
+    if (cameraId < 0 || cameraId >= mNumberOfCameras) {
+        ALOGE("%s: Invalid camera id: %d", __FUNCTION__, cameraId);
+        return BAD_VALUE;
+    }
+
+    int facing;
+    if (getDeviceVersion(cameraId, &facing) == CAMERA_DEVICE_API_VERSION_1_0) {
+        // TODO: Remove this check once HAL1 shim is in place.
+        ALOGE("%s: HAL1 doesn't support static metadata yet", __FUNCTION__);
+        return BAD_VALUE;
+    }
+
+    if (getDeviceVersion(cameraId, &facing) <= CAMERA_DEVICE_API_VERSION_2_1) {
+        // Disable HAL2.x support for camera2 API for now.
+        ALOGW("%s: HAL2.x doesn't support getCameraCharacteristics for now", __FUNCTION__);
+        return BAD_VALUE;
+    }
+
+    struct camera_info info;
+    status_t ret = mModule->get_camera_info(cameraId, &info);
+    *cameraInfo = info.static_camera_characteristics;
+
+    return ret;
+}
+
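A hedged client-side sketch of how the new characteristics query can be exercised from native code; the "media.camera" service name and binder lookup are the standard ones, but the tag read at the end is only an example:

    // Illustrative only: fetch static metadata for camera 0 and read one tag.
    sp<ICameraService> cs = interface_cast<ICameraService>(
            defaultServiceManager()->getService(String16("media.camera")));

    CameraMetadata info;
    status_t err = cs->getCameraCharacteristics(/*cameraId*/ 0, &info);
    if (err == OK) {
        camera_metadata_entry_t e = info.find(ANDROID_SENSOR_ORIENTATION);
        if (e.count > 0) {
            ALOGV("Sensor orientation: %d", e.data.i32[0]);
        }
    }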
 int CameraService::getDeviceVersion(int cameraId, int* facing) {
     struct camera_info info;
     if (mModule->get_camera_info(cameraId, &info) != OK) {
diff --git a/services/camera/libcameraservice/CameraService.h b/services/camera/libcameraservice/CameraService.h
index b34a0f6..ad6a582 100644
--- a/services/camera/libcameraservice/CameraService.h
+++ b/services/camera/libcameraservice/CameraService.h
@@ -71,6 +71,8 @@
     virtual int32_t     getNumberOfCameras();
     virtual status_t    getCameraInfo(int cameraId,
                                       struct CameraInfo* cameraInfo);
+    virtual status_t    getCameraCharacteristics(int cameraId,
+                                                 CameraMetadata* cameraInfo);
 
     virtual status_t connect(const sp<ICameraClient>& cameraClient, int cameraId,
             const String16& clientPackageName, int clientUid,
diff --git a/services/camera/libcameraservice/api1/Camera2Client.cpp b/services/camera/libcameraservice/api1/Camera2Client.cpp
index bda2887..09829ea 100644
--- a/services/camera/libcameraservice/api1/Camera2Client.cpp
+++ b/services/camera/libcameraservice/api1/Camera2Client.cpp
@@ -283,6 +283,7 @@
         CASE_APPEND_ENUM(ANDROID_CONTROL_AF_STATE_INACTIVE)
         CASE_APPEND_ENUM(ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN)
         CASE_APPEND_ENUM(ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED)
+        CASE_APPEND_ENUM(ANDROID_CONTROL_AF_STATE_PASSIVE_UNFOCUSED)
         CASE_APPEND_ENUM(ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN)
         CASE_APPEND_ENUM(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED)
         CASE_APPEND_ENUM(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED)
@@ -326,6 +327,10 @@
     result.appendFormat("    Video stabilization is %s\n",
             p.videoStabilization ? "enabled" : "disabled");
 
+    result.appendFormat("    Selected still capture FPS range: %d - %d\n",
+            p.fastInfo.bestStillCaptureFpsRange[0],
+            p.fastInfo.bestStillCaptureFpsRange[1]);
+
     result.append("  Current streams:\n");
     result.appendFormat("    Preview stream ID: %d\n",
             getPreviewStreamId());
@@ -604,7 +609,7 @@
     }
     if (params.previewCallbackFlags != (uint32_t)flag) {
 
-        if (flag != CAMERA_FRAME_CALLBACK_FLAG_NOOP) {
+        if (params.previewCallbackSurface && flag != CAMERA_FRAME_CALLBACK_FLAG_NOOP) {
             // Disable any existing preview callback window when enabling
             // preview callback flags
             res = mCallbackProcessor->setCallbackWindow(NULL);
@@ -1483,6 +1488,7 @@
                     case ANDROID_CONTROL_AF_STATE_INACTIVE:
                     case ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN:
                     case ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED:
+                    case ANDROID_CONTROL_AF_STATE_PASSIVE_UNFOCUSED:
                     default:
                         // Unexpected in AUTO/MACRO mode
                         ALOGE("%s: Unexpected AF state transition in AUTO/MACRO mode: %d",
@@ -1525,6 +1531,7 @@
                         afInMotion = true;
                         // no break
                     case ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED:
+                    case ANDROID_CONTROL_AF_STATE_PASSIVE_UNFOCUSED:
                         // Stop passive scan, inform upstream
                         if (l.mParameters.enableFocusMoveMessages) {
                             sendMovingMessage = true;
diff --git a/services/camera/libcameraservice/api1/client2/CallbackProcessor.cpp b/services/camera/libcameraservice/api1/client2/CallbackProcessor.cpp
index 9d8c4a1..d2ac79c 100644
--- a/services/camera/libcameraservice/api1/client2/CallbackProcessor.cpp
+++ b/services/camera/libcameraservice/api1/client2/CallbackProcessor.cpp
@@ -312,6 +312,16 @@
             return OK;
         }
 
+        if (imgBuffer.width != static_cast<uint32_t>(l.mParameters.previewWidth) ||
+                imgBuffer.height != static_cast<uint32_t>(l.mParameters.previewHeight)) {
+            ALOGW("%s: The preview size has changed to %d x %d from %d x %d, this buffer is"
+                    " no longer valid, dropping", __FUNCTION__,
+                    l.mParameters.previewWidth, l.mParameters.previewHeight,
+                    imgBuffer.width, imgBuffer.height);
+            mCallbackConsumer->unlockBuffer(imgBuffer);
+            return OK;
+        }
+
         previewFormat = l.mParameters.previewFormat;
         useFlexibleYuv = l.mParameters.fastInfo.useFlexibleYuv &&
                 (previewFormat == HAL_PIXEL_FORMAT_YCrCb_420_SP ||
diff --git a/services/camera/libcameraservice/api1/client2/CaptureSequencer.cpp b/services/camera/libcameraservice/api1/client2/CaptureSequencer.cpp
index ca3198f..1a1b27b 100644
--- a/services/camera/libcameraservice/api1/client2/CaptureSequencer.cpp
+++ b/services/camera/libcameraservice/api1/client2/CaptureSequencer.cpp
@@ -103,12 +103,12 @@
     }
 }
 
-void CaptureSequencer::onFrameAvailable(int32_t frameId,
+void CaptureSequencer::onFrameAvailable(int32_t requestId,
         const CameraMetadata &frame) {
     ALOGV("%s: Listener found new frame", __FUNCTION__);
     ATRACE_CALL();
     Mutex::Autolock l(mInputMutex);
-    mNewFrameId = frameId;
+    mNewFrameId = requestId;
     mNewFrame = frame;
     if (!mNewFrameReceived) {
         mNewFrameReceived = true;
diff --git a/services/camera/libcameraservice/api1/client2/CaptureSequencer.h b/services/camera/libcameraservice/api1/client2/CaptureSequencer.h
index 7ad461a..e1e6201 100644
--- a/services/camera/libcameraservice/api1/client2/CaptureSequencer.h
+++ b/services/camera/libcameraservice/api1/client2/CaptureSequencer.h
@@ -62,7 +62,7 @@
     void notifyAutoExposure(uint8_t newState, int triggerId);
 
     // Notifications from the frame processor
-    virtual void onFrameAvailable(int32_t frameId, const CameraMetadata &frame);
+    virtual void onFrameAvailable(int32_t requestId, const CameraMetadata &frame);
 
     // Notifications from the JPEG processor
     void onCaptureAvailable(nsecs_t timestamp, sp<MemoryBase> captureBuffer);
diff --git a/services/camera/libcameraservice/api1/client2/Parameters.cpp b/services/camera/libcameraservice/api1/client2/Parameters.cpp
index 0459866..8e197a9 100644
--- a/services/camera/libcameraservice/api1/client2/Parameters.cpp
+++ b/services/camera/libcameraservice/api1/client2/Parameters.cpp
@@ -58,13 +58,13 @@
     res = buildQuirks();
     if (res != OK) return res;
 
-    camera_metadata_ro_entry_t availableProcessedSizes =
-        staticInfo(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES, 2);
-    if (!availableProcessedSizes.count) return NO_INIT;
+    const Size MAX_PREVIEW_SIZE = { MAX_PREVIEW_WIDTH, MAX_PREVIEW_HEIGHT };
+    res = getFilteredPreviewSizes(MAX_PREVIEW_SIZE, &availablePreviewSizes);
+    if (res != OK) return res;
 
     // TODO: Pick more intelligently
-    previewWidth = availableProcessedSizes.data.i32[0];
-    previewHeight = availableProcessedSizes.data.i32[1];
+    previewWidth = availablePreviewSizes[0].width;
+    previewHeight = availablePreviewSizes[0].height;
     videoWidth = previewWidth;
     videoHeight = previewHeight;
 
@@ -75,12 +75,13 @@
                     previewWidth, previewHeight));
     {
         String8 supportedPreviewSizes;
-        for (size_t i=0; i < availableProcessedSizes.count; i += 2) {
+        for (size_t i = 0; i < availablePreviewSizes.size(); i++) {
             if (i != 0) supportedPreviewSizes += ",";
             supportedPreviewSizes += String8::format("%dx%d",
-                    availableProcessedSizes.data.i32[i],
-                    availableProcessedSizes.data.i32[i+1]);
+                    availablePreviewSizes[i].width,
+                    availablePreviewSizes[i].height);
         }
+        ALOGV("Supported preview sizes are: %s", supportedPreviewSizes.string());
         params.set(CameraParameters::KEY_SUPPORTED_PREVIEW_SIZES,
                 supportedPreviewSizes);
         params.set(CameraParameters::KEY_SUPPORTED_VIDEO_SIZES,
@@ -183,6 +184,7 @@
 
     // NOTE: Not scaled like FPS range values are.
     previewFps = fpsFromRange(previewFpsRange[0], previewFpsRange[1]);
+    lastSetPreviewFps = previewFps;
     params.set(CameraParameters::KEY_PREVIEW_FRAME_RATE,
             previewFps);
 
@@ -248,9 +250,17 @@
         staticInfo(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES, 4);
     if (!availableJpegThumbnailSizes.count) return NO_INIT;
 
-    // TODO: Pick default thumbnail size sensibly
-    jpegThumbSize[0] = availableJpegThumbnailSizes.data.i32[0];
-    jpegThumbSize[1] = availableJpegThumbnailSizes.data.i32[1];
+    // Pick the largest thumbnail size that matches still image aspect ratio.
+    ALOG_ASSERT(pictureWidth > 0 && pictureHeight > 0,
+            "Invalid picture size, %d x %d", pictureWidth, pictureHeight);
+    float picAspectRatio = static_cast<float>(pictureWidth) / pictureHeight;
+    Size thumbnailSize =
+            getMaxSizeForRatio(
+                    picAspectRatio,
+                    &availableJpegThumbnailSizes.data.i32[0],
+                    availableJpegThumbnailSizes.count);
+    jpegThumbSize[0] = thumbnailSize.width;
+    jpegThumbSize[1] = thumbnailSize.height;
 
     params.set(CameraParameters::KEY_JPEG_THUMBNAIL_WIDTH,
             jpegThumbSize[0]);
@@ -851,6 +861,33 @@
         arrayHeight = activeArraySize.data.i32[3];
     } else return NO_INIT;
 
+    // We'll set the target FPS range for still captures to be as wide
+    // as possible to give the HAL maximum latitude for exposure selection
+    camera_metadata_ro_entry_t availableFpsRanges =
+        staticInfo(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES, 2);
+    if (availableFpsRanges.count < 2 || availableFpsRanges.count % 2 != 0) {
+        return NO_INIT;
+    }
+
+    int32_t bestStillCaptureFpsRange[2] = {
+        availableFpsRanges.data.i32[0], availableFpsRanges.data.i32[1]
+    };
+    int32_t curRange =
+            bestStillCaptureFpsRange[1] - bestStillCaptureFpsRange[0];
+    for (size_t i = 2; i < availableFpsRanges.count; i += 2) {
+        int32_t nextRange =
+                availableFpsRanges.data.i32[i + 1] -
+                availableFpsRanges.data.i32[i];
+        if ( (nextRange > curRange) ||       // Maximize size of FPS range first
+                (nextRange == curRange &&    // Then minimize low-end FPS
+                 bestStillCaptureFpsRange[0] > availableFpsRanges.data.i32[i])) {
+
+            bestStillCaptureFpsRange[0] = availableFpsRanges.data.i32[i];
+            bestStillCaptureFpsRange[1] = availableFpsRanges.data.i32[i + 1];
+            curRange = nextRange;
+        }
+    }
+
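A standalone worked example of the range selection above, with made-up HAL ranges:

    // Assumed ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES payload: {15,30} {30,30} {7,30}
    int32_t ranges[] = { 15, 30,   30, 30,   7, 30 };
    size_t  count    = sizeof(ranges) / sizeof(ranges[0]);

    int32_t best[2]   = { ranges[0], ranges[1] };   // start with {15, 30}
    int32_t bestWidth = best[1] - best[0];
    for (size_t i = 2; i < count; i += 2) {
        int32_t width = ranges[i + 1] - ranges[i];
        if (width > bestWidth ||                              // widest range wins...
                (width == bestWidth && ranges[i] < best[0])) { // ...then lowest minimum FPS
            best[0] = ranges[i];
            best[1] = ranges[i + 1];
            bestWidth = width;
        }
    }
    // best == {7, 30}: width 23 beats 15 and 0, giving the HAL maximum exposure latitude.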
     camera_metadata_ro_entry_t availableFaceDetectModes =
         staticInfo(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES, 0, 0,
                 false);
@@ -970,6 +1007,8 @@
 
     fastInfo.arrayWidth = arrayWidth;
     fastInfo.arrayHeight = arrayHeight;
+    fastInfo.bestStillCaptureFpsRange[0] = bestStillCaptureFpsRange[0];
+    fastInfo.bestStillCaptureFpsRange[1] = bestStillCaptureFpsRange[1];
     fastInfo.bestFaceDetectMode = bestFaceDetectMode;
     fastInfo.maxFaces = maxFaces;
 
@@ -1072,15 +1111,13 @@
                     validatedParams.previewWidth, validatedParams.previewHeight);
             return BAD_VALUE;
         }
-        camera_metadata_ro_entry_t availablePreviewSizes =
-            staticInfo(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES);
-        for (i = 0; i < availablePreviewSizes.count; i += 2 ) {
-            if ((availablePreviewSizes.data.i32[i] ==
+        for (i = 0; i < availablePreviewSizes.size(); i++) {
+            if ((availablePreviewSizes[i].width ==
                     validatedParams.previewWidth) &&
-                (availablePreviewSizes.data.i32[i+1] ==
+                (availablePreviewSizes[i].height ==
                     validatedParams.previewHeight)) break;
         }
-        if (i == availablePreviewSizes.count) {
+        if (i == availablePreviewSizes.size()) {
             ALOGE("%s: Requested preview size %d x %d is not supported",
                     __FUNCTION__, validatedParams.previewWidth,
                     validatedParams.previewHeight);
@@ -1124,6 +1161,12 @@
         validatedParams.previewFps =
             fpsFromRange(validatedParams.previewFpsRange[0],
                          validatedParams.previewFpsRange[1]);
+
+        // Update our last-seen single preview FPS, needed for disambiguating
+        // when the application is intending to use the deprecated single-FPS
+        // setting vs. the range FPS setting
+        validatedParams.lastSetPreviewFps = newParams.getPreviewFrameRate();
+
         newParams.setPreviewFrameRate(validatedParams.previewFps);
     }
 
@@ -1159,12 +1202,15 @@
         }
     }
 
-    // PREVIEW_FRAME_RATE
-    // Deprecated, only use if the preview fps range is unchanged this time.
-    // The single-value FPS is the same as the minimum of the range.
+    // PREVIEW_FRAME_RATE Deprecated, only use if the preview fps range is
+    // unchanged this time.  The single-value FPS is the same as the minimum of
+    // the range.  To detect whether the application has changed the value of
+    // previewFps, compare against their last-set preview FPS instead of the
+    // single FPS we may have synthesized from a range FPS set.
     if (!fpsRangeChanged) {
         validatedParams.previewFps = newParams.getPreviewFrameRate();
-        if (validatedParams.previewFps != previewFps || recordingHintChanged) {
+        if (validatedParams.previewFps != lastSetPreviewFps ||
+                recordingHintChanged) {
             camera_metadata_ro_entry_t availableFrameRates =
                 staticInfo(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES);
             /**
@@ -1235,7 +1281,10 @@
                 String8::format("%d,%d",
                         validatedParams.previewFpsRange[0] * kFpsToApiScale,
                         validatedParams.previewFpsRange[1] * kFpsToApiScale));
-
+        // Update our last-seen single preview FPS, needed for disambiguating
+        // when the application is intending to use the deprecated single-FPS
+        // setting vs. the range FPS setting
+        validatedParams.lastSetPreviewFps = validatedParams.previewFps;
     }
 
     // PICTURE_SIZE
@@ -1618,15 +1667,13 @@
                     __FUNCTION__);
             return BAD_VALUE;
         }
-        camera_metadata_ro_entry_t availableVideoSizes =
-            staticInfo(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES);
-        for (i = 0; i < availableVideoSizes.count; i += 2 ) {
-            if ((availableVideoSizes.data.i32[i] ==
+        for (i = 0; i < availablePreviewSizes.size(); i++) {
+            if ((availablePreviewSizes[i].width ==
                     validatedParams.videoWidth) &&
-                (availableVideoSizes.data.i32[i+1] ==
+                (availablePreviewSizes[i].height ==
                     validatedParams.videoHeight)) break;
         }
-        if (i == availableVideoSizes.count) {
+        if (i == availablePreviewSizes.size()) {
             ALOGE("%s: Requested video size %d x %d is not supported",
                     __FUNCTION__, validatedParams.videoWidth,
                     validatedParams.videoHeight);
@@ -1712,8 +1759,15 @@
             &metadataMode, 1);
     if (res != OK) return res;
 
-    res = request->update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
-            previewFpsRange, 2);
+    camera_metadata_entry_t intent =
+            request->find(ANDROID_CONTROL_CAPTURE_INTENT);
+    if (intent.data.u8[0] == ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE) {
+        res = request->update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
+                fastInfo.bestStillCaptureFpsRange, 2);
+    } else {
+        res = request->update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
+                previewFpsRange, 2);
+    }
     if (res != OK) return res;
 
     uint8_t reqWbLock = autoWhiteBalanceLock ?
@@ -2447,6 +2501,64 @@
     return cropYToArray(normalizedYToCrop(y));
 }
 
+status_t Parameters::getFilteredPreviewSizes(Size limit, Vector<Size> *sizes) {
+    if (info == NULL) {
+        ALOGE("%s: Static metadata is not initialized", __FUNCTION__);
+        return NO_INIT;
+    }
+    if (sizes == NULL) {
+        ALOGE("%s: Input size is null", __FUNCTION__);
+        return BAD_VALUE;
+    }
+
+    const size_t SIZE_COUNT = sizeof(Size) / sizeof(int);
+    camera_metadata_ro_entry_t availableProcessedSizes =
+        staticInfo(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES, SIZE_COUNT);
+    if (availableProcessedSizes.count < SIZE_COUNT) return BAD_VALUE;
+
+    Size previewSize;
+    for (size_t i = 0; i < availableProcessedSizes.count; i += SIZE_COUNT) {
+        previewSize.width = availableProcessedSizes.data.i32[i];
+        previewSize.height = availableProcessedSizes.data.i32[i+1];
+        // Need to skip preview sizes that are too large.
+        if (previewSize.width <= limit.width &&
+                previewSize.height <= limit.height) {
+            sizes->push(previewSize);
+        }
+    }
+    if (sizes->isEmpty()) {
+        ALOGE("generated preview size list is empty!!");
+        return BAD_VALUE;
+    }
+    return OK;
+}
+
+Parameters::Size Parameters::getMaxSizeForRatio(
+        float ratio, const int32_t* sizeArray, size_t count) {
+    ALOG_ASSERT(sizeArray != NULL, "size array shouldn't be NULL");
+    ALOG_ASSERT(count >= 2 && count % 2 == 0, "count must be a positive even number");
+
+    Size maxSize = {0, 0};
+    for (size_t i = 0; i < count; i += 2) {
+        if (sizeArray[i] > 0 && sizeArray[i+1] > 0) {
+            float curRatio = static_cast<float>(sizeArray[i]) / sizeArray[i+1];
+            if (fabs(curRatio - ratio) < ASPECT_RATIO_TOLERANCE && maxSize.width < sizeArray[i]) {
+                maxSize.width = sizeArray[i];
+                maxSize.height = sizeArray[i+1];
+            }
+        }
+    }
+
+    if (maxSize.width == 0 || maxSize.height == 0) {
+        maxSize.width = sizeArray[0];
+        maxSize.height = sizeArray[1];
+        ALOGW("Unable to find a size to match the given aspect ratio %f. "
+                "Falling back to %d x %d", ratio, maxSize.width, maxSize.height);
+    }
+
+    return maxSize;
+}
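A worked example of the thumbnail selection above; the sizes are assumptions, not taken from any particular HAL:

    // 4:3 still capture, so picAspectRatio = 4000.0f / 3000 ~= 1.333.
    int32_t thumbSizes[] = { 0, 0,   160, 120,   320, 240,   320, 180 };
    // 0x0 is skipped; 160x120 and 320x240 both match 1.333 within
    // ASPECT_RATIO_TOLERANCE, while 320x180 (1.778) does not. The larger
    // matching width wins, so getMaxSizeForRatio() returns {320, 240} and
    // jpegThumbSize becomes 320x240.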
+
 Parameters::CropRegion Parameters::calculateCropRegion(
                             Parameters::CropRegion::Outputs outputs) const {
 
diff --git a/services/camera/libcameraservice/api1/client2/Parameters.h b/services/camera/libcameraservice/api1/client2/Parameters.h
index 464830c..2e78c73 100644
--- a/services/camera/libcameraservice/api1/client2/Parameters.h
+++ b/services/camera/libcameraservice/api1/client2/Parameters.h
@@ -46,6 +46,7 @@
 
     int previewWidth, previewHeight;
     int32_t previewFpsRange[2];
+    int lastSetPreviewFps; // the last single FPS value seen in a set call
     int previewFps; // deprecated, here only for tracking changes
     int previewFormat;
 
@@ -105,6 +106,11 @@
     };
     Vector<Area> focusingAreas;
 
+    struct Size {
+        int32_t width;
+        int32_t height;
+    };
+
     int32_t exposureCompensation;
     bool autoExposureLock;
     bool autoWhiteBalanceLock;
@@ -159,6 +165,11 @@
 
     // Number of zoom steps to simulate
     static const unsigned int NUM_ZOOM_STEPS = 100;
+    // Max preview size allowed
+    static const unsigned int MAX_PREVIEW_WIDTH = 1920;
+    static const unsigned int MAX_PREVIEW_HEIGHT = 1080;
+    // Aspect ratio tolerance
+    static const float ASPECT_RATIO_TOLERANCE = 0.001;
 
     // Full static camera info, object owned by someone else, such as
     // Camera2Device.
@@ -171,6 +182,7 @@
     struct DeviceInfo {
         int32_t arrayWidth;
         int32_t arrayHeight;
+        int32_t bestStillCaptureFpsRange[2];
         uint8_t bestFaceDetectMode;
         int32_t maxFaces;
         struct OverrideModes {
@@ -317,6 +329,12 @@
     int cropYToNormalized(int y) const;
     int normalizedXToCrop(int x) const;
     int normalizedYToCrop(int y) const;
+
+    Vector<Size> availablePreviewSizes;
+    // Get the list of preview sizes (no larger than limit) from static metadata.
+    status_t getFilteredPreviewSizes(Size limit, Vector<Size> *sizes);
+    // Get max size (from the size array) that matches the given aspect ratio.
+    Size getMaxSizeForRatio(float ratio, const int32_t* sizeArray, size_t count);
 };
 
 // This class encapsulates the Parameters class so that it can only be accessed
diff --git a/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp b/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp
index 08ab357..4207ba9 100644
--- a/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp
+++ b/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp
@@ -71,7 +71,7 @@
     }
 }
 
-void ZslProcessor::onFrameAvailable(int32_t /*frameId*/,
+void ZslProcessor::onFrameAvailable(int32_t /*requestId*/,
         const CameraMetadata &frame) {
     Mutex::Autolock l(mInputMutex);
     camera_metadata_ro_entry_t entry;
diff --git a/services/camera/libcameraservice/api1/client2/ZslProcessor.h b/services/camera/libcameraservice/api1/client2/ZslProcessor.h
index 5fb178f..6d3cb85 100644
--- a/services/camera/libcameraservice/api1/client2/ZslProcessor.h
+++ b/services/camera/libcameraservice/api1/client2/ZslProcessor.h
@@ -54,7 +54,7 @@
     // From mZslConsumer
     virtual void onFrameAvailable();
     // From FrameProcessor
-    virtual void onFrameAvailable(int32_t frameId, const CameraMetadata &frame);
+    virtual void onFrameAvailable(int32_t requestId, const CameraMetadata &frame);
 
     virtual void onBufferReleased(buffer_handle_t *handle);
 
diff --git a/services/camera/libcameraservice/api1/client2/ZslProcessor3.cpp b/services/camera/libcameraservice/api1/client2/ZslProcessor3.cpp
index 3e05091..776ebe2 100644
--- a/services/camera/libcameraservice/api1/client2/ZslProcessor3.cpp
+++ b/services/camera/libcameraservice/api1/client2/ZslProcessor3.cpp
@@ -61,7 +61,7 @@
     deleteStream();
 }
 
-void ZslProcessor3::onFrameAvailable(int32_t /*frameId*/,
+void ZslProcessor3::onFrameAvailable(int32_t /*requestId*/,
                                      const CameraMetadata &frame) {
     Mutex::Autolock l(mInputMutex);
     camera_metadata_ro_entry_t entry;
diff --git a/services/camera/libcameraservice/api1/client2/ZslProcessor3.h b/services/camera/libcameraservice/api1/client2/ZslProcessor3.h
index 35b85f5..d2f8322 100644
--- a/services/camera/libcameraservice/api1/client2/ZslProcessor3.h
+++ b/services/camera/libcameraservice/api1/client2/ZslProcessor3.h
@@ -51,7 +51,7 @@
     ~ZslProcessor3();
 
     // From FrameProcessor
-    virtual void onFrameAvailable(int32_t frameId, const CameraMetadata &frame);
+    virtual void onFrameAvailable(int32_t requestId, const CameraMetadata &frame);
 
     /**
      ****************************************
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
index 055ea12..72126c1 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
@@ -45,14 +45,6 @@
                 cameraId, cameraFacing, clientPid, clientUid, servicePid),
     mRemoteCallback(remoteCallback) {
 }
-void CameraDeviceClientBase::notifyError() {
-    // Thread safe. Don't bother locking.
-    sp<ICameraDeviceCallbacks> remoteCb = mRemoteCallback;
-
-    if (remoteCb != 0) {
-        remoteCb->notifyCallback(CAMERA_MSG_ERROR, CAMERA_ERROR_RELEASED, 0);
-    }
-}
 
 // Interface used by CameraService
 
@@ -164,7 +156,6 @@
     metadata.update(ANDROID_REQUEST_OUTPUT_STREAMS, &outputStreamIds[0],
                     outputStreamIds.size());
 
-    // TODO: @hide ANDROID_REQUEST_ID, or use another request token
     int32_t requestId = mRequestIdCounter++;
     metadata.update(ANDROID_REQUEST_ID, &requestId, /*size*/1);
     ALOGV("%s: Camera %d: Submitting request with ID %d",
@@ -298,11 +289,28 @@
         }
     }
 
+    // HACK b/10949105
+    // Query consumer usage bits to set async operation mode for
+    // GLConsumer using controlledByApp parameter.
+    bool useAsync = false;
+    int32_t consumerUsage;
+    if ((res = bufferProducer->query(NATIVE_WINDOW_CONSUMER_USAGE_BITS,
+            &consumerUsage)) != OK) {
+        ALOGE("%s: Camera %d: Failed to query consumer usage", __FUNCTION__,
+              mCameraId);
+        return res;
+    }
+    if (consumerUsage & GraphicBuffer::USAGE_HW_TEXTURE) {
+        ALOGW("%s: Camera %d: Forcing asynchronous mode for stream",
+                __FUNCTION__, mCameraId);
+        useAsync = true;
+    }
+
     sp<IBinder> binder;
     sp<ANativeWindow> anw;
     if (bufferProducer != 0) {
         binder = bufferProducer->asBinder();
-        anw = new Surface(bufferProducer);
+        anw = new Surface(bufferProducer, useAsync);
     }
 
     // TODO: remove w,h,f since we are ignoring them
@@ -360,6 +368,26 @@
 
         ALOGV("%s: Camera %d: Successfully created a new stream ID %d",
               __FUNCTION__, mCameraId, streamId);
+
+        /**
+         * Set the stream transform flags to automatically
+         * rotate the camera stream for preview use cases.
+         */
+        int32_t transform = 0;
+        res = getRotationTransformLocked(&transform);
+
+        if (res != OK) {
+            // Error logged by getRotationTransformLocked.
+            return res;
+        }
+
+        res = mDevice->setStreamTransform(streamId, transform);
+        if (res != OK) {
+            ALOGE("%s: Failed to set stream transform (stream id %d)",
+                  __FUNCTION__, streamId);
+            return res;
+        }
+
         return streamId;
     }
 
@@ -464,6 +492,34 @@
     return dumpDevice(fd, args);
 }
 
+
+void CameraDeviceClient::notifyError() {
+    // Thread safe. Don't bother locking.
+    sp<ICameraDeviceCallbacks> remoteCb = getRemoteCallback();
+
+    if (remoteCb != 0) {
+        remoteCb->onDeviceError(ICameraDeviceCallbacks::ERROR_CAMERA_DEVICE);
+    }
+}
+
+void CameraDeviceClient::notifyIdle() {
+    // Thread safe. Don't bother locking.
+    sp<ICameraDeviceCallbacks> remoteCb = getRemoteCallback();
+
+    if (remoteCb != 0) {
+        remoteCb->onDeviceIdle();
+    }
+}
+
+void CameraDeviceClient::notifyShutter(int requestId,
+        nsecs_t timestamp) {
+    // Thread safe. Don't bother locking.
+    sp<ICameraDeviceCallbacks> remoteCb = getRemoteCallback();
+    if (remoteCb != 0) {
+        remoteCb->onCaptureStarted(requestId, timestamp);
+    }
+}
+
 // TODO: refactor the code below this with IProCameraUser.
 // it's 100% copy-pasted, so lets not change it right now to make it easier.
 
@@ -495,8 +551,8 @@
 }
 
 /** Device-related methods */
-void CameraDeviceClient::onFrameAvailable(int32_t frameId,
-                                        const CameraMetadata& frame) {
+void CameraDeviceClient::onFrameAvailable(int32_t requestId,
+        const CameraMetadata& frame) {
     ATRACE_CALL();
     ALOGV("%s", __FUNCTION__);
 
@@ -504,7 +560,7 @@
     sp<ICameraDeviceCallbacks> remoteCb = mRemoteCallback;
     if (remoteCb != NULL) {
         ALOGV("%s: frame = %p ", __FUNCTION__, &frame);
-        remoteCb->onResultReceived(frameId, frame);
+        remoteCb->onResultReceived(requestId, frame);
     }
 }
 
@@ -560,4 +616,64 @@
     return true;
 }
 
+status_t CameraDeviceClient::getRotationTransformLocked(int32_t* transform) {
+    ALOGV("%s: begin", __FUNCTION__);
+
+    if (transform == NULL) {
+        ALOGW("%s: null transform", __FUNCTION__);
+        return BAD_VALUE;
+    }
+
+    *transform = 0;
+
+    const CameraMetadata& staticInfo = mDevice->info();
+    camera_metadata_ro_entry_t entry = staticInfo.find(ANDROID_SENSOR_ORIENTATION);
+    if (entry.count == 0) {
+        ALOGE("%s: Camera %d: Can't find android.sensor.orientation in "
+                "static metadata!", __FUNCTION__, mCameraId);
+        return INVALID_OPERATION;
+    }
+
+    int32_t& flags = *transform;
+
+    int orientation = entry.data.i32[0];
+    switch (orientation) {
+        case 0:
+            flags = 0;
+            break;
+        case 90:
+            flags = NATIVE_WINDOW_TRANSFORM_ROT_90;
+            break;
+        case 180:
+            flags = NATIVE_WINDOW_TRANSFORM_ROT_180;
+            break;
+        case 270:
+            flags = NATIVE_WINDOW_TRANSFORM_ROT_270;
+            break;
+        default:
+            ALOGE("%s: Invalid HAL android.sensor.orientation value: %d",
+                  __FUNCTION__, orientation);
+            return INVALID_OPERATION;
+    }
+
+    /**
+     * This magic flag makes surfaceflinger un-rotate the buffers
+     * to counter the extra global device UI rotation whenever the user
+     * physically rotates the device.
+     *
+     * By doing this, the camera buffer always ends up aligned
+     * with the physical camera for a "see through" effect.
+     *
+     * In essence, the buffer only gets rotated during preview use-cases.
+     * The user is still responsible for re-creating streams with the proper
+     * aspect ratio, or the preview will end up looking non-uniformly
+     * stretched.
+     */
+    flags |= NATIVE_WINDOW_TRANSFORM_INVERSE_DISPLAY;
+
+    ALOGV("%s: final transform = 0x%x", __FUNCTION__, flags);
+
+    return OK;
+}
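For reference, the flag combination that setStreamTransform() receives above for a sensor mounted at 90 degrees (constants from <system/window.h>; the orientation value is just an example):

    // Illustrative only: android.sensor.orientation == 90.
    int32_t transform = NATIVE_WINDOW_TRANSFORM_ROT_90 |
                        NATIVE_WINDOW_TRANSFORM_INVERSE_DISPLAY;
    // ROT_90 counters the sensor mounting; INVERSE_DISPLAY asks SurfaceFlinger to
    // undo the current UI rotation so preview stays aligned with the physical camera.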
+
 } // namespace android
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.h b/services/camera/libcameraservice/api2/CameraDeviceClient.h
index c6b6336..b9c16aa 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.h
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.h
@@ -45,8 +45,6 @@
             uid_t clientUid,
             int servicePid);
 
-    virtual void notifyError();
-
     sp<ICameraDeviceCallbacks> mRemoteCallback;
 };
 
@@ -112,14 +110,25 @@
     virtual status_t      dump(int fd, const Vector<String16>& args);
 
     /**
+     * Device listener interface
+     */
+
+    virtual void notifyIdle();
+    virtual void notifyError();
+    virtual void notifyShutter(int requestId, nsecs_t timestamp);
+
+    /**
      * Interface used by independent components of CameraDeviceClient.
      */
 protected:
     /** FilteredListener implementation **/
-    virtual void          onFrameAvailable(int32_t frameId,
+    virtual void          onFrameAvailable(int32_t requestId,
                                            const CameraMetadata& frame);
     virtual void          detachDevice();
 
+    // Calculate the ANativeWindow transform from android.sensor.orientation
+    status_t              getRotationTransformLocked(/*out*/int32_t* transform);
+
 private:
     /** ICameraDeviceUser interface-related private members */
 
diff --git a/services/camera/libcameraservice/api_pro/ProCamera2Client.cpp b/services/camera/libcameraservice/api_pro/ProCamera2Client.cpp
index 2b583e5..1a7a7a7 100644
--- a/services/camera/libcameraservice/api_pro/ProCamera2Client.cpp
+++ b/services/camera/libcameraservice/api_pro/ProCamera2Client.cpp
@@ -374,7 +374,7 @@
 }
 
 /** Device-related methods */
-void ProCamera2Client::onFrameAvailable(int32_t frameId,
+void ProCamera2Client::onFrameAvailable(int32_t requestId,
                                         const CameraMetadata& frame) {
     ATRACE_CALL();
     ALOGV("%s", __FUNCTION__);
@@ -386,7 +386,7 @@
         CameraMetadata tmp(frame);
         camera_metadata_t* meta = tmp.release();
         ALOGV("%s: meta = %p ", __FUNCTION__, meta);
-        mRemoteCallback->onResultReceived(frameId, meta);
+        mRemoteCallback->onResultReceived(requestId, meta);
         tmp.acquire(meta);
     }
 
diff --git a/services/camera/libcameraservice/api_pro/ProCamera2Client.h b/services/camera/libcameraservice/api_pro/ProCamera2Client.h
index 0bf6784..8a0f547 100644
--- a/services/camera/libcameraservice/api_pro/ProCamera2Client.h
+++ b/services/camera/libcameraservice/api_pro/ProCamera2Client.h
@@ -97,7 +97,7 @@
 
 protected:
     /** FilteredListener implementation **/
-    virtual void          onFrameAvailable(int32_t frameId,
+    virtual void          onFrameAvailable(int32_t requestId,
                                            const CameraMetadata& frame);
     virtual void          detachDevice();
 
diff --git a/services/camera/libcameraservice/common/Camera2ClientBase.cpp b/services/camera/libcameraservice/common/Camera2ClientBase.cpp
index 060e2a2..2d1253f 100644
--- a/services/camera/libcameraservice/common/Camera2ClientBase.cpp
+++ b/services/camera/libcameraservice/common/Camera2ClientBase.cpp
@@ -95,7 +95,7 @@
     if (res != OK) {
         ALOGE("%s: Camera %d: unable to initialize device: %s (%d)",
                 __FUNCTION__, TClientBase::mCameraId, strerror(-res), res);
-        return NO_INIT;
+        return res;
     }
 
     res = mDevice->setNotifyCallback(this);
@@ -226,13 +226,18 @@
 }
 
 template <typename TClientBase>
-void Camera2ClientBase<TClientBase>::notifyShutter(int frameNumber,
+void Camera2ClientBase<TClientBase>::notifyIdle() {
+    ALOGV("Camera device is now idle");
+}
+
+template <typename TClientBase>
+void Camera2ClientBase<TClientBase>::notifyShutter(int requestId,
                                                    nsecs_t timestamp) {
-    (void)frameNumber;
+    (void)requestId;
     (void)timestamp;
 
-    ALOGV("%s: Shutter notification for frame %d at time %lld", __FUNCTION__,
-          frameNumber, timestamp);
+    ALOGV("%s: Shutter notification for request id %d at time %lld",
+            __FUNCTION__, requestId, timestamp);
 }
 
 template <typename TClientBase>
@@ -244,13 +249,6 @@
     ALOGV("%s: Autofocus state now %d, last trigger %d",
           __FUNCTION__, newState, triggerId);
 
-    typename SharedCameraCallbacks::Lock l(mSharedCameraCallbacks);
-    if (l.mRemoteCallback != 0) {
-        l.mRemoteCallback->notifyCallback(CAMERA_MSG_FOCUS_MOVE, 1, 0);
-    }
-    if (l.mRemoteCallback != 0) {
-        l.mRemoteCallback->notifyCallback(CAMERA_MSG_FOCUS, 1, 0);
-    }
 }
 
 template <typename TClientBase>
diff --git a/services/camera/libcameraservice/common/Camera2ClientBase.h b/services/camera/libcameraservice/common/Camera2ClientBase.h
index d23197c..61e44f0 100644
--- a/services/camera/libcameraservice/common/Camera2ClientBase.h
+++ b/services/camera/libcameraservice/common/Camera2ClientBase.h
@@ -62,7 +62,8 @@
      */
 
     virtual void          notifyError(int errorCode, int arg1, int arg2);
-    virtual void          notifyShutter(int frameNumber, nsecs_t timestamp);
+    virtual void          notifyIdle();
+    virtual void          notifyShutter(int requestId, nsecs_t timestamp);
     virtual void          notifyAutoFocus(uint8_t newState, int triggerId);
     virtual void          notifyAutoExposure(uint8_t newState, int triggerId);
     virtual void          notifyAutoWhitebalance(uint8_t newState,
diff --git a/services/camera/libcameraservice/common/CameraDeviceBase.h b/services/camera/libcameraservice/common/CameraDeviceBase.h
index ebbd4ea..e80abf1 100644
--- a/services/camera/libcameraservice/common/CameraDeviceBase.h
+++ b/services/camera/libcameraservice/common/CameraDeviceBase.h
@@ -138,9 +138,18 @@
      */
     class NotificationListener {
       public:
-        // Refer to the Camera2 HAL definition for notification definitions
+        // The set of notifications is a merge of the notifications required for
+        // API1 and API2.
+
+        // Required for API 1 and 2
         virtual void notifyError(int errorCode, int arg1, int arg2) = 0;
-        virtual void notifyShutter(int frameNumber, nsecs_t timestamp) = 0;
+
+        // Required only for API2
+        virtual void notifyIdle() = 0;
+        virtual void notifyShutter(int requestId,
+                nsecs_t timestamp) = 0;
+
+        // Required only for API1
         virtual void notifyAutoFocus(uint8_t newState, int triggerId) = 0;
         virtual void notifyAutoExposure(uint8_t newState, int triggerId) = 0;
         virtual void notifyAutoWhitebalance(uint8_t newState,
@@ -165,12 +174,14 @@
     /**
      * Wait for a new frame to be produced, with timeout in nanoseconds.
      * Returns TIMED_OUT when no frame produced within the specified duration
+     * May be called concurrently with most methods, except for getNextFrame.
      */
     virtual status_t waitForNextFrame(nsecs_t timeout) = 0;
 
     /**
      * Get next metadata frame from the frame queue. Returns NULL if the queue
      * is empty; caller takes ownership of the metadata buffer.
+     * May be called concurrently with most methods, except for waitForNextFrame.
      */
     virtual status_t getNextFrame(CameraMetadata *frame) = 0;
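
These two methods are intended to be driven from a dedicated frame-processing thread while other threads submit requests. A minimal sketch of that wait-then-drain pattern, using standard C++ types in place of the AOSP CameraMetadata/status_t types (all names below are illustrative):

    #include <chrono>
    #include <condition_variable>
    #include <deque>
    #include <mutex>
    #include <string>

    // Toy frame queue mirroring the waitForNextFrame / getNextFrame contract.
    class FrameQueue {
      public:
        // Blocks until a frame is available or the timeout expires.
        // Returns false on timeout (analogous to TIMED_OUT).
        bool waitForNextFrame(std::chrono::nanoseconds timeout) {
            std::unique_lock<std::mutex> lock(mMutex);
            return mCond.wait_for(lock, timeout, [this] { return !mFrames.empty(); });
        }

        // Pops the next frame; returns false if the queue is empty.
        bool getNextFrame(std::string *frame) {
            std::lock_guard<std::mutex> lock(mMutex);
            if (mFrames.empty()) return false;
            *frame = std::move(mFrames.front());
            mFrames.pop_front();
            return true;
        }

        // Producer side (the device's capture pipeline in the real code).
        void push(std::string frame) {
            {
                std::lock_guard<std::mutex> lock(mMutex);
                mFrames.push_back(std::move(frame));
            }
            mCond.notify_one();
        }

      private:
        std::mutex mMutex;
        std::condition_variable mCond;
        std::deque<std::string> mFrames;
    };
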
 
diff --git a/services/camera/libcameraservice/common/FrameProcessorBase.cpp b/services/camera/libcameraservice/common/FrameProcessorBase.cpp
index e7b440a..52906ee 100644
--- a/services/camera/libcameraservice/common/FrameProcessorBase.cpp
+++ b/services/camera/libcameraservice/common/FrameProcessorBase.cpp
@@ -66,7 +66,14 @@
 void FrameProcessorBase::dump(int fd, const Vector<String16>& /*args*/) {
     String8 result("    Latest received frame:\n");
     write(fd, result.string(), result.size());
-    mLastFrame.dump(fd, 2, 6);
+
+    CameraMetadata lastFrame;
+    {
+        // Don't race while dumping metadata
+        Mutex::Autolock al(mLastFrameMutex);
+        lastFrame = CameraMetadata(mLastFrame);
+    }
+    lastFrame.dump(fd, 2, 6);
 }
 
 bool FrameProcessorBase::threadLoop() {
@@ -113,6 +120,7 @@
         }
 
         if (!frame.isEmpty()) {
+            Mutex::Autolock al(mLastFrameMutex);
             mLastFrame.acquire(frame);
         }
     }
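
The dump change above follows a common pattern: copy the shared state while briefly holding the lock, then do the slow file I/O without it, so a stalled dump cannot block the frame-processing thread. A condensed sketch of the same idea in standard C++ (names here are illustrative, and a std::string stands in for the protected CameraMetadata):

    #include <cstdio>
    #include <mutex>
    #include <string>

    std::mutex gLastFrameMutex;
    std::string gLastFrame;  // stands in for the metadata being protected

    // Snapshot under the lock, write outside it.
    void dumpLastFrame(std::FILE *out) {
        std::string snapshot;
        {
            std::lock_guard<std::mutex> lock(gLastFrameMutex);
            snapshot = gLastFrame;
        }
        std::fprintf(out, "Latest received frame: %s\n", snapshot.c_str());
    }
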
diff --git a/services/camera/libcameraservice/common/FrameProcessorBase.h b/services/camera/libcameraservice/common/FrameProcessorBase.h
index 1e46beb..4d80ebf 100644
--- a/services/camera/libcameraservice/common/FrameProcessorBase.h
+++ b/services/camera/libcameraservice/common/FrameProcessorBase.h
@@ -39,7 +39,7 @@
     virtual ~FrameProcessorBase();
 
     struct FilteredListener: virtual public RefBase {
-        virtual void onFrameAvailable(int32_t frameId,
+        virtual void onFrameAvailable(int32_t requestId,
                                       const CameraMetadata &frame) = 0;
     };
 
@@ -58,6 +58,7 @@
     virtual bool threadLoop();
 
     Mutex mInputMutex;
+    Mutex mLastFrameMutex;
 
     struct RangeListener {
         int32_t minId;
diff --git a/services/camera/libcameraservice/device2/Camera2Device.cpp b/services/camera/libcameraservice/device2/Camera2Device.cpp
index fe2cd77..2bc1a8a 100644
--- a/services/camera/libcameraservice/device2/Camera2Device.cpp
+++ b/services/camera/libcameraservice/device2/Camera2Device.cpp
@@ -464,8 +464,10 @@
                 listener->notifyError(ext1, ext2, ext3);
                 break;
             case CAMERA2_MSG_SHUTTER: {
-                nsecs_t timestamp = (nsecs_t)ext2 | ((nsecs_t)(ext3) << 32 );
-                listener->notifyShutter(ext1, timestamp);
+                // TODO: Only needed for camera2 API, which is unsupported
+                // by HAL2 directly.
+                // nsecs_t timestamp = (nsecs_t)ext2 | ((nsecs_t)(ext3) << 32 );
+                // listener->notifyShutter(requestId, timestamp);
                 break;
             }
             case CAMERA2_MSG_AUTOFOCUS:
diff --git a/services/camera/libcameraservice/device2/Camera2Device.h b/services/camera/libcameraservice/device2/Camera2Device.h
index 2aa22a2..1f53c56 100644
--- a/services/camera/libcameraservice/device2/Camera2Device.h
+++ b/services/camera/libcameraservice/device2/Camera2Device.h
@@ -28,6 +28,10 @@
 
 /**
  * CameraDevice for HAL devices with version CAMERA_DEVICE_API_VERSION_2_0
+ *
+ * TODO for camera2 API implementation:
+ * Does not produce notifyShutter / notifyIdle callbacks to NotificationListener;
+ * use waitUntilDrained to detect when the device is idle.
  */
 class Camera2Device: public CameraDeviceBase {
   public:
diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp
index b70a278..303823c 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Device.cpp
@@ -82,6 +82,7 @@
 status_t Camera3Device::initialize(camera_module_t *module)
 {
     ATRACE_CALL();
+    Mutex::Autolock il(mInterfaceLock);
     Mutex::Autolock l(mLock);
 
     ALOGV("%s: Initializing device for camera %d", __FUNCTION__, mId);
@@ -159,9 +160,20 @@
         }
     }
 
+    /** Start up status tracker thread */
+    mStatusTracker = new StatusTracker(this);
+    res = mStatusTracker->run(String8::format("C3Dev-%d-Status", mId).string());
+    if (res != OK) {
+        SET_ERR_L("Unable to start status tracking thread: %s (%d)",
+                strerror(-res), res);
+        device->common.close(&device->common);
+        mStatusTracker.clear();
+        return res;
+    }
+
     /** Start up request queue thread */
 
-    mRequestThread = new RequestThread(this, device);
+    mRequestThread = new RequestThread(this, mStatusTracker, device);
     res = mRequestThread->run(String8::format("C3Dev-%d-ReqQueue", mId).string());
     if (res != OK) {
         SET_ERR_L("Unable to start request queue thread: %s (%d)",
@@ -175,81 +187,130 @@
 
     mDeviceInfo = info.static_camera_characteristics;
     mHal3Device = device;
-    mStatus = STATUS_IDLE;
+    mStatus = STATUS_UNCONFIGURED;
     mNextStreamId = 0;
     mNeedConfig = true;
+    mPauseStateNotify = false;
 
     return OK;
 }
 
 status_t Camera3Device::disconnect() {
     ATRACE_CALL();
-    Mutex::Autolock l(mLock);
+    Mutex::Autolock il(mInterfaceLock);
 
     ALOGV("%s: E", __FUNCTION__);
 
     status_t res = OK;
-    if (mStatus == STATUS_UNINITIALIZED) return res;
 
-    if (mStatus == STATUS_ACTIVE ||
-            (mStatus == STATUS_ERROR && mRequestThread != NULL)) {
-        res = mRequestThread->clearRepeatingRequests();
-        if (res != OK) {
-            SET_ERR_L("Can't stop streaming");
-            // Continue to close device even in case of error
-        } else {
-            res = waitUntilDrainedLocked();
+    {
+        Mutex::Autolock l(mLock);
+        if (mStatus == STATUS_UNINITIALIZED) return res;
+
+        if (mStatus == STATUS_ACTIVE ||
+                (mStatus == STATUS_ERROR && mRequestThread != NULL)) {
+            res = mRequestThread->clearRepeatingRequests();
             if (res != OK) {
-                SET_ERR_L("Timeout waiting for HAL to drain");
+                SET_ERR_L("Can't stop streaming");
                 // Continue to close device even in case of error
+            } else {
+                res = waitUntilStateThenRelock(/*active*/ false, kShutdownTimeout);
+                if (res != OK) {
+                    SET_ERR_L("Timeout waiting for HAL to drain");
+                    // Continue to close device even in case of error
+                }
             }
         }
-    }
-    assert(mStatus == STATUS_IDLE || mStatus == STATUS_ERROR);
 
-    if (mStatus == STATUS_ERROR) {
-        CLOGE("Shutting down in an error state");
-    }
-
-    if (mRequestThread != NULL) {
-        mRequestThread->requestExit();
-    }
-
-    mOutputStreams.clear();
-    mInputStream.clear();
-
-    if (mRequestThread != NULL) {
-        if (mStatus != STATUS_ERROR) {
-            // HAL may be in a bad state, so waiting for request thread
-            // (which may be stuck in the HAL processCaptureRequest call)
-            // could be dangerous.
-            mRequestThread->join();
+        if (mStatus == STATUS_ERROR) {
+            CLOGE("Shutting down in an error state");
         }
+
+        if (mStatusTracker != NULL) {
+            mStatusTracker->requestExit();
+        }
+
+        if (mRequestThread != NULL) {
+            mRequestThread->requestExit();
+        }
+
+        mOutputStreams.clear();
+        mInputStream.clear();
+    }
+
+    // Joining done without holding mLock, otherwise deadlocks may ensue
+    // as the threads try to access parent state
+    if (mRequestThread != NULL && mStatus != STATUS_ERROR) {
+        // HAL may be in a bad state, so waiting for request thread
+        // (which may be stuck in the HAL processCaptureRequest call)
+        // could be dangerous.
+        mRequestThread->join();
+    }
+
+    if (mStatusTracker != NULL) {
+        mStatusTracker->join();
+    }
+
+    {
+        Mutex::Autolock l(mLock);
+
         mRequestThread.clear();
-    }
+        mStatusTracker.clear();
 
-    if (mHal3Device != NULL) {
-        mHal3Device->common.close(&mHal3Device->common);
-        mHal3Device = NULL;
-    }
+        if (mHal3Device != NULL) {
+            mHal3Device->common.close(&mHal3Device->common);
+            mHal3Device = NULL;
+        }
 
-    mStatus = STATUS_UNINITIALIZED;
+        mStatus = STATUS_UNINITIALIZED;
+    }
 
     ALOGV("%s: X", __FUNCTION__);
     return res;
 }
 
+// For dumping/debugging only -
+// try to acquire a lock a few times, eventually give up to proceed with
+// debug/dump operations
+bool Camera3Device::tryLockSpinRightRound(Mutex& lock) {
+    bool gotLock = false;
+    for (size_t i = 0; i < kDumpLockAttempts; ++i) {
+        if (lock.tryLock() == NO_ERROR) {
+            gotLock = true;
+            break;
+        } else {
+            usleep(kDumpSleepDuration);
+        }
+    }
+    return gotLock;
+}
+
 status_t Camera3Device::dump(int fd, const Vector<String16> &args) {
     ATRACE_CALL();
     (void)args;
+
+    // Try to lock, but continue in case of failure (to avoid blocking in
+    // deadlocks)
+    bool gotInterfaceLock = tryLockSpinRightRound(mInterfaceLock);
+    bool gotLock = tryLockSpinRightRound(mLock);
+
+    ALOGW_IF(!gotInterfaceLock,
+            "Camera %d: %s: Unable to lock interface lock, proceeding anyway",
+            mId, __FUNCTION__);
+    ALOGW_IF(!gotLock,
+            "Camera %d: %s: Unable to lock main lock, proceeding anyway",
+            mId, __FUNCTION__);
+
     String8 lines;
 
     const char *status =
             mStatus == STATUS_ERROR         ? "ERROR" :
             mStatus == STATUS_UNINITIALIZED ? "UNINITIALIZED" :
-            mStatus == STATUS_IDLE          ? "IDLE" :
+            mStatus == STATUS_UNCONFIGURED  ? "UNCONFIGURED" :
+            mStatus == STATUS_CONFIGURED    ? "CONFIGURED" :
             mStatus == STATUS_ACTIVE        ? "ACTIVE" :
             "Unknown";
+
     lines.appendFormat("    Device status: %s\n", status);
     if (mStatus == STATUS_ERROR) {
         lines.appendFormat("    Error cause: %s\n", mErrorCause.string());
@@ -285,7 +346,7 @@
         lines = String8("    Last request sent:\n");
         write(fd, lines.string(), lines.size());
 
-        CameraMetadata lastRequest = getLatestRequest();
+        CameraMetadata lastRequest = getLatestRequestLocked();
         lastRequest.dump(fd, /*verbosity*/2, /*indentation*/6);
     }
 
@@ -295,6 +356,9 @@
         mHal3Device->ops->dump(mHal3Device, fd);
     }
 
+    if (gotLock) mLock.unlock();
+    if (gotInterfaceLock) mInterfaceLock.unlock();
+
     return OK;
 }
 
@@ -311,6 +375,8 @@
 
 status_t Camera3Device::capture(CameraMetadata &request) {
     ATRACE_CALL();
+    status_t res;
+    Mutex::Autolock il(mInterfaceLock);
     Mutex::Autolock l(mLock);
 
     // TODO: take ownership of the request
@@ -322,7 +388,9 @@
         case STATUS_UNINITIALIZED:
             CLOGE("Device not initialized");
             return INVALID_OPERATION;
-        case STATUS_IDLE:
+        case STATUS_UNCONFIGURED:
+            // May be lazily configuring streams, will check during setup
+        case STATUS_CONFIGURED:
         case STATUS_ACTIVE:
             // OK
             break;
@@ -337,12 +405,23 @@
         return BAD_VALUE;
     }
 
-    return mRequestThread->queueRequest(newRequest);
+    res = mRequestThread->queueRequest(newRequest);
+    if (res == OK) {
+        res = waitUntilStateThenRelock(/*active*/ true, kActiveTimeout);
+        if (res != OK) {
+            SET_ERR_L("Can't transition to active in %f seconds!",
+                    kActiveTimeout/1e9);
+        }
+        ALOGV("Camera %d: Capture request enqueued", mId);
+    }
+    return res;
 }
 
 
 status_t Camera3Device::setStreamingRequest(const CameraMetadata &request) {
     ATRACE_CALL();
+    status_t res;
+    Mutex::Autolock il(mInterfaceLock);
     Mutex::Autolock l(mLock);
 
     switch (mStatus) {
@@ -352,7 +431,9 @@
         case STATUS_UNINITIALIZED:
             CLOGE("Device not initialized");
             return INVALID_OPERATION;
-        case STATUS_IDLE:
+        case STATUS_UNCONFIGURED:
+            // May be lazily configuring streams, will check during setup
+        case STATUS_CONFIGURED:
         case STATUS_ACTIVE:
             // OK
             break;
@@ -370,7 +451,16 @@
     RequestList newRepeatingRequests;
     newRepeatingRequests.push_back(newRepeatingRequest);
 
-    return mRequestThread->setRepeatingRequests(newRepeatingRequests);
+    res = mRequestThread->setRepeatingRequests(newRepeatingRequests);
+    if (res == OK) {
+        res = waitUntilStateThenRelock(/*active*/ true, kActiveTimeout);
+        if (res != OK) {
+            SET_ERR_L("Can't transition to active in %f seconds!",
+                    kActiveTimeout/1e9);
+        }
+        ALOGV("Camera %d: Repeating request set", mId);
+    }
+    return res;
 }
 
 
@@ -378,12 +468,16 @@
         const CameraMetadata &request) {
     status_t res;
 
-    if (mStatus == STATUS_IDLE) {
+    if (mStatus == STATUS_UNCONFIGURED || mNeedConfig) {
         res = configureStreamsLocked();
         if (res != OK) {
             SET_ERR_L("Can't set up streams: %s (%d)", strerror(-res), res);
             return NULL;
         }
+        if (mStatus == STATUS_UNCONFIGURED) {
+            CLOGE("No streams configured");
+            return NULL;
+        }
     }
 
     sp<CaptureRequest> newRequest = createCaptureRequest(request);
@@ -392,6 +486,7 @@
 
 status_t Camera3Device::clearStreamingRequest() {
     ATRACE_CALL();
+    Mutex::Autolock il(mInterfaceLock);
     Mutex::Autolock l(mLock);
 
     switch (mStatus) {
@@ -401,7 +496,8 @@
         case STATUS_UNINITIALIZED:
             CLOGE("Device not initialized");
             return INVALID_OPERATION;
-        case STATUS_IDLE:
+        case STATUS_UNCONFIGURED:
+        case STATUS_CONFIGURED:
         case STATUS_ACTIVE:
             // OK
             break;
@@ -409,12 +505,13 @@
             SET_ERR_L("Unexpected status: %d", mStatus);
             return INVALID_OPERATION;
     }
-
+    ALOGV("Camera %d: Clearing repeating request", mId);
     return mRequestThread->clearRepeatingRequests();
 }
 
 status_t Camera3Device::waitUntilRequestReceived(int32_t requestId, nsecs_t timeout) {
     ATRACE_CALL();
+    Mutex::Autolock il(mInterfaceLock);
 
     return mRequestThread->waitUntilRequestProcessed(requestId, timeout);
 }
@@ -422,7 +519,10 @@
 status_t Camera3Device::createInputStream(
         uint32_t width, uint32_t height, int format, int *id) {
     ATRACE_CALL();
+    Mutex::Autolock il(mInterfaceLock);
     Mutex::Autolock l(mLock);
+    ALOGV("Camera %d: Creating new input stream %d: %d x %d, format %d",
+            mId, mNextStreamId, width, height, format);
 
     status_t res;
     bool wasActive = false;
@@ -434,26 +534,24 @@
         case STATUS_UNINITIALIZED:
             ALOGE("%s: Device not initialized", __FUNCTION__);
             return INVALID_OPERATION;
-        case STATUS_IDLE:
+        case STATUS_UNCONFIGURED:
+        case STATUS_CONFIGURED:
             // OK
             break;
         case STATUS_ACTIVE:
             ALOGV("%s: Stopping activity to reconfigure streams", __FUNCTION__);
-            mRequestThread->setPaused(true);
-            res = waitUntilDrainedLocked();
+            res = internalPauseAndWaitLocked();
             if (res != OK) {
-                ALOGE("%s: Can't pause captures to reconfigure streams!",
-                        __FUNCTION__);
-                mStatus = STATUS_ERROR;
+                SET_ERR_L("Can't pause captures to reconfigure streams!");
                 return res;
             }
             wasActive = true;
             break;
         default:
-            ALOGE("%s: Unexpected status: %d", __FUNCTION__, mStatus);
+            SET_ERR_L("%s: Unexpected status: %d", mStatus);
             return INVALID_OPERATION;
     }
-    assert(mStatus == STATUS_IDLE);
+    assert(mStatus != STATUS_ACTIVE);
 
     if (mInputStream != 0) {
         ALOGE("%s: Cannot create more than 1 input stream", __FUNCTION__);
@@ -462,6 +560,7 @@
 
     sp<Camera3InputStream> newStream = new Camera3InputStream(mNextStreamId,
                 width, height, format);
+    newStream->setStatusTracker(mStatusTracker);
 
     mInputStream = newStream;
 
@@ -476,9 +575,10 @@
                     __FUNCTION__, mNextStreamId, strerror(-res), res);
             return res;
         }
-        mRequestThread->setPaused(false);
+        internalResumeLocked();
     }
 
+    ALOGV("Camera %d: Created input stream", mId);
     return OK;
 }
 
@@ -490,7 +590,10 @@
             int *id,
             sp<Camera3ZslStream>* zslStream) {
     ATRACE_CALL();
+    Mutex::Autolock il(mInterfaceLock);
     Mutex::Autolock l(mLock);
+    ALOGV("Camera %d: Creating ZSL stream %d: %d x %d, depth %d",
+            mId, mNextStreamId, width, height, depth);
 
     status_t res;
     bool wasActive = false;
@@ -502,26 +605,24 @@
         case STATUS_UNINITIALIZED:
             ALOGE("%s: Device not initialized", __FUNCTION__);
             return INVALID_OPERATION;
-        case STATUS_IDLE:
+        case STATUS_UNCONFIGURED:
+        case STATUS_CONFIGURED:
             // OK
             break;
         case STATUS_ACTIVE:
             ALOGV("%s: Stopping activity to reconfigure streams", __FUNCTION__);
-            mRequestThread->setPaused(true);
-            res = waitUntilDrainedLocked();
+            res = internalPauseAndWaitLocked();
             if (res != OK) {
-                ALOGE("%s: Can't pause captures to reconfigure streams!",
-                        __FUNCTION__);
-                mStatus = STATUS_ERROR;
+                SET_ERR_L("Can't pause captures to reconfigure streams!");
                 return res;
             }
             wasActive = true;
             break;
         default:
-            ALOGE("%s: Unexpected status: %d", __FUNCTION__, mStatus);
+            SET_ERR_L("Unexpected status: %d", mStatus);
             return INVALID_OPERATION;
     }
-    assert(mStatus == STATUS_IDLE);
+    assert(mStatus != STATUS_ACTIVE);
 
     if (mInputStream != 0) {
         ALOGE("%s: Cannot create more than 1 input stream", __FUNCTION__);
@@ -530,6 +631,7 @@
 
     sp<Camera3ZslStream> newStream = new Camera3ZslStream(mNextStreamId,
                 width, height, depth);
+    newStream->setStatusTracker(mStatusTracker);
 
     res = mOutputStreams.add(mNextStreamId, newStream);
     if (res < 0) {
@@ -551,16 +653,20 @@
                     __FUNCTION__, mNextStreamId, strerror(-res), res);
             return res;
         }
-        mRequestThread->setPaused(false);
+        internalResumeLocked();
     }
 
+    ALOGV("Camera %d: Created ZSL stream", mId);
     return OK;
 }
 
 status_t Camera3Device::createStream(sp<ANativeWindow> consumer,
         uint32_t width, uint32_t height, int format, size_t size, int *id) {
     ATRACE_CALL();
+    Mutex::Autolock il(mInterfaceLock);
     Mutex::Autolock l(mLock);
+    ALOGV("Camera %d: Creating new stream %d: %d x %d, format %d, size %d",
+            mId, mNextStreamId, width, height, format, size);
 
     status_t res;
     bool wasActive = false;
@@ -572,16 +678,15 @@
         case STATUS_UNINITIALIZED:
             CLOGE("Device not initialized");
             return INVALID_OPERATION;
-        case STATUS_IDLE:
+        case STATUS_UNCONFIGURED:
+        case STATUS_CONFIGURED:
             // OK
             break;
         case STATUS_ACTIVE:
             ALOGV("%s: Stopping activity to reconfigure streams", __FUNCTION__);
-            mRequestThread->setPaused(true);
-            res = waitUntilDrainedLocked();
+            res = internalPauseAndWaitLocked();
             if (res != OK) {
-                ALOGE("%s: Can't pause captures to reconfigure streams!",
-                        __FUNCTION__);
+                SET_ERR_L("Can't pause captures to reconfigure streams!");
                 return res;
             }
             wasActive = true;
@@ -590,7 +695,7 @@
             SET_ERR_L("Unexpected status: %d", mStatus);
             return INVALID_OPERATION;
     }
-    assert(mStatus == STATUS_IDLE);
+    assert(mStatus != STATUS_ACTIVE);
 
     sp<Camera3OutputStream> newStream;
     if (format == HAL_PIXEL_FORMAT_BLOB) {
@@ -600,6 +705,7 @@
         newStream = new Camera3OutputStream(mNextStreamId, consumer,
                 width, height, format);
     }
+    newStream->setStatusTracker(mStatusTracker);
 
     res = mOutputStreams.add(mNextStreamId, newStream);
     if (res < 0) {
@@ -619,9 +725,9 @@
                     mNextStreamId, strerror(-res), res);
             return res;
         }
-        mRequestThread->setPaused(false);
+        internalResumeLocked();
     }
-
+    ALOGV("Camera %d: Created new stream", mId);
     return OK;
 }
 
@@ -637,6 +743,7 @@
 status_t Camera3Device::getStreamInfo(int id,
         uint32_t *width, uint32_t *height, uint32_t *format) {
     ATRACE_CALL();
+    Mutex::Autolock il(mInterfaceLock);
     Mutex::Autolock l(mLock);
 
     switch (mStatus) {
@@ -646,7 +753,8 @@
         case STATUS_UNINITIALIZED:
             CLOGE("Device not initialized!");
             return INVALID_OPERATION;
-        case STATUS_IDLE:
+        case STATUS_UNCONFIGURED:
+        case STATUS_CONFIGURED:
         case STATUS_ACTIVE:
             // OK
             break;
@@ -671,6 +779,7 @@
 status_t Camera3Device::setStreamTransform(int id,
         int transform) {
     ATRACE_CALL();
+    Mutex::Autolock il(mInterfaceLock);
     Mutex::Autolock l(mLock);
 
     switch (mStatus) {
@@ -680,7 +789,8 @@
         case STATUS_UNINITIALIZED:
             CLOGE("Device not initialized");
             return INVALID_OPERATION;
-        case STATUS_IDLE:
+        case STATUS_UNCONFIGURED:
+        case STATUS_CONFIGURED:
         case STATUS_ACTIVE:
             // OK
             break;
@@ -701,6 +811,7 @@
 
 status_t Camera3Device::deleteStream(int id) {
     ATRACE_CALL();
+    Mutex::Autolock il(mInterfaceLock);
     Mutex::Autolock l(mLock);
     status_t res;
 
@@ -708,7 +819,7 @@
 
     // CameraDevice semantics require device to already be idle before
     // deleteStream is called, unlike for createStream.
-    if (mStatus != STATUS_IDLE) {
+    if (mStatus == STATUS_ACTIVE) {
         ALOGV("%s: Camera %d: Device not idle", __FUNCTION__, mId);
         return -EBUSY;
     }
@@ -752,6 +863,7 @@
         CameraMetadata *request) {
     ATRACE_CALL();
     ALOGV("%s: for template %d", __FUNCTION__, templateId);
+    Mutex::Autolock il(mInterfaceLock);
     Mutex::Autolock l(mLock);
 
     switch (mStatus) {
@@ -761,7 +873,8 @@
         case STATUS_UNINITIALIZED:
             CLOGE("Device is not initialized!");
             return INVALID_OPERATION;
-        case STATUS_IDLE:
+        case STATUS_UNCONFIGURED:
+        case STATUS_CONFIGURED:
         case STATUS_ACTIVE:
             // OK
             break;
@@ -787,61 +900,88 @@
 
 status_t Camera3Device::waitUntilDrained() {
     ATRACE_CALL();
+    Mutex::Autolock il(mInterfaceLock);
     Mutex::Autolock l(mLock);
 
-    return waitUntilDrainedLocked();
-}
-
-status_t Camera3Device::waitUntilDrainedLocked() {
-    ATRACE_CALL();
-    status_t res;
-
     switch (mStatus) {
         case STATUS_UNINITIALIZED:
-        case STATUS_IDLE:
+        case STATUS_UNCONFIGURED:
             ALOGV("%s: Already idle", __FUNCTION__);
             return OK;
+        case STATUS_CONFIGURED:
+            // To avoid race conditions, check with tracker to be sure
         case STATUS_ERROR:
         case STATUS_ACTIVE:
-            // Need to shut down
+            // Need to verify shut down
             break;
         default:
             SET_ERR_L("Unexpected status: %d",mStatus);
             return INVALID_OPERATION;
     }
 
-    if (mRequestThread != NULL) {
-        res = mRequestThread->waitUntilPaused(kShutdownTimeout);
-        if (res != OK) {
-            SET_ERR_L("Can't stop request thread in %f seconds!",
-                    kShutdownTimeout/1e9);
-            return res;
-        }
-    }
-    if (mInputStream != NULL) {
-        res = mInputStream->waitUntilIdle(kShutdownTimeout);
-        if (res != OK) {
-            SET_ERR_L("Can't idle input stream %d in %f seconds!",
-                    mInputStream->getId(), kShutdownTimeout/1e9);
-            return res;
-        }
-    }
-    for (size_t i = 0; i < mOutputStreams.size(); i++) {
-        res = mOutputStreams.editValueAt(i)->waitUntilIdle(kShutdownTimeout);
-        if (res != OK) {
-            SET_ERR_L("Can't idle output stream %d in %f seconds!",
-                    mOutputStreams.keyAt(i), kShutdownTimeout/1e9);
-            return res;
-        }
+    ALOGV("%s: Camera %d: Waiting until idle", __FUNCTION__, mId);
+    status_t res = waitUntilStateThenRelock(/*active*/ false, kShutdownTimeout);
+    return res;
+}
+
+// Pause to reconfigure
+status_t Camera3Device::internalPauseAndWaitLocked() {
+    mRequestThread->setPaused(true);
+    mPauseStateNotify = true;
+
+    ALOGV("%s: Camera %d: Internal wait until idle", __FUNCTION__, mId);
+    status_t res = waitUntilStateThenRelock(/*active*/ false, kShutdownTimeout);
+    if (res != OK) {
+        SET_ERR_L("Can't idle device in %f seconds!",
+                kShutdownTimeout/1e9);
     }
 
-    if (mStatus != STATUS_ERROR) {
-        mStatus = STATUS_IDLE;
-    }
+    return res;
+}
 
+// Resume after internalPauseAndWaitLocked
+status_t Camera3Device::internalResumeLocked() {
+    status_t res;
+
+    mRequestThread->setPaused(false);
+
+    res = waitUntilStateThenRelock(/*active*/ true, kActiveTimeout);
+    if (res != OK) {
+        SET_ERR_L("Can't transition to active in %f seconds!",
+                kActiveTimeout/1e9);
+    }
+    mPauseStateNotify = false;
     return OK;
 }
 
+status_t Camera3Device::waitUntilStateThenRelock(bool active,
+        nsecs_t timeout) {
+    status_t res = OK;
+    if (active == (mStatus == STATUS_ACTIVE)) {
+        // Desired state already reached
+        return res;
+    }
+
+    bool stateSeen = false;
+    do {
+        mRecentStatusUpdates.clear();
+
+        res = mStatusChanged.waitRelative(mLock, timeout);
+        if (res != OK) break;
+
+        // Check state change history during wait
+        for (size_t i = 0; i < mRecentStatusUpdates.size(); i++) {
+            if (active == (mRecentStatusUpdates[i] == STATUS_ACTIVE) ) {
+                stateSeen = true;
+                break;
+            }
+        }
+    } while (!stateSeen);
+
+    return res;
+}
+
+
 status_t Camera3Device::setNotifyCallback(NotificationListener *listener) {
     ATRACE_CALL();
     Mutex::Autolock l(mOutputLock);
@@ -893,6 +1033,7 @@
 
 status_t Camera3Device::triggerAutofocus(uint32_t id) {
     ATRACE_CALL();
+    Mutex::Autolock il(mInterfaceLock);
 
     ALOGV("%s: Triggering autofocus, id %d", __FUNCTION__, id);
     // Mix-in this trigger into the next request and only the next request.
@@ -913,6 +1054,7 @@
 
 status_t Camera3Device::triggerCancelAutofocus(uint32_t id) {
     ATRACE_CALL();
+    Mutex::Autolock il(mInterfaceLock);
 
     ALOGV("%s: Triggering cancel autofocus, id %d", __FUNCTION__, id);
     // Mix-in this trigger into the next request and only the next request.
@@ -933,6 +1075,7 @@
 
 status_t Camera3Device::triggerPrecaptureMetering(uint32_t id) {
     ATRACE_CALL();
+    Mutex::Autolock il(mInterfaceLock);
 
     ALOGV("%s: Triggering precapture metering, id %d", __FUNCTION__, id);
     // Mix-in this trigger into the next request and only the next request.
@@ -963,7 +1106,7 @@
 status_t Camera3Device::flush() {
     ATRACE_CALL();
     ALOGV("%s: Camera %d: Flushing all requests", __FUNCTION__, mId);
-
+    Mutex::Autolock il(mInterfaceLock);
     Mutex::Autolock l(mLock);
 
     mRequestThread->clear();
@@ -971,6 +1114,41 @@
 }
 
 /**
+ * Methods called by subclasses
+ */
+
+void Camera3Device::notifyStatus(bool idle) {
+    {
+        // Need mLock to safely update state and synchronize to current
+        // state of methods in flight.
+        Mutex::Autolock l(mLock);
+        // We can get various system-idle notices from the status tracker
+        // while starting up. Only care about them if we've actually sent
+        // in some requests recently.
+        if (mStatus != STATUS_ACTIVE && mStatus != STATUS_CONFIGURED) {
+            return;
+        }
+        ALOGV("%s: Camera %d: Now %s", __FUNCTION__, mId,
+                idle ? "idle" : "active");
+        mStatus = idle ? STATUS_CONFIGURED : STATUS_ACTIVE;
+        mRecentStatusUpdates.add(mStatus);
+        mStatusChanged.signal();
+
+        // Skip notifying listener if we're doing some user-transparent
+        // state changes
+        if (mPauseStateNotify) return;
+    }
+    NotificationListener *listener;
+    {
+        Mutex::Autolock l(mOutputLock);
+        listener = mListener;
+    }
+    if (idle && listener != NULL) {
+        listener->notifyIdle();
+    }
+}
+
+/**
  * Camera3Device private methods
  */
 
@@ -1046,18 +1224,18 @@
     ATRACE_CALL();
     status_t res;
 
-    if (mStatus != STATUS_IDLE) {
+    if (mStatus != STATUS_UNCONFIGURED && mStatus != STATUS_CONFIGURED) {
         CLOGE("Not idle");
         return INVALID_OPERATION;
     }
 
     if (!mNeedConfig) {
         ALOGV("%s: Skipping config, no stream changes", __FUNCTION__);
-        mStatus = STATUS_ACTIVE;
         return OK;
     }
 
     // Start configuring the streams
+    ALOGV("%s: Camera %d: Starting stream configuration", __FUNCTION__, mId);
 
     camera3_stream_configuration config;
 
@@ -1139,11 +1317,18 @@
     // across configure_streams() calls
     mRequestThread->configurationComplete();
 
-    // Finish configuring the streams lazily on first reference
+    // Update device state
 
-    mStatus = STATUS_ACTIVE;
     mNeedConfig = false;
 
+    if (config.num_streams > 0) {
+        mStatus = STATUS_CONFIGURED;
+    } else {
+        mStatus = STATUS_UNCONFIGURED;
+    }
+
+    ALOGV("%s: Camera %d: Stream configuration complete", __FUNCTION__, mId);
+
     return OK;
 }
 
@@ -1190,12 +1375,12 @@
  */
 
 status_t Camera3Device::registerInFlight(int32_t frameNumber,
-        int32_t numBuffers) {
+        int32_t requestId, int32_t numBuffers) {
     ATRACE_CALL();
     Mutex::Autolock l(mInFlightLock);
 
     ssize_t res;
-    res = mInFlightMap.add(frameNumber, InFlightRequest(numBuffers));
+    res = mInFlightMap.add(frameNumber, InFlightRequest(requestId, numBuffers));
     if (res < 0) return res;
 
     return OK;
@@ -1232,7 +1417,12 @@
         }
         InFlightRequest &request = mInFlightMap.editValueAt(idx);
         timestamp = request.captureTimestamp;
-        if (timestamp == 0) {
+        /**
+         * One of the following must happen before it's legal to call process_capture_result:
+         * - CAMERA3_MSG_SHUTTER (expected during normal operation)
+         * - CAMERA3_MSG_ERROR (expected during flush)
+         */
+        if (request.requestStatus == OK && timestamp == 0) {
             SET_ERR("Called before shutter notify for frame %d",
                     frameNumber);
             return;
@@ -1316,7 +1506,7 @@
         // Note: stream may be deallocated at this point, if this buffer was the
         // last reference to it.
         if (res != OK) {
-            SET_ERR("Can't return buffer %d for frame %d to its stream: "
+            ALOGE("Can't return buffer %d for frame %d to its stream: "
                     " %s (%d)", i, frameNumber, strerror(-res), res);
         }
     }
@@ -1356,6 +1546,16 @@
             ALOGV("Camera %d: %s: HAL error, frame %d, stream %d: %d",
                     mId, __FUNCTION__, msg->message.error.frame_number,
                     streamId, msg->message.error.error_code);
+
+            // Set request error status for the request in the in-flight tracking
+            {
+                Mutex::Autolock l(mInFlightLock);
+                ssize_t idx = mInFlightMap.indexOfKey(msg->message.error.frame_number);
+                if (idx >= 0) {
+                    mInFlightMap.editValueAt(idx).requestStatus = msg->message.error.error_code;
+                }
+            }
+
             if (listener != NULL) {
                 listener->notifyError(msg->message.error.error_code,
                         msg->message.error.frame_number, streamId);
@@ -1378,12 +1578,17 @@
                 mNextShutterFrameNumber++;
             }
 
+            int32_t requestId = -1;
+
             // Set timestamp for the request in the in-flight tracking
+            // and get the request ID to send upstream
             {
                 Mutex::Autolock l(mInFlightLock);
                 idx = mInFlightMap.indexOfKey(frameNumber);
                 if (idx >= 0) {
-                    mInFlightMap.editValueAt(idx).captureTimestamp = timestamp;
+                    InFlightRequest &r = mInFlightMap.editValueAt(idx);
+                    r.captureTimestamp = timestamp;
+                    requestId = r.requestId;
                 }
             }
             if (idx < 0) {
@@ -1391,11 +1596,11 @@
                         frameNumber);
                 break;
             }
-            ALOGVV("Camera %d: %s: Shutter fired for frame %d at %lld",
-                    mId, __FUNCTION__, frameNumber, timestamp);
+            ALOGVV("Camera %d: %s: Shutter fired for frame %d (id %d) at %lld",
+                    mId, __FUNCTION__, frameNumber, requestId, timestamp);
             // Call listener, if any
             if (listener != NULL) {
-                listener->notifyShutter(frameNumber, timestamp);
+                listener->notifyShutter(requestId, timestamp);
             }
             break;
         }
@@ -1405,40 +1610,15 @@
     }
 }
 
-CameraMetadata Camera3Device::getLatestRequest() {
+CameraMetadata Camera3Device::getLatestRequestLocked() {
     ALOGV("%s", __FUNCTION__);
 
-    bool locked = false;
-
-    /**
-     * Why trylock instead of autolock?
-     *
-     * We want to be able to call this function from
-     * dumpsys, which often happens during deadlocks.
-     */
-    for (size_t i = 0; i < kDumpLockAttempts; ++i) {
-        if (mLock.tryLock() == NO_ERROR) {
-            locked = true;
-            break;
-        } else {
-            usleep(kDumpSleepDuration);
-        }
-    }
-
-    if (!locked) {
-        ALOGW("%s: Possible deadlock detected", __FUNCTION__);
-    }
-
     CameraMetadata retVal;
 
     if (mRequestThread != NULL) {
         retVal = mRequestThread->getLatestRequest();
     }
 
-    if (locked) {
-        mLock.unlock();
-    }
-
     return retVal;
 }
 
@@ -1447,9 +1627,11 @@
  */
 
 Camera3Device::RequestThread::RequestThread(wp<Camera3Device> parent,
+        sp<StatusTracker> statusTracker,
         camera3_device_t *hal3Device) :
         Thread(false),
         mParent(parent),
+        mStatusTracker(statusTracker),
         mHal3Device(hal3Device),
         mId(getId(parent)),
         mReconfigured(false),
@@ -1457,6 +1639,7 @@
         mPaused(true),
         mFrameNumber(0),
         mLatestRequestId(NAME_NOT_FOUND) {
+    mStatusId = statusTracker->addComponent();
 }
 
 void Camera3Device::RequestThread::configurationComplete() {
@@ -1562,19 +1745,6 @@
     mDoPauseSignal.signal();
 }
 
-status_t Camera3Device::RequestThread::waitUntilPaused(nsecs_t timeout) {
-    ATRACE_CALL();
-    status_t res;
-    Mutex::Autolock l(mPauseLock);
-    while (!mPaused) {
-        res = mPausedSignal.waitRelative(mPauseLock, timeout);
-        if (res == TIMED_OUT) {
-            return res;
-        }
-    }
-    return OK;
-}
-
 status_t Camera3Device::RequestThread::waitUntilRequestProcessed(
         int32_t requestId, nsecs_t timeout) {
     Mutex::Autolock l(mLatestRequestMutex);
@@ -1591,7 +1761,13 @@
     return OK;
 }
 
-
+void Camera3Device::RequestThread::requestExit() {
+    // Call parent to set up shutdown
+    Thread::requestExit();
+    // Then exit from any possible waits
+    mDoPauseSignal.signal();
+    mRequestSignal.signal();
+}
 
 bool Camera3Device::RequestThread::threadLoop() {
 
@@ -1613,6 +1789,18 @@
     camera3_capture_request_t request = camera3_capture_request_t();
     Vector<camera3_stream_buffer_t> outputBuffers;
 
+    // Get the request ID, if any
+    int requestId;
+    camera_metadata_entry_t requestIdEntry =
+            nextRequest->mSettings.find(ANDROID_REQUEST_ID);
+    if (requestIdEntry.count > 0) {
+        requestId = requestIdEntry.data.i32[0];
+    } else {
+        ALOGW("%s: Did not have android.request.id set in the request",
+                __FUNCTION__);
+        requestId = NAME_NOT_FOUND;
+    }
+
     // Insert any queued triggers (before metadata is locked)
     int32_t triggerCount;
     res = insertTriggers(nextRequest);
@@ -1630,6 +1818,19 @@
     // If the request is the same as last, or we had triggers last time
     if (mPrevRequest != nextRequest || triggersMixedIn) {
         /**
+         * HAL workaround:
+         * Insert a dummy trigger ID if a trigger is set but no trigger ID is set.
+         */
+        res = addDummyTriggerIds(nextRequest);
+        if (res != OK) {
+            SET_ERR("RequestThread: Unable to insert dummy trigger IDs "
+                    "(capture request %d, HAL device: %s (%d)",
+                    (mFrameNumber+1), strerror(-res), res);
+            cleanUpFailedRequest(request, nextRequest, outputBuffers);
+            return false;
+        }
+
+        /**
          * The request should be presorted so accesses in HAL
          *   are O(logn). Sidenote, sorting a sorted metadata is nop.
          */
@@ -1666,7 +1867,7 @@
         request.input_buffer = &inputBuffer;
         res = nextRequest->mInputStream->getInputBuffer(&inputBuffer);
         if (res != OK) {
-            SET_ERR("RequestThread: Can't get input buffer, skipping request:"
+            ALOGE("RequestThread: Can't get input buffer, skipping request:"
                     " %s (%d)", strerror(-res), res);
             cleanUpFailedRequest(request, nextRequest, outputBuffers);
             return true;
@@ -1682,8 +1883,8 @@
         res = nextRequest->mOutputStreams.editItemAt(i)->
                 getBuffer(&outputBuffers.editItemAt(i));
         if (res != OK) {
-            SET_ERR("RequestThread: Can't get output buffer, skipping request:"
-                    "%s (%d)", strerror(-res), res);
+            ALOGE("RequestThread: Can't get output buffer, skipping request:"
+                    " %s (%d)", strerror(-res), res);
             cleanUpFailedRequest(request, nextRequest, outputBuffers);
             return true;
         }
@@ -1700,7 +1901,7 @@
         return false;
     }
 
-    res = parent->registerInFlight(request.frame_number,
+    res = parent->registerInFlight(request.frame_number, requestId,
             request.num_output_buffers);
     if (res != OK) {
         SET_ERR("RequestThread: Unable to register new in-flight request:"
@@ -1749,16 +1950,7 @@
     {
         Mutex::Autolock al(mLatestRequestMutex);
 
-        camera_metadata_entry_t requestIdEntry =
-                nextRequest->mSettings.find(ANDROID_REQUEST_ID);
-        if (requestIdEntry.count > 0) {
-            mLatestRequestId = requestIdEntry.data.i32[0];
-        } else {
-            ALOGW("%s: Did not have android.request.id set in the request",
-                  __FUNCTION__);
-            mLatestRequestId = NAME_NOT_FOUND;
-        }
-
+        mLatestRequestId = requestId;
         mLatestRequestSignal.signal();
     }
 
@@ -1777,8 +1969,6 @@
         }
     }
 
-
-
     return true;
 }
 
@@ -1836,12 +2026,17 @@
 
         res = mRequestSignal.waitRelative(mRequestLock, kRequestTimeout);
 
-        if (res == TIMED_OUT) {
-            // Signal that we're paused by starvation
+        if ((mRequestQueue.empty() && mRepeatingRequests.empty()) ||
+                exitPending()) {
             Mutex::Autolock pl(mPauseLock);
             if (mPaused == false) {
+                ALOGV("%s: RequestThread: Going idle", __FUNCTION__);
                 mPaused = true;
-                mPausedSignal.signal();
+                // Let the tracker know
+                sp<StatusTracker> statusTracker = mStatusTracker.promote();
+                if (statusTracker != 0) {
+                    statusTracker->markComponentIdle(mStatusId, Fence::NO_FENCE);
+                }
             }
             // Stop waiting for now and let thread management happen
             return NULL;
@@ -1861,6 +2056,13 @@
     // update internal pause state (capture/setRepeatingRequest unpause
     // directly).
     Mutex::Autolock pl(mPauseLock);
+    if (mPaused) {
+        ALOGV("%s: RequestThread: Unpaused", __FUNCTION__);
+        sp<StatusTracker> statusTracker = mStatusTracker.promote();
+        if (statusTracker != 0) {
+            statusTracker->markComponentActive(mStatusId);
+        }
+    }
     mPaused = false;
 
     // Check if we've reconfigured since last time, and reset the preview
@@ -1877,13 +2079,18 @@
     status_t res;
     Mutex::Autolock l(mPauseLock);
     while (mDoPause) {
-        // Signal that we're paused by request
         if (mPaused == false) {
             mPaused = true;
-            mPausedSignal.signal();
+            ALOGV("%s: RequestThread: Paused", __FUNCTION__);
+            // Let the tracker know
+            sp<StatusTracker> statusTracker = mStatusTracker.promote();
+            if (statusTracker != 0) {
+                statusTracker->markComponentIdle(mStatusId, Fence::NO_FENCE);
+            }
         }
+
         res = mDoPauseSignal.waitRelative(mPauseLock, kRequestTimeout);
-        if (res == TIMED_OUT) {
+        if (res == TIMED_OUT || exitPending()) {
             return true;
         }
     }
@@ -1896,8 +2103,16 @@
     // With work to do, mark thread as unpaused.
     // If paused by request (setPaused), don't resume, to avoid
     // extra signaling/waiting overhead to waitUntilPaused
+    mRequestSignal.signal();
     Mutex::Autolock p(mPauseLock);
     if (!mDoPause) {
+        ALOGV("%s: RequestThread: Going active", __FUNCTION__);
+        if (mPaused) {
+            sp<StatusTracker> statusTracker = mStatusTracker.promote();
+            if (statusTracker != 0) {
+                statusTracker->markComponentActive(mStatusId);
+            }
+        }
         mPaused = false;
     }
 }
@@ -2047,6 +2262,40 @@
     return OK;
 }
 
+status_t Camera3Device::RequestThread::addDummyTriggerIds(
+        const sp<CaptureRequest> &request) {
+    // Trigger ID 0 has special meaning in the HAL2 spec, so avoid it here
+    static const int32_t dummyTriggerId = 1;
+    status_t res;
+
+    CameraMetadata &metadata = request->mSettings;
+
+    // If AF trigger is active, insert a dummy AF trigger ID if none already
+    // exists
+    camera_metadata_entry afTrigger = metadata.find(ANDROID_CONTROL_AF_TRIGGER);
+    camera_metadata_entry afId = metadata.find(ANDROID_CONTROL_AF_TRIGGER_ID);
+    if (afTrigger.count > 0 &&
+            afTrigger.data.u8[0] != ANDROID_CONTROL_AF_TRIGGER_IDLE &&
+            afId.count == 0) {
+        res = metadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &dummyTriggerId, 1);
+        if (res != OK) return res;
+    }
+
+    // If AE precapture trigger is active, insert a dummy precapture trigger ID
+    // if none already exists
+    camera_metadata_entry pcTrigger =
+            metadata.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER);
+    camera_metadata_entry pcId = metadata.find(ANDROID_CONTROL_AE_PRECAPTURE_ID);
+    if (pcTrigger.count > 0 &&
+            pcTrigger.data.u8[0] != ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE &&
+            pcId.count == 0) {
+        res = metadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
+                &dummyTriggerId, 1);
+        if (res != OK) return res;
+    }
+
+    return OK;
+}
 
 
 /**
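
The central new synchronization primitive in this file is waitUntilStateThenRelock(): it waits on a condition variable while scanning a history of recent status updates, so that a fast ACTIVE-to-idle (or idle-to-ACTIVE) bounce that happens during the wait is not missed. A condensed sketch of the same idea in standard C++, with toy names and without the SET_ERR/error-state handling (the wait releases the lock, matching the "ThenRelock" behavior):

    #include <chrono>
    #include <condition_variable>
    #include <mutex>
    #include <vector>

    enum class Status { Unconfigured, Configured, Active };

    class StateWaiter {
      public:
        // Called by whoever drives state changes (an idle tracker in the real code).
        void setStatus(Status s) {
            std::lock_guard<std::mutex> lock(mLock);
            mStatus = s;
            mRecentUpdates.push_back(s);
            mChanged.notify_all();
        }

        // Wait until the device is (active == true) or is not (active == false)
        // in the ACTIVE state. Returns false on timeout.
        bool waitForActive(bool active, std::chrono::nanoseconds timeout) {
            std::unique_lock<std::mutex> lock(mLock);
            if (active == (mStatus == Status::Active)) return true;
            bool seen = false;
            while (!seen) {
                mRecentUpdates.clear();
                if (mChanged.wait_for(lock, timeout) == std::cv_status::timeout)
                    return false;
                // Check every transition recorded during the wait, not just the
                // final state, so transient transitions are still observed.
                for (Status s : mRecentUpdates) {
                    if (active == (s == Status::Active)) { seen = true; break; }
                }
            }
            return true;
        }

      private:
        std::mutex mLock;
        std::condition_variable mChanged;
        Status mStatus = Status::Unconfigured;
        std::vector<Status> mRecentUpdates;
    };
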
diff --git a/services/camera/libcameraservice/device3/Camera3Device.h b/services/camera/libcameraservice/device3/Camera3Device.h
index 0b3ad6e..c2b0867 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.h
+++ b/services/camera/libcameraservice/device3/Camera3Device.h
@@ -26,6 +26,7 @@
 #include <hardware/camera3.h>
 
 #include "common/CameraDeviceBase.h"
+#include "device3/StatusTracker.h"
 
 /**
  * Function pointer types with C calling convention to
@@ -126,29 +127,47 @@
 
     virtual status_t flush();
 
+    // Methods called by subclasses
+    void             notifyStatus(bool idle); // updates from StatusTracker
+
   private:
     static const size_t        kDumpLockAttempts  = 10;
     static const size_t        kDumpSleepDuration = 100000; // 0.10 sec
     static const size_t        kInFlightWarnLimit = 20;
     static const nsecs_t       kShutdownTimeout   = 5000000000; // 5 sec
+    static const nsecs_t       kActiveTimeout     = 500000000;  // 500 ms
     struct                     RequestTrigger;
 
+    // A lock to enforce serialization on the input/configure side
+    // of the public interface.
+    // Only locked by public methods inherited from CameraDeviceBase.
+    // Not locked by methods guarded by mOutputLock, since they may act
+    // concurrently to the input/configure side of the interface.
+    // Must be locked before mLock if both will be locked by a method
+    Mutex                      mInterfaceLock;
+
+    // The main lock on internal state
     Mutex                      mLock;
 
+    // Camera device ID
+    const int                  mId;
+
     /**** Scope for mLock ****/
 
-    const int                  mId;
     camera3_device_t          *mHal3Device;
 
     CameraMetadata             mDeviceInfo;
     vendor_tag_query_ops_t     mVendorTagOps;
 
-    enum {
+    enum Status {
         STATUS_ERROR,
         STATUS_UNINITIALIZED,
-        STATUS_IDLE,
+        STATUS_UNCONFIGURED,
+        STATUS_CONFIGURED,
         STATUS_ACTIVE
     }                          mStatus;
+    Vector<Status>             mRecentStatusUpdates;
+    Condition                  mStatusChanged;
 
     // Tracking cause of fatal errors when in STATUS_ERROR
     String8                    mErrorCause;
@@ -162,6 +181,10 @@
     int                        mNextStreamId;
     bool                       mNeedConfig;
 
+    // Whether to send state updates upstream
+    // Pause when doing transparent reconfiguration
+    bool                       mPauseStateNotify;
+
     // Need to hold on to stream references until configure completes.
     Vector<sp<camera3::Camera3StreamInterface> > mDeletedStreams;
 
@@ -181,13 +204,34 @@
      *
      * Takes mLock.
      */
-    virtual CameraMetadata getLatestRequest();
+    virtual CameraMetadata getLatestRequestLocked();
 
     /**
-     * Lock-held version of waitUntilDrained. Will transition to IDLE on
-     * success.
+     * Pause processing and flush everything, but don't tell the clients.
+     * This is for reconfiguring outputs transparently when according to the
+     * CameraDeviceBase interface we shouldn't need to.
+     * Must be called with mLock and mInterfaceLock both held.
      */
-    status_t           waitUntilDrainedLocked();
+    status_t internalPauseAndWaitLocked();
+
+    /**
+     * Resume work after internalPauseAndWaitLocked()
+     * Must be called with mLock and mInterfaceLock both held.
+     */
+    status_t internalResumeLocked();
+
+    /**
+     * Wait until status tracker tells us we've transitioned to the target state
+     * set, which is either ACTIVE when active==true or IDLE (which is any
+     * non-ACTIVE state) when active==false.
+     *
+     * Needs to be called with mLock and mInterfaceLock held. This means there
+     * can be at most one waiter at any given time.
+     *
+     * During the wait mLock is released.
+     *
+     */
+    status_t waitUntilStateThenRelock(bool active, nsecs_t timeout);
 
     /**
      * Do common work for setting up a streaming or single capture request.
@@ -217,6 +261,12 @@
     void               setErrorStateLocked(const char *fmt, ...);
     void               setErrorStateLockedV(const char *fmt, va_list args);
 
+    /**
+     * Debugging trylock/spin method
+     * Try to acquire a lock a few times with sleeps between before giving up.
+     */
+    bool               tryLockSpinRightRound(Mutex& lock);
+
     struct RequestTrigger {
         // Metadata tag number, e.g. android.control.aePrecaptureTrigger
         uint32_t metadataTag;
@@ -242,6 +292,7 @@
       public:
 
         RequestThread(wp<Camera3Device> parent,
+                sp<camera3::StatusTracker> statusTracker,
                 camera3_device_t *hal3Device);
 
         /**
@@ -279,13 +330,6 @@
         void     setPaused(bool paused);
 
         /**
-         * Wait until thread is paused, either due to setPaused(true)
-         * or due to lack of input requests. Returns TIMED_OUT in case
-         * the thread does not pause within the timeout.
-         */
-        status_t waitUntilPaused(nsecs_t timeout);
-
-        /**
          * Wait until thread processes the capture request with settings'
          * android.request.id == requestId.
          *
@@ -295,6 +339,12 @@
         status_t waitUntilRequestProcessed(int32_t requestId, nsecs_t timeout);
 
         /**
+         * Shut down the thread. Shutdown is asynchronous, so thread may
+         * still be running once this method returns.
+         */
+        virtual void requestExit();
+
+        /**
          * Get the latest request that was sent to the HAL
          * with process_capture_request.
          */
@@ -314,6 +364,10 @@
         //  restoring the old field values for those tags.
         status_t           removeTriggers(const sp<CaptureRequest> &request);
 
+        // HAL workaround: Make sure a trigger ID always exists if
+        // a trigger does
+        status_t          addDummyTriggerIds(const sp<CaptureRequest> &request);
+
         static const nsecs_t kRequestTimeout = 50e6; // 50 ms
 
         // Waits for a request, or returns NULL if times out.
@@ -335,9 +389,12 @@
         void               setErrorState(const char *fmt, ...);
 
         wp<Camera3Device>  mParent;
+        wp<camera3::StatusTracker>  mStatusTracker;
         camera3_device_t  *mHal3Device;
 
-        const int          mId;
+        const int          mId;       // The camera ID
+        int                mStatusId; // The RequestThread's component ID for
+                                      // status tracking
 
         Mutex              mRequestLock;
         Condition          mRequestSignal;
@@ -377,21 +434,28 @@
      */
 
     struct InFlightRequest {
+        // android.request.id for the request
+        int     requestId;
         // Set by notify() SHUTTER call.
         nsecs_t captureTimestamp;
+        int     requestStatus;
         // Set by process_capture_result call with valid metadata
         bool    haveResultMetadata;
         // Decremented by calls to process_capture_result with valid output
         // buffers
         int     numBuffersLeft;
 
+        // Default constructor needed by KeyedVector
         InFlightRequest() :
+                requestId(0),
                 captureTimestamp(0),
+                requestStatus(OK),
                 haveResultMetadata(false),
                 numBuffersLeft(0) {
         }
 
-        explicit InFlightRequest(int numBuffers) :
+        InFlightRequest(int id, int numBuffers) :
+                requestId(id),
+                requestStatus(OK),
                 captureTimestamp(0),
                 haveResultMetadata(false),
                 numBuffersLeft(numBuffers) {
@@ -403,7 +467,13 @@
     Mutex                  mInFlightLock; // Protects mInFlightMap
     InFlightMap            mInFlightMap;
 
-    status_t registerInFlight(int32_t frameNumber, int32_t numBuffers);
+    status_t registerInFlight(int32_t frameNumber, int32_t requestId,
+            int32_t numBuffers);
+
+    /**
+     * Tracking for idle detection
+     */
+    sp<camera3::StatusTracker> mStatusTracker;
 
     /**
      * Output result queue and current HAL device 3A state
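
The StatusTracker class referenced throughout this header is not included in this excerpt; from the calls that do appear (addComponent, markComponentActive/Idle, removeComponent, and the resulting notifyStatus(idle) upcall), its book-keeping can be sketched roughly as below. This is only a toy model in standard C++ under those assumptions; the real tracker also takes a release fence on markComponentIdle and waits for it, which is omitted here.

    #include <functional>
    #include <map>
    #include <mutex>

    // Toy tracker: the device is "idle" only when every registered component
    // (request thread, each stream) has marked itself idle.
    class ToyStatusTracker {
      public:
        explicit ToyStatusTracker(std::function<void(bool idle)> notify)
            : mNotify(std::move(notify)) {}

        int addComponent() {
            std::lock_guard<std::mutex> lock(mLock);
            int id = mNextId++;
            mActive[id] = false;          // components start out idle
            return id;
        }

        void removeComponent(int id) {
            std::lock_guard<std::mutex> lock(mLock);
            mActive.erase(id);
            notifyLocked();
        }

        void markComponentActive(int id) { setState(id, true); }
        void markComponentIdle(int id)   { setState(id, false); }

      private:
        void setState(int id, bool active) {
            std::lock_guard<std::mutex> lock(mLock);
            auto it = mActive.find(id);
            if (it == mActive.end()) return;
            it->second = active;
            notifyLocked();
        }

        void notifyLocked() {
            bool idle = true;
            for (const auto &kv : mActive) idle = idle && !kv.second;
            mNotify(idle);                // analogous to Camera3Device::notifyStatus
        }

        std::function<void(bool)> mNotify;
        std::mutex mLock;
        std::map<int, bool> mActive;
        int mNextId = 0;
    };
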
diff --git a/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp b/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp
index 0850566..da51228 100644
--- a/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp
+++ b/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp
@@ -23,7 +23,8 @@
 
 #include <utils/Log.h>
 #include <utils/Trace.h>
-#include "Camera3IOStreamBase.h"
+#include "device3/Camera3IOStreamBase.h"
+#include "device3/StatusTracker.h"
 
 namespace android {
 
@@ -62,53 +63,6 @@
     return false;
 }
 
-status_t Camera3IOStreamBase::waitUntilIdle(nsecs_t timeout) {
-    status_t res;
-    {
-        Mutex::Autolock l(mLock);
-        while (mDequeuedBufferCount > 0) {
-            if (timeout != TIMEOUT_NEVER) {
-                nsecs_t startTime = systemTime();
-                res = mBufferReturnedSignal.waitRelative(mLock, timeout);
-                if (res == TIMED_OUT) {
-                    return res;
-                } else if (res != OK) {
-                    ALOGE("%s: Error waiting for outstanding buffers: %s (%d)",
-                            __FUNCTION__, strerror(-res), res);
-                    return res;
-                }
-                nsecs_t deltaTime = systemTime() - startTime;
-                if (timeout <= deltaTime) {
-                    timeout = 0;
-                } else {
-                    timeout -= deltaTime;
-                }
-            } else {
-                res = mBufferReturnedSignal.wait(mLock);
-                if (res != OK) {
-                    ALOGE("%s: Error waiting for outstanding buffers: %s (%d)",
-                            __FUNCTION__, strerror(-res), res);
-                    return res;
-                }
-            }
-        }
-    }
-
-    // No lock
-
-    unsigned int timeoutMs;
-    if (timeout == TIMEOUT_NEVER) {
-        timeoutMs = Fence::TIMEOUT_NEVER;
-    } else if (timeout == 0) {
-        timeoutMs = 0;
-    } else {
-        // Round up to wait at least 1 ms
-        timeoutMs = (timeout + 999999) / 1000000;
-    }
-
-    return mCombinedFence->wait(timeoutMs);
-}
-
 void Camera3IOStreamBase::dump(int fd, const Vector<String16> &args) const {
     (void) args;
     String8 lines;
@@ -190,6 +144,14 @@
     buffer.release_fence = releaseFence;
     buffer.status = status;
 
+    // Inform tracker about becoming busy
+    if (mDequeuedBufferCount == 0 && mState != STATE_IN_CONFIG &&
+            mState != STATE_IN_RECONFIG) {
+        sp<StatusTracker> statusTracker = mStatusTracker.promote();
+        if (statusTracker != 0) {
+            statusTracker->markComponentActive(mStatusId);
+        }
+    }
     mDequeuedBufferCount++;
 }
 
@@ -252,20 +214,32 @@
     sp<Fence> releaseFence;
     res = returnBufferCheckedLocked(buffer, timestamp, output,
                                     &releaseFence);
-    if (res != OK) {
-        return res;
+    // Res may be an error, but we still want to decrement our owned count
+    // to enable clean shutdown. So we'll just return the error but otherwise
+    // carry on
+
+    if (releaseFence != 0) {
+        mCombinedFence = Fence::merge(mName, mCombinedFence, releaseFence);
     }
 
-    mCombinedFence = Fence::merge(mName, mCombinedFence, releaseFence);
-
     mDequeuedBufferCount--;
+    if (mDequeuedBufferCount == 0 && mState != STATE_IN_CONFIG &&
+            mState != STATE_IN_RECONFIG) {
+        ALOGV("%s: Stream %d: All buffers returned; now idle", __FUNCTION__,
+                mId);
+        sp<StatusTracker> statusTracker = mStatusTracker.promote();
+        if (statusTracker != 0) {
+            statusTracker->markComponentIdle(mStatusId, mCombinedFence);
+        }
+    }
+
     mBufferReturnedSignal.signal();
 
     if (output) {
         mLastTimestamp = timestamp;
     }
 
-    return OK;
+    return res;
 }
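
The stream-side changes above report activity to the tracker on the 0-to-1 and 1-to-0 transitions of the dequeued-buffer count (and only outside of configuration). A minimal sketch of that counting logic, with the tracker calls replaced by prints and the fence hand-off omitted (all names below are illustrative):

    #include <cstdio>

    class BufferCounter {
      public:
        void onBufferDequeued() {
            // First outstanding buffer: the stream just became busy.
            if (mDequeued++ == 0) std::printf("stream active\n");
        }
        void onBufferReturned() {
            // Last outstanding buffer returned: the stream is idle again.
            if (--mDequeued == 0) std::printf("stream idle\n");
        }
      private:
        int mDequeued = 0;
    };
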
 
 
diff --git a/services/camera/libcameraservice/device3/Camera3IOStreamBase.h b/services/camera/libcameraservice/device3/Camera3IOStreamBase.h
index 9432a59..fcb9d04 100644
--- a/services/camera/libcameraservice/device3/Camera3IOStreamBase.h
+++ b/services/camera/libcameraservice/device3/Camera3IOStreamBase.h
@@ -43,7 +43,6 @@
      * Camera3Stream interface
      */
 
-    virtual status_t waitUntilIdle(nsecs_t timeout);
     virtual void     dump(int fd, const Vector<String16> &args) const;
 
   protected:
diff --git a/services/camera/libcameraservice/device3/Camera3InputStream.cpp b/services/camera/libcameraservice/device3/Camera3InputStream.cpp
index c80f512..5aa9a3e 100644
--- a/services/camera/libcameraservice/device3/Camera3InputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3InputStream.cpp
@@ -115,7 +115,6 @@
                 bufferFound = true;
                 bufferItem = tmp;
                 mBuffersInFlight.erase(it);
-                mDequeuedBufferCount--;
             }
         }
     }
@@ -148,12 +147,11 @@
     if (res != OK) {
         ALOGE("%s: Stream %d: Error releasing buffer back to buffer queue:"
                 " %s (%d)", __FUNCTION__, mId, strerror(-res), res);
-        return res;
     }
 
     *releaseFenceOut = releaseFence;
 
-    return OK;
+    return res;
 }
 
 status_t Camera3InputStream::returnInputBufferLocked(
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
index 35cb5ba..41328fc 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
@@ -198,12 +198,11 @@
     mLock.lock();
     if (res != OK) {
         close(anwReleaseFence);
-        return res;
     }
 
     *releaseFenceOut = releaseFence;
 
-    return OK;
+    return res;
 }
 
 void Camera3OutputStream::dump(int fd, const Vector<String16> &args) const {
diff --git a/services/camera/libcameraservice/device3/Camera3Stream.cpp b/services/camera/libcameraservice/device3/Camera3Stream.cpp
index a6872aa..6d2cf94 100644
--- a/services/camera/libcameraservice/device3/Camera3Stream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Stream.cpp
@@ -20,13 +20,18 @@
 
 #include <utils/Log.h>
 #include <utils/Trace.h>
-#include "Camera3Stream.h"
+#include "device3/Camera3Stream.h"
+#include "device3/StatusTracker.h"
 
 namespace android {
 
 namespace camera3 {
 
 Camera3Stream::~Camera3Stream() {
+    sp<StatusTracker> statusTracker = mStatusTracker.promote();
+    if (statusTracker != 0 && mStatusId != StatusTracker::NO_STATUS_ID) {
+        statusTracker->removeComponent(mStatusId);
+    }
 }
 
 Camera3Stream* Camera3Stream::cast(camera3_stream *stream) {
@@ -44,7 +49,8 @@
     mId(id),
     mName(String8::format("Camera3Stream[%d]", id)),
     mMaxSize(maxSize),
-    mState(STATE_CONSTRUCTED) {
+    mState(STATE_CONSTRUCTED),
+    mStatusId(StatusTracker::NO_STATUS_ID) {
 
     camera3_stream::stream_type = type;
     camera3_stream::width = width;
@@ -119,6 +125,15 @@
         return NULL;
     }
 
+    // Stop tracking if currently doing so
+    if (mStatusId != StatusTracker::NO_STATUS_ID) {
+        sp<StatusTracker> statusTracker = mStatusTracker.promote();
+        if (statusTracker != 0) {
+            statusTracker->removeComponent(mStatusId);
+        }
+        mStatusId = StatusTracker::NO_STATUS_ID;
+    }
+
     if (mState == STATE_CONSTRUCTED) {
         mState = STATE_IN_CONFIG;
     } else { // mState == STATE_CONFIGURED
@@ -154,6 +169,12 @@
             return INVALID_OPERATION;
     }
 
+    // Register for idle tracking
+    sp<StatusTracker> statusTracker = mStatusTracker.promote();
+    if (statusTracker != 0) {
+        mStatusId = statusTracker->addComponent();
+    }
+
     // Check if the stream configuration is unchanged, and skip reallocation if
     // so. As documented in hardware/camera3.h:configure_streams().
     if (mState == STATE_IN_RECONFIG &&
@@ -265,6 +286,18 @@
     return hasOutstandingBuffersLocked();
 }
 
+status_t Camera3Stream::setStatusTracker(sp<StatusTracker> statusTracker) {
+    Mutex::Autolock l(mLock);
+    sp<StatusTracker> oldTracker = mStatusTracker.promote();
+    if (oldTracker != 0 && mStatusId != StatusTracker::NO_STATUS_ID) {
+        oldTracker->removeComponent(mStatusId);
+    }
+    mStatusId = StatusTracker::NO_STATUS_ID;
+    mStatusTracker = statusTracker;
+
+    return OK;
+}
+
 status_t Camera3Stream::disconnect() {
     ATRACE_CALL();
     Mutex::Autolock l(mLock);
diff --git a/services/camera/libcameraservice/device3/Camera3Stream.h b/services/camera/libcameraservice/device3/Camera3Stream.h
index b64fd86..6eeb721 100644
--- a/services/camera/libcameraservice/device3/Camera3Stream.h
+++ b/services/camera/libcameraservice/device3/Camera3Stream.h
@@ -190,12 +190,11 @@
     enum {
         TIMEOUT_NEVER = -1
     };
+
     /**
-     * Wait until the HAL is done with all of this stream's buffers, including
-     * signalling all release fences. Returns TIMED_OUT if the timeout is exceeded,
-     * OK on success. Pass in TIMEOUT_NEVER for timeout to indicate an indefinite wait.
+     * Set the status tracker to notify about idle transitions
      */
-    virtual status_t waitUntilIdle(nsecs_t timeout) = 0;
+    virtual status_t setStatusTracker(sp<StatusTracker> statusTracker);
 
     /**
      * Disconnect stream from its non-HAL endpoint. After this,
@@ -267,6 +266,11 @@
     // INVALID_OPERATION if they cannot be obtained.
     virtual status_t getEndpointUsage(uint32_t *usage) = 0;
 
+    // Tracking for idle state
+    wp<StatusTracker> mStatusTracker;
+    // Status tracker component ID
+    int mStatusId;
+
   private:
     uint32_t oldUsage;
     uint32_t oldMaxBuffers;
diff --git a/services/camera/libcameraservice/device3/Camera3StreamInterface.h b/services/camera/libcameraservice/device3/Camera3StreamInterface.h
index 4768536..c93ae15 100644
--- a/services/camera/libcameraservice/device3/Camera3StreamInterface.h
+++ b/services/camera/libcameraservice/device3/Camera3StreamInterface.h
@@ -26,6 +26,8 @@
 
 namespace camera3 {
 
+class StatusTracker;
+
 /**
  * An interface for managing a single stream of input and/or output data from
  * the camera device.
@@ -128,13 +130,11 @@
     enum {
         TIMEOUT_NEVER = -1
     };
+
     /**
-     * Wait until the HAL is done with all of this stream's buffers, including
-     * signalling all release fences. Returns TIMED_OUT if the timeout is
-     * exceeded, OK on success. Pass in TIMEOUT_NEVER for timeout to indicate
-     * an indefinite wait.
+     * Set the status tracker to use for signaling idle transitions.
      */
-    virtual status_t waitUntilIdle(nsecs_t timeout) = 0;
+    virtual status_t setStatusTracker(sp<StatusTracker> statusTracker) = 0;
 
     /**
      * Disconnect stream from its non-HAL endpoint. After this,
diff --git a/services/camera/libcameraservice/device3/StatusTracker.cpp b/services/camera/libcameraservice/device3/StatusTracker.cpp
new file mode 100644
index 0000000..ab5419f
--- /dev/null
+++ b/services/camera/libcameraservice/device3/StatusTracker.cpp
@@ -0,0 +1,219 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "Camera3-Status"
+#define ATRACE_TAG ATRACE_TAG_CAMERA
+//#define LOG_NDEBUG 0
+
+// This is needed for stdint.h to define INT64_MAX in C++
+#define __STDC_LIMIT_MACROS
+
+#include <utils/Log.h>
+#include <utils/Trace.h>
+#include <ui/Fence.h>
+
+#include "device3/StatusTracker.h"
+#include "device3/Camera3Device.h"
+
+namespace android {
+
+namespace camera3 {
+
+StatusTracker::StatusTracker(wp<Camera3Device> parent) :
+        mComponentsChanged(false),
+        mParent(parent),
+        mNextComponentId(0),
+        mIdleFence(new Fence()),
+        mDeviceState(IDLE) {
+}
+
+StatusTracker::~StatusTracker() {
+}
+
+int StatusTracker::addComponent() {
+    int id;
+    ssize_t err;
+    {
+        Mutex::Autolock l(mLock);
+        id = mNextComponentId++;
+        ALOGV("%s: Adding new component %d", __FUNCTION__, id);
+
+        err = mStates.add(id, IDLE);
+        ALOGE_IF(err < 0, "%s: Can't add new component %d: %s (%d)",
+                __FUNCTION__, id, strerror(-err), err);
+    }
+
+    if (err >= 0) {
+        Mutex::Autolock pl(mPendingLock);
+        mComponentsChanged = true;
+        mPendingChangeSignal.signal();
+    }
+
+    return err < 0 ? err : id;
+}
+
+void StatusTracker::removeComponent(int id) {
+    ssize_t idx;
+    {
+        Mutex::Autolock l(mLock);
+        ALOGV("%s: Removing component %d", __FUNCTION__, id);
+        idx = mStates.removeItem(id);
+    }
+
+    if (idx >= 0) {
+        Mutex::Autolock pl(mPendingLock);
+        mComponentsChanged = true;
+        mPendingChangeSignal.signal();
+    }
+
+    return;
+}
+
+
+void StatusTracker::markComponentIdle(int id, const sp<Fence>& componentFence) {
+    markComponent(id, IDLE, componentFence);
+}
+
+void StatusTracker::markComponentActive(int id) {
+    markComponent(id, ACTIVE, Fence::NO_FENCE);
+}
+
+void StatusTracker::markComponent(int id, ComponentState state,
+        const sp<Fence>& componentFence) {
+    ALOGV("%s: Component %d is now %s", __FUNCTION__, id,
+            state == IDLE ? "idle" : "active");
+    Mutex::Autolock l(mPendingLock);
+
+    StateChange newState = {
+        id,
+        state,
+        componentFence
+    };
+
+    mPendingChangeQueue.add(newState);
+    mPendingChangeSignal.signal();
+}
+
+void StatusTracker::requestExit() {
+    // First mark thread dead
+    Thread::requestExit();
+    // Then exit any waits
+    mPendingChangeSignal.signal();
+}
+
+StatusTracker::ComponentState StatusTracker::getDeviceStateLocked() {
+    for (size_t i = 0; i < mStates.size(); i++) {
+        if (mStates.valueAt(i) == ACTIVE) {
+            ALOGV("%s: Component %d not idle", __FUNCTION__,
+                    mStates.keyAt(i));
+            return ACTIVE;
+        }
+    }
+    // - If not yet signaled, getSignalTime returns INT64_MAX
+    // - If invalid fence or error, returns -1
+    // - Otherwise returns time of signalling.
+    // Treat -1 as 'signalled', since the HAL may not be using fences, and we
+    // want to be able to go idle even in case of errors.
+    nsecs_t signalTime = mIdleFence->getSignalTime();
+    bool fencesDone = signalTime != INT64_MAX;
+
+    ALOGV_IF(!fencesDone, "%s: Fences still to wait on", __FUNCTION__);
+
+    return fencesDone ? IDLE : ACTIVE;
+}
+
+bool StatusTracker::threadLoop() {
+    status_t res;
+
+    // Wait for state updates
+    {
+        Mutex::Autolock pl(mPendingLock);
+        while (mPendingChangeQueue.size() == 0 && !mComponentsChanged) {
+            res = mPendingChangeSignal.waitRelative(mPendingLock,
+                    kWaitDuration);
+            if (exitPending()) return false;
+            if (res != OK) {
+                if (res != TIMED_OUT) {
+                    ALOGE("%s: Error waiting on state changes: %s (%d)",
+                            __FUNCTION__, strerror(-res), res);
+                }
+                // TIMED_OUT is expected
+                break;
+            }
+        }
+    }
+
+    // After new pending states appear, or on timeout, check if we're idle.
+    // Even on timeout we need to check, to account for fences that may still
+    // be clearing out.
+    sp<Camera3Device> parent;
+    {
+        Mutex::Autolock pl(mPendingLock);
+        Mutex::Autolock l(mLock);
+
+        // Collect all pending state updates and see if the device
+        // collectively transitions between idle and active for each one
+
+        // First pass for changed components or fence completions
+        ComponentState prevState = getDeviceStateLocked();
+        if (prevState != mDeviceState) {
+            // Only collect changes to overall device state
+            mStateTransitions.add(prevState);
+        }
+        // For each pending component state update, check if we've transitioned
+        // to a new overall device state
+        for (size_t i = 0; i < mPendingChangeQueue.size(); i++) {
+            const StateChange &newState = mPendingChangeQueue[i];
+            ssize_t idx = mStates.indexOfKey(newState.id);
+            // Ignore notices for unknown components
+            if (idx >= 0) {
+                // Update single component state
+                mStates.replaceValueAt(idx, newState.state);
+                mIdleFence = Fence::merge(String8("idleFence"),
+                        mIdleFence, newState.fence);
+                // .. and see if overall device state has changed
+                ComponentState newState = getDeviceStateLocked();
+                if (newState != prevState) {
+                    mStateTransitions.add(newState);
+                }
+                prevState = newState;
+            }
+        }
+        mPendingChangeQueue.clear();
+        mComponentsChanged = false;
+
+        // Store the final state after all pending state changes are processed
+
+        mDeviceState = prevState;
+        parent = mParent.promote();
+    }
+
+    // Notify parent for all intermediate transitions
+    if (mStateTransitions.size() > 0 && parent.get()) {
+        for (size_t i = 0; i < mStateTransitions.size(); i++) {
+            bool idle = (mStateTransitions[i] == IDLE);
+            ALOGV("Camera device is now %s", idle ? "idle" : "active");
+            parent->notifyStatus(idle);
+        }
+    }
+    mStateTransitions.clear();
+
+    return true;
+}
+
+} // namespace camera3
+
+} // namespace android
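
threadLoop() above follows a two-lock producer/consumer shape: markComponent() enqueues a StateChange under mPendingLock and signals, while the tracker thread drains the queue, folds each update into the per-component table and the merged idle fence under mLock, and records every overall idle/active flip so the parent is notified of intermediate transitions as well. The following is a standalone sketch of that drain-and-fold shape under stated assumptions: hypothetical names (AggregateTracker), plain std:: types instead of libutils, and no fence handling (the real code only reports IDLE once the merged fence from markComponentIdle() has also signalled).

#include <chrono>
#include <condition_variable>
#include <map>
#include <mutex>
#include <vector>

enum class State { Idle, Active };

class AggregateTracker {
  public:
    // Producer side: record a per-component update and wake the worker.
    void mark(int id, State s) {
        std::lock_guard<std::mutex> l(mPendingLock);
        mPending.push_back({id, s});
        mSignal.notify_one();
    }

    // One iteration of the worker loop; returns the overall transitions seen.
    std::vector<State> drainOnce() {
        std::vector<Change> batch;
        {
            std::unique_lock<std::mutex> l(mPendingLock);
            // Wake up periodically even without updates (the real code does
            // this to re-check fences that may have signalled meanwhile).
            mSignal.wait_for(l, std::chrono::milliseconds(250),
                             [this] { return !mPending.empty(); });
            batch.swap(mPending);
        }

        std::vector<State> transitions;
        std::lock_guard<std::mutex> l(mStateLock);
        State prev = overallLocked();
        for (const Change& c : batch) {
            mStates[c.id] = c.state;              // fold in one update...
            State now = overallLocked();          // ...recompute overall state
            if (now != prev) transitions.push_back(now);
            prev = now;
        }
        return transitions;                       // report each flip in order
    }

  private:
    struct Change { int id; State state; };

    State overallLocked() const {                 // Active if any component is
        for (const auto& kv : mStates)
            if (kv.second == State::Active) return State::Active;
        return State::Idle;
    }

    std::mutex mPendingLock;                      // guards mPending, mSignal
    std::condition_variable mSignal;
    std::vector<Change> mPending;

    std::mutex mStateLock;                        // guards mStates
    std::map<int, State> mStates;
};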
diff --git a/services/camera/libcameraservice/device3/StatusTracker.h b/services/camera/libcameraservice/device3/StatusTracker.h
new file mode 100644
index 0000000..49cecb3
--- /dev/null
+++ b/services/camera/libcameraservice/device3/StatusTracker.h
@@ -0,0 +1,130 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_SERVERS_CAMERA3_STATUSTRACKER_H
+#define ANDROID_SERVERS_CAMERA3_STATUSTRACKER_H
+
+#include <utils/Condition.h>
+#include <utils/Errors.h>
+#include <utils/List.h>
+#include <utils/Mutex.h>
+#include <utils/Thread.h>
+#include <utils/KeyedVector.h>
+#include <hardware/camera3.h>
+
+#include "common/CameraDeviceBase.h"
+
+namespace android {
+
+class Camera3Device;
+class Fence;
+
+namespace camera3 {
+
+/**
+ * State tracking for idle and other collective state transitions.
+ * Collects idle notifications from different sources and calls the
+ * parent when all of them become idle.
+ *
+ * The parent is responsible for synchronizing the status updates with its
+ * internal state correctly, which means the notifyStatus call to the parent may
+ * block for a while.
+ */
+class StatusTracker: public Thread {
+  public:
+    StatusTracker(wp<Camera3Device> parent);
+    ~StatusTracker();
+
+    // An always-invalid component ID
+    static const int NO_STATUS_ID = -1;
+
+    // Add a component to track; returns non-negative unique ID for the new
+    // component on success, negative error code on failure.
+    // New components start in the idle state.
+    int addComponent();
+
+    // Remove existing component from idle tracking. Ignores unknown IDs
+    void removeComponent(int id);
+
+    // Set the state of a tracked component to idle. Ignores unknown IDs; can
+    // accept a fence that must signal before the component is considered
+    // idle.  The fence is merged with any previously provided fences, so all
+    // of them must signal before the component counts as idle.
+    void markComponentIdle(int id, const sp<Fence>& componentFence);
+
+    // Set the state of a tracked component to be active. Ignores unknown IDs.
+    void markComponentActive(int id);
+
+    virtual void requestExit();
+  protected:
+
+    virtual bool threadLoop();
+
+  private:
+    enum ComponentState {
+        IDLE,
+        ACTIVE
+    };
+
+    void markComponent(int id, ComponentState state,
+            const sp<Fence>& componentFence);
+
+    // Guards mPendingChangeQueue, mPendingChangeSignal, and mComponentsChanged
+    Mutex mPendingLock;
+
+    Condition mPendingChangeSignal;
+
+    struct StateChange {
+        int id;
+        ComponentState state;
+        sp<Fence> fence;
+    };
+    // A queue of yet-to-be-processed state changes to components
+    Vector<StateChange> mPendingChangeQueue;
+    bool mComponentsChanged;
+
+    wp<Camera3Device> mParent;
+
+    // Guards rest of internals. Must be locked after mPendingLock if both used.
+    Mutex mLock;
+
+    int mNextComponentId;
+
+    // Current component states
+    KeyedVector<int, ComponentState> mStates;
+    // Merged fence for all processed state changes
+    sp<Fence> mIdleFence;
+    // Current overall device state
+    ComponentState mDeviceState;
+
+    // Private to threadLoop
+
+    // Determine current overall device state
+    // We're IDLE iff
+    // - All components are currently IDLE
+    // - The merged fence for all component updates has signalled
+    ComponentState getDeviceStateLocked();
+
+    Vector<ComponentState> mStateTransitions;
+
+    static const nsecs_t kWaitDuration = 250000000LL; // 250 ms
+};
+
+} // namespace camera3
+
+} // namespace android
+
+#endif
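
For orientation, a hedged sketch of how the API declared above is typically driven. Only the StatusTracker calls are taken from this header; the surrounding pieces (the device and fence variables, the thread name, the shutdown order) are illustrative assumptions, and the snippet only makes sense inside the AOSP tree with the usual libutils types.

sp<StatusTracker> tracker = new StatusTracker(device); // device: wp<Camera3Device>
tracker->run("C3Dev-Status");                  // start the tracker thread

int id = tracker->addComponent();              // components start out idle
tracker->markComponentActive(id);              // e.g. first buffer handed out

// ... work in flight ...

tracker->markComponentIdle(id, fence);         // idle once `fence` also signals
tracker->removeComponent(id);                  // stop tracking this component

tracker->requestExit();                        // overridden above to wake the thread
tracker->join();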