Merge "Revert "Camera: Fix race condition between initialize and statusCallback"" into rvc-qpr-dev am: c4ba40d49f am: f47673163d

Original change: https://googleplex-android-review.googlesource.com/c/platform/frameworks/av/+/12215422

Change-Id: I5a722654a5a48ee65fcc99fde6d21fb41617fada
diff --git a/PREUPLOAD.cfg b/PREUPLOAD.cfg
index ae920c0..8fe48c2 100644
--- a/PREUPLOAD.cfg
+++ b/PREUPLOAD.cfg
@@ -1,2 +1,11 @@
 [Hook Scripts]
 mainline_hook = ${REPO_ROOT}/frameworks/av/tools/mainline_hook_partial.sh ${REPO_ROOT} ${PREUPLOAD_FILES}
+
+[Builtin Hooks]
+clang_format = true
+
+[Builtin Hooks Options]
+# Only turn on clang-format check for the following subfolders.
+clang_format = --commit ${PREUPLOAD_COMMIT} --style file --extensions c,h,cc,cpp
+               media/libmediatranscoding/
+               services/mediatranscoding/
diff --git a/camera/Android.bp b/camera/Android.bp
index fa36bb3..b777d74 100644
--- a/camera/Android.bp
+++ b/camera/Android.bp
@@ -38,7 +38,6 @@
         "ICamera.cpp",
         "ICameraClient.cpp",
         "ICameraRecordingProxy.cpp",
-        "ICameraRecordingProxyListener.cpp",
         "camera2/CaptureRequest.cpp",
         "camera2/ConcurrentCamera.cpp",
         "camera2/OutputConfiguration.cpp",
diff --git a/camera/Camera.cpp b/camera/Camera.cpp
index 84d1d93..f7d194e 100644
--- a/camera/Camera.cpp
+++ b/camera/Camera.cpp
@@ -25,7 +25,6 @@
 #include <binder/IMemory.h>
 
 #include <Camera.h>
-#include <ICameraRecordingProxyListener.h>
 #include <android/hardware/ICameraService.h>
 #include <android/hardware/ICamera.h>
 
@@ -77,63 +76,6 @@
     return CameraBaseT::connect(cameraId, clientPackageName, clientUid, clientPid);
 }
 
-status_t Camera::connectLegacy(int cameraId, int halVersion,
-        const String16& clientPackageName,
-        int clientUid,
-        sp<Camera>& camera)
-{
-    ALOGV("%s: connect legacy camera device", __FUNCTION__);
-    sp<Camera> c = new Camera(cameraId);
-    sp<::android::hardware::ICameraClient> cl = c;
-    status_t status = NO_ERROR;
-    const sp<::android::hardware::ICameraService>& cs = CameraBaseT::getCameraService();
-
-    binder::Status ret;
-    if (cs != nullptr) {
-        ret = cs.get()->connectLegacy(cl, cameraId, halVersion, clientPackageName,
-                clientUid, /*out*/&(c->mCamera));
-    }
-    if (ret.isOk() && c->mCamera != nullptr) {
-        IInterface::asBinder(c->mCamera)->linkToDeath(c);
-        c->mStatus = NO_ERROR;
-        camera = c;
-    } else {
-        switch(ret.serviceSpecificErrorCode()) {
-            case hardware::ICameraService::ERROR_DISCONNECTED:
-                status = -ENODEV;
-                break;
-            case hardware::ICameraService::ERROR_CAMERA_IN_USE:
-                status = -EBUSY;
-                break;
-            case hardware::ICameraService::ERROR_INVALID_OPERATION:
-                status = -EINVAL;
-                break;
-            case hardware::ICameraService::ERROR_MAX_CAMERAS_IN_USE:
-                status = -EUSERS;
-                break;
-            case hardware::ICameraService::ERROR_ILLEGAL_ARGUMENT:
-                status = BAD_VALUE;
-                break;
-            case hardware::ICameraService::ERROR_DEPRECATED_HAL:
-                status = -EOPNOTSUPP;
-                break;
-            case hardware::ICameraService::ERROR_DISABLED:
-                status = -EACCES;
-                break;
-            case hardware::ICameraService::ERROR_PERMISSION_DENIED:
-                status = PERMISSION_DENIED;
-                break;
-            default:
-                status = -EINVAL;
-                ALOGW("An error occurred while connecting to camera %d: %s", cameraId,
-                        (cs != nullptr) ? "Service not available" : ret.toString8().string());
-                break;
-        }
-        c.clear();
-    }
-    return status;
-}
-
 status_t Camera::reconnect()
 {
     ALOGV("reconnect");
@@ -214,10 +156,6 @@
 void Camera::stopRecording()
 {
     ALOGV("stopRecording");
-    {
-        Mutex::Autolock _l(mLock);
-        mRecordingProxyListener.clear();
-    }
     sp <::android::hardware::ICamera> c = mCamera;
     if (c == 0) return;
     c->stopRecording();
@@ -325,12 +263,6 @@
     mListener = listener;
 }
 
-void Camera::setRecordingProxyListener(const sp<ICameraRecordingProxyListener>& listener)
-{
-    Mutex::Autolock _l(mLock);
-    mRecordingProxyListener = listener;
-}
-
 void Camera::setPreviewCallbackFlags(int flag)
 {
     ALOGV("setPreviewCallbackFlags");
@@ -384,19 +316,6 @@
 // callback from camera service when timestamped frame is ready
 void Camera::dataCallbackTimestamp(nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr)
 {
-    // If recording proxy listener is registered, forward the frame and return.
-    // The other listener (mListener) is ignored because the receiver needs to
-    // call releaseRecordingFrame.
-    sp<ICameraRecordingProxyListener> proxylistener;
-    {
-        Mutex::Autolock _l(mLock);
-        proxylistener = mRecordingProxyListener;
-    }
-    if (proxylistener != NULL) {
-        proxylistener->dataCallbackTimestamp(timestamp, msgType, dataPtr);
-        return;
-    }
-
     sp<CameraListener> listener;
     {
         Mutex::Autolock _l(mLock);
@@ -413,19 +332,6 @@
 
 void Camera::recordingFrameHandleCallbackTimestamp(nsecs_t timestamp, native_handle_t* handle)
 {
-    // If recording proxy listener is registered, forward the frame and return.
-    // The other listener (mListener) is ignored because the receiver needs to
-    // call releaseRecordingFrameHandle.
-    sp<ICameraRecordingProxyListener> proxylistener;
-    {
-        Mutex::Autolock _l(mLock);
-        proxylistener = mRecordingProxyListener;
-    }
-    if (proxylistener != NULL) {
-        proxylistener->recordingFrameHandleCallbackTimestamp(timestamp, handle);
-        return;
-    }
-
     sp<CameraListener> listener;
     {
         Mutex::Autolock _l(mLock);
@@ -444,19 +350,6 @@
         const std::vector<nsecs_t>& timestamps,
         const std::vector<native_handle_t*>& handles)
 {
-    // If recording proxy listener is registered, forward the frame and return.
-    // The other listener (mListener) is ignored because the receiver needs to
-    // call releaseRecordingFrameHandle.
-    sp<ICameraRecordingProxyListener> proxylistener;
-    {
-        Mutex::Autolock _l(mLock);
-        proxylistener = mRecordingProxyListener;
-    }
-    if (proxylistener != NULL) {
-        proxylistener->recordingFrameHandleCallbackTimestampBatch(timestamps, handles);
-        return;
-    }
-
     sp<CameraListener> listener;
     {
         Mutex::Autolock _l(mLock);
@@ -476,10 +369,9 @@
     return new RecordingProxy(this);
 }
 
-status_t Camera::RecordingProxy::startRecording(const sp<ICameraRecordingProxyListener>& listener)
+status_t Camera::RecordingProxy::startRecording()
 {
     ALOGV("RecordingProxy::startRecording");
-    mCamera->setRecordingProxyListener(listener);
     mCamera->reconnect();
     return mCamera->startRecording();
 }
@@ -490,23 +382,6 @@
     mCamera->stopRecording();
 }
 
-void Camera::RecordingProxy::releaseRecordingFrame(const sp<IMemory>& mem)
-{
-    ALOGV("RecordingProxy::releaseRecordingFrame");
-    mCamera->releaseRecordingFrame(mem);
-}
-
-void Camera::RecordingProxy::releaseRecordingFrameHandle(native_handle_t* handle) {
-    ALOGV("RecordingProxy::releaseRecordingFrameHandle");
-    mCamera->releaseRecordingFrameHandle(handle);
-}
-
-void Camera::RecordingProxy::releaseRecordingFrameHandleBatch(
-        const std::vector<native_handle_t*>& handles) {
-    ALOGV("RecordingProxy::releaseRecordingFrameHandleBatch");
-    mCamera->releaseRecordingFrameHandleBatch(handles);
-}
-
 Camera::RecordingProxy::RecordingProxy(const sp<Camera>& camera)
 {
     mCamera = camera;
diff --git a/camera/ICameraRecordingProxy.cpp b/camera/ICameraRecordingProxy.cpp
index bd6af75..97523a5 100644
--- a/camera/ICameraRecordingProxy.cpp
+++ b/camera/ICameraRecordingProxy.cpp
@@ -18,7 +18,6 @@
 #define LOG_TAG "ICameraRecordingProxy"
 #include <camera/CameraUtils.h>
 #include <camera/ICameraRecordingProxy.h>
-#include <camera/ICameraRecordingProxyListener.h>
 #include <binder/IMemory.h>
 #include <binder/Parcel.h>
 #include <media/hardware/HardwareAPI.h>
@@ -29,10 +28,7 @@
 
 enum {
     START_RECORDING = IBinder::FIRST_CALL_TRANSACTION,
-    STOP_RECORDING,
-    RELEASE_RECORDING_FRAME,
-    RELEASE_RECORDING_FRAME_HANDLE,
-    RELEASE_RECORDING_FRAME_HANDLE_BATCH,
+    STOP_RECORDING
 };
 
 
@@ -44,12 +40,11 @@
     {
     }
 
-    status_t startRecording(const sp<ICameraRecordingProxyListener>& listener)
+    status_t startRecording()
     {
         ALOGV("startRecording");
         Parcel data, reply;
         data.writeInterfaceToken(ICameraRecordingProxy::getInterfaceDescriptor());
-        data.writeStrongBinder(IInterface::asBinder(listener));
         remote()->transact(START_RECORDING, data, &reply);
         return reply.readInt32();
     }
@@ -61,46 +56,6 @@
         data.writeInterfaceToken(ICameraRecordingProxy::getInterfaceDescriptor());
         remote()->transact(STOP_RECORDING, data, &reply);
     }
-
-    void releaseRecordingFrame(const sp<IMemory>& mem)
-    {
-        ALOGV("releaseRecordingFrame");
-        Parcel data, reply;
-        data.writeInterfaceToken(ICameraRecordingProxy::getInterfaceDescriptor());
-        data.writeStrongBinder(IInterface::asBinder(mem));
-        remote()->transact(RELEASE_RECORDING_FRAME, data, &reply);
-    }
-
-    void releaseRecordingFrameHandle(native_handle_t *handle) {
-        ALOGV("releaseRecordingFrameHandle");
-        Parcel data, reply;
-        data.writeInterfaceToken(ICameraRecordingProxy::getInterfaceDescriptor());
-        data.writeNativeHandle(handle);
-
-        remote()->transact(RELEASE_RECORDING_FRAME_HANDLE, data, &reply);
-
-        // Close the native handle because camera received a dup copy.
-        native_handle_close(handle);
-        native_handle_delete(handle);
-    }
-
-    void releaseRecordingFrameHandleBatch(const std::vector<native_handle_t*>& handles) {
-        ALOGV("releaseRecordingFrameHandleBatch");
-        Parcel data, reply;
-        data.writeInterfaceToken(ICameraRecordingProxy::getInterfaceDescriptor());
-        uint32_t n = handles.size();
-        data.writeUint32(n);
-        for (auto& handle : handles) {
-            data.writeNativeHandle(handle);
-        }
-        remote()->transact(RELEASE_RECORDING_FRAME_HANDLE_BATCH, data, &reply);
-
-        // Close the native handle because camera received a dup copy.
-        for (auto& handle : handles) {
-            native_handle_close(handle);
-            native_handle_delete(handle);
-        }
-    }
 };
 
 IMPLEMENT_META_INTERFACE(CameraRecordingProxy, "android.hardware.ICameraRecordingProxy");
@@ -114,9 +69,7 @@
         case START_RECORDING: {
             ALOGV("START_RECORDING");
             CHECK_INTERFACE(ICameraRecordingProxy, data, reply);
-            sp<ICameraRecordingProxyListener> listener =
-                interface_cast<ICameraRecordingProxyListener>(data.readStrongBinder());
-            reply->writeInt32(startRecording(listener));
+            reply->writeInt32(startRecording());
             return NO_ERROR;
         } break;
         case STOP_RECORDING: {
@@ -125,46 +78,6 @@
             stopRecording();
             return NO_ERROR;
         } break;
-        case RELEASE_RECORDING_FRAME: {
-            ALOGV("RELEASE_RECORDING_FRAME");
-            CHECK_INTERFACE(ICameraRecordingProxy, data, reply);
-            sp<IMemory> mem = interface_cast<IMemory>(data.readStrongBinder());
-            releaseRecordingFrame(mem);
-            return NO_ERROR;
-        } break;
-        case RELEASE_RECORDING_FRAME_HANDLE: {
-            ALOGV("RELEASE_RECORDING_FRAME_HANDLE");
-            CHECK_INTERFACE(ICameraRecordingProxy, data, reply);
-
-            // releaseRecordingFrameHandle will be responsble to close the native handle.
-            releaseRecordingFrameHandle(data.readNativeHandle());
-            return NO_ERROR;
-        } break;
-        case RELEASE_RECORDING_FRAME_HANDLE_BATCH: {
-            ALOGV("RELEASE_RECORDING_FRAME_HANDLE_BATCH");
-            CHECK_INTERFACE(ICameraRecordingProxy, data, reply);
-            uint32_t n = 0;
-            status_t res = data.readUint32(&n);
-            if (res != OK) {
-                ALOGE("%s: Failed to read batch size: %s (%d)", __FUNCTION__, strerror(-res), res);
-                return BAD_VALUE;
-            }
-            std::vector<native_handle_t*> handles;
-            handles.reserve(n);
-            for (uint32_t i = 0; i < n; i++) {
-                native_handle_t* handle = data.readNativeHandle();
-                if (handle == nullptr) {
-                    ALOGE("%s: Received a null native handle at handles[%d]",
-                            __FUNCTION__, i);
-                    return BAD_VALUE;
-                }
-                handles.push_back(handle);
-            }
-
-            // releaseRecordingFrameHandleBatch will be responsble to close the native handle.
-            releaseRecordingFrameHandleBatch(handles);
-            return NO_ERROR;
-        } break;
         default:
             return BBinder::onTransact(code, data, reply, flags);
     }
@@ -173,4 +86,3 @@
 // ----------------------------------------------------------------------------
 
 }; // namespace android
-
diff --git a/camera/ICameraRecordingProxyListener.cpp b/camera/ICameraRecordingProxyListener.cpp
deleted file mode 100644
index 66faf8f..0000000
--- a/camera/ICameraRecordingProxyListener.cpp
+++ /dev/null
@@ -1,180 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "ICameraRecordingProxyListener"
-#include <camera/CameraUtils.h>
-#include <camera/ICameraRecordingProxyListener.h>
-#include <binder/IMemory.h>
-#include <binder/Parcel.h>
-#include <media/hardware/HardwareAPI.h>
-#include <utils/Log.h>
-
-namespace android {
-
-enum {
-    DATA_CALLBACK_TIMESTAMP = IBinder::FIRST_CALL_TRANSACTION,
-    RECORDING_FRAME_HANDLE_CALLBACK_TIMESTAMP,
-    RECORDING_FRAME_HANDLE_CALLBACK_TIMESTAMP_BATCH
-};
-
-class BpCameraRecordingProxyListener: public BpInterface<ICameraRecordingProxyListener>
-{
-public:
-    explicit BpCameraRecordingProxyListener(const sp<IBinder>& impl)
-        : BpInterface<ICameraRecordingProxyListener>(impl)
-    {
-    }
-
-    void dataCallbackTimestamp(nsecs_t timestamp, int32_t msgType, const sp<IMemory>& imageData)
-    {
-        ALOGV("dataCallback");
-        Parcel data, reply;
-        data.writeInterfaceToken(ICameraRecordingProxyListener::getInterfaceDescriptor());
-        data.writeInt64(timestamp);
-        data.writeInt32(msgType);
-        data.writeStrongBinder(IInterface::asBinder(imageData));
-        remote()->transact(DATA_CALLBACK_TIMESTAMP, data, &reply, IBinder::FLAG_ONEWAY);
-    }
-
-    void recordingFrameHandleCallbackTimestamp(nsecs_t timestamp, native_handle_t* handle) {
-        ALOGV("recordingFrameHandleCallbackTimestamp");
-        Parcel data, reply;
-        data.writeInterfaceToken(ICameraRecordingProxyListener::getInterfaceDescriptor());
-        data.writeInt64(timestamp);
-        data.writeNativeHandle(handle);
-        remote()->transact(RECORDING_FRAME_HANDLE_CALLBACK_TIMESTAMP, data, &reply,
-                IBinder::FLAG_ONEWAY);
-
-        // The native handle is dupped in ICameraClient so we need to free it here.
-        native_handle_close(handle);
-        native_handle_delete(handle);
-    }
-
-    void recordingFrameHandleCallbackTimestampBatch(
-            const std::vector<nsecs_t>& timestamps,
-            const std::vector<native_handle_t*>& handles) {
-        ALOGV("recordingFrameHandleCallbackTimestampBatch");
-        Parcel data, reply;
-        data.writeInterfaceToken(ICameraRecordingProxyListener::getInterfaceDescriptor());
-
-        uint32_t n = timestamps.size();
-        if (n != handles.size()) {
-            ALOGE("%s: size of timestamps(%zu) and handles(%zu) mismatch!",
-                    __FUNCTION__, timestamps.size(), handles.size());
-            return;
-        }
-        data.writeUint32(n);
-        for (auto ts : timestamps) {
-            data.writeInt64(ts);
-        }
-        for (auto& handle : handles) {
-            data.writeNativeHandle(handle);
-        }
-        remote()->transact(RECORDING_FRAME_HANDLE_CALLBACK_TIMESTAMP_BATCH, data, &reply,
-                IBinder::FLAG_ONEWAY);
-
-        // The native handle is dupped in ICameraClient so we need to free it here.
-        for (auto& handle : handles) {
-            native_handle_close(handle);
-            native_handle_delete(handle);
-        }
-    }
-};
-
-IMPLEMENT_META_INTERFACE(CameraRecordingProxyListener, "android.hardware.ICameraRecordingProxyListener");
-
-// ----------------------------------------------------------------------
-
-status_t BnCameraRecordingProxyListener::onTransact(
-    uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags)
-{
-    switch(code) {
-        case DATA_CALLBACK_TIMESTAMP: {
-            ALOGV("DATA_CALLBACK_TIMESTAMP");
-            CHECK_INTERFACE(ICameraRecordingProxyListener, data, reply);
-            nsecs_t timestamp = data.readInt64();
-            int32_t msgType = data.readInt32();
-            sp<IMemory> imageData = interface_cast<IMemory>(data.readStrongBinder());
-            dataCallbackTimestamp(timestamp, msgType, imageData);
-            return NO_ERROR;
-        } break;
-        case RECORDING_FRAME_HANDLE_CALLBACK_TIMESTAMP: {
-            ALOGV("RECORDING_FRAME_HANDLE_CALLBACK_TIMESTAMP");
-            CHECK_INTERFACE(ICameraRecordingProxyListener, data, reply);
-            nsecs_t timestamp;
-            status_t res = data.readInt64(&timestamp);
-            if (res != OK) {
-                ALOGE("%s: Failed to read timestamp: %s (%d)", __FUNCTION__, strerror(-res), res);
-                return BAD_VALUE;
-            }
-
-            native_handle_t* handle = data.readNativeHandle();
-            if (handle == nullptr) {
-                ALOGE("%s: Received a null native handle", __FUNCTION__);
-                return BAD_VALUE;
-            }
-            // The native handle will be freed in
-            // BpCameraRecordingProxy::releaseRecordingFrameHandle.
-            recordingFrameHandleCallbackTimestamp(timestamp, handle);
-            return NO_ERROR;
-        } break;
-        case RECORDING_FRAME_HANDLE_CALLBACK_TIMESTAMP_BATCH: {
-            ALOGV("RECORDING_FRAME_HANDLE_CALLBACK_TIMESTAMP_BATCH");
-            CHECK_INTERFACE(ICameraRecordingProxyListener, data, reply);
-            uint32_t n = 0;
-            status_t res = data.readUint32(&n);
-            if (res != OK) {
-                ALOGE("%s: Failed to read batch size: %s (%d)", __FUNCTION__, strerror(-res), res);
-                return BAD_VALUE;
-            }
-            std::vector<nsecs_t> timestamps;
-            std::vector<native_handle_t*> handles;
-            timestamps.reserve(n);
-            handles.reserve(n);
-            for (uint32_t i = 0; i < n; i++) {
-                nsecs_t t;
-                res = data.readInt64(&t);
-                if (res != OK) {
-                    ALOGE("%s: Failed to read timestamp[%d]: %s (%d)",
-                            __FUNCTION__, i, strerror(-res), res);
-                    return BAD_VALUE;
-                }
-                timestamps.push_back(t);
-            }
-            for (uint32_t i = 0; i < n; i++) {
-                native_handle_t* handle = data.readNativeHandle();
-                if (handle == nullptr) {
-                    ALOGE("%s: Received a null native handle at handles[%d]",
-                            __FUNCTION__, i);
-                    return BAD_VALUE;
-                }
-                handles.push_back(handle);
-            }
-            // The native handle will be freed in
-            // BpCameraRecordingProxy::releaseRecordingFrameHandleBatch.
-            recordingFrameHandleCallbackTimestampBatch(timestamps, handles);
-            return NO_ERROR;
-        } break;
-        default:
-            return BBinder::onTransact(code, data, reply, flags);
-    }
-}
-
-// ----------------------------------------------------------------------------
-
-}; // namespace android
-
diff --git a/camera/TEST_MAPPING b/camera/TEST_MAPPING
new file mode 100644
index 0000000..683e183
--- /dev/null
+++ b/camera/TEST_MAPPING
@@ -0,0 +1,11 @@
+{
+  "postsubmit": [
+    {
+      "name": "CtsCameraTestCases"
+    },
+    {
+      "name": "CtsCameraTestCases",
+      "keywords": ["primary-device"]
+    }
+  ]
+}
diff --git a/camera/aidl/android/hardware/ICameraService.aidl b/camera/aidl/android/hardware/ICameraService.aidl
index ac7a35b..8af704d 100644
--- a/camera/aidl/android/hardware/ICameraService.aidl
+++ b/camera/aidl/android/hardware/ICameraService.aidl
@@ -69,7 +69,7 @@
 
     /**
      * Default UID/PID values for non-privileged callers of
-     * connect(), connectDevice(), and connectLegacy()
+     * connect() and connectDevice()
      */
     const int USE_CALLING_UID = -1;
     const int USE_CALLING_PID = -1;
@@ -93,20 +93,6 @@
             int clientUid);
 
     /**
-     * halVersion constant for connectLegacy
-     */
-    const int CAMERA_HAL_API_VERSION_UNSPECIFIED = -1;
-
-    /**
-     * Open a camera device in legacy mode, if supported by the camera module HAL.
-     */
-    ICamera connectLegacy(ICameraClient client,
-            int cameraId,
-            int halVersion,
-            String opPackageName,
-            int clientUid);
-
-    /**
      * Add listener for changes to camera device and flashlight state.
      *
      * Also returns the set of currently-known camera IDs and state of each device.
diff --git a/camera/include/camera/Camera.h b/camera/include/camera/Camera.h
index 2cdb617..5579183 100644
--- a/camera/include/camera/Camera.h
+++ b/camera/include/camera/Camera.h
@@ -24,7 +24,6 @@
 #include <gui/IGraphicBufferProducer.h>
 #include <system/camera.h>
 #include <camera/ICameraRecordingProxy.h>
-#include <camera/ICameraRecordingProxyListener.h>
 #include <camera/android/hardware/ICamera.h>
 #include <camera/android/hardware/ICameraClient.h>
 #include <camera/CameraBase.h>
@@ -84,10 +83,6 @@
                                 const String16& clientPackageName,
                                 int clientUid, int clientPid);
 
-    static  status_t  connectLegacy(int cameraId, int halVersion,
-                                     const String16& clientPackageName,
-                                     int clientUid, sp<Camera>& camera);
-
             virtual     ~Camera();
 
             status_t    reconnect();
@@ -154,7 +149,6 @@
             status_t    setVideoTarget(const sp<IGraphicBufferProducer>& bufferProducer);
 
             void        setListener(const sp<CameraListener>& listener);
-            void        setRecordingProxyListener(const sp<ICameraRecordingProxyListener>& listener);
 
             // Configure preview callbacks to app. Only one of the older
             // callbacks or the callback surface can be active at the same time;
@@ -187,12 +181,8 @@
         explicit RecordingProxy(const sp<Camera>& camera);
 
         // ICameraRecordingProxy interface
-        virtual status_t startRecording(const sp<ICameraRecordingProxyListener>& listener);
+        virtual status_t startRecording();
         virtual void stopRecording();
-        virtual void releaseRecordingFrame(const sp<IMemory>& mem);
-        virtual void releaseRecordingFrameHandle(native_handle_t* handle);
-        virtual void releaseRecordingFrameHandleBatch(
-                const std::vector<native_handle_t*>& handles);
 
     private:
         sp<Camera>         mCamera;
@@ -203,8 +193,6 @@
                         Camera(const Camera&);
                         Camera& operator=(const Camera);
 
-    sp<ICameraRecordingProxyListener>  mRecordingProxyListener;
-
     friend class        CameraBase;
 };
 
diff --git a/camera/include/camera/ICameraRecordingProxy.h b/camera/include/camera/ICameraRecordingProxy.h
index 02af2f3..4306dc1 100644
--- a/camera/include/camera/ICameraRecordingProxy.h
+++ b/camera/include/camera/ICameraRecordingProxy.h
@@ -24,13 +24,11 @@
 
 namespace android {
 
-class ICameraRecordingProxyListener;
-class IMemory;
 class Parcel;
 
 /*
- * The purpose of ICameraRecordingProxy and ICameraRecordingProxyListener is to
- * allow applications using the camera during recording.
+ * The purpose of ICameraRecordingProxy is to
+ * allow applications to use the camera during recording with the old camera API.
  *
  * Camera service allows only one client at a time. Since camcorder application
  * needs to own the camera to do things like zoom, the media recorder cannot
@@ -42,35 +40,29 @@
  * ICameraRecordingProxy
  *   startRecording()
  *   stopRecording()
- *   releaseRecordingFrame()
  *
- * ICameraRecordingProxyListener
- *   dataCallbackTimestamp()
-
  * The camcorder app opens the camera and starts the preview. The app passes
  * ICamera and ICameraRecordingProxy to the media recorder by
  * MediaRecorder::setCamera(). The recorder uses ICamera to setup the camera in
  * MediaRecorder::start(). After setup, the recorder disconnects from camera
- * service. The recorder calls ICameraRecordingProxy::startRecording() and
- * passes a ICameraRecordingProxyListener to the app. The app connects back to
- * camera service and starts the recording. The app owns the camera and can do
- * things like zoom. The media recorder receives the video frames from the
- * listener and releases them by ICameraRecordingProxy::releaseRecordingFrame.
- * The recorder calls ICameraRecordingProxy::stopRecording() to stop the
- * recording.
+ * service. The recorder calls ICameraRecordingProxy::startRecording() and The
+ * app owns the camera and can do things like zoom. The media recorder receives
+ * the video frames via a buffer queue.  The recorder calls
+ * ICameraRecordingProxy::stopRecording() to stop the recording.
  *
  * The call sequences are as follows:
  * 1. The app: Camera.unlock().
  * 2. The app: MediaRecorder.setCamera().
  * 3. Start recording
  *    (1) The app: MediaRecorder.start().
- *    (2) The recorder: ICamera.unlock() and ICamera.disconnect().
- *    (3) The recorder: ICameraRecordingProxy.startRecording().
- *    (4) The app: ICamera.reconnect().
- *    (5) The app: ICamera.startRecording().
+ *    (2) The recorder: ICamera.setVideoTarget(buffer queue).
+ *    (3) The recorder: ICamera.unlock() and ICamera.disconnect().
+ *    (4) The recorder: ICameraRecordingProxy.startRecording().
+ *    (5) The app: ICamera.reconnect().
+ *    (6) The app: ICamera.startRecording().
  * 4. During recording
- *    (1) The recorder: receive frames from ICameraRecordingProxyListener.dataCallbackTimestamp()
- *    (2) The recorder: release frames by ICameraRecordingProxy.releaseRecordingFrame().
+ *    (1) The recorder: receive frames via a buffer queue
+ *    (2) The recorder: release frames via a buffer queue
  * 5. Stop recording
  *    (1) The app: MediaRecorder.stop()
  *    (2) The recorder: ICameraRecordingProxy.stopRecording().
@@ -82,12 +74,8 @@
 public:
     DECLARE_META_INTERFACE(CameraRecordingProxy);
 
-    virtual status_t        startRecording(const sp<ICameraRecordingProxyListener>& listener) = 0;
+    virtual status_t        startRecording() = 0;
     virtual void            stopRecording() = 0;
-    virtual void            releaseRecordingFrame(const sp<IMemory>& mem) = 0;
-    virtual void            releaseRecordingFrameHandle(native_handle_t *handle) = 0;
-    virtual void            releaseRecordingFrameHandleBatch(
-                                    const std::vector<native_handle_t*>& handles) = 0;
 };
 
 // ----------------------------------------------------------------------------
diff --git a/camera/include/camera/ICameraRecordingProxyListener.h b/camera/include/camera/ICameraRecordingProxyListener.h
deleted file mode 100644
index da03c56..0000000
--- a/camera/include/camera/ICameraRecordingProxyListener.h
+++ /dev/null
@@ -1,61 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ANDROID_HARDWARE_ICAMERA_RECORDING_PROXY_LISTENER_H
-#define ANDROID_HARDWARE_ICAMERA_RECORDING_PROXY_LISTENER_H
-
-#include <vector>
-#include <binder/IInterface.h>
-#include <cutils/native_handle.h>
-#include <stdint.h>
-#include <utils/RefBase.h>
-#include <utils/Timers.h>
-
-namespace android {
-
-class Parcel;
-class IMemory;
-
-class ICameraRecordingProxyListener: public IInterface
-{
-public:
-    DECLARE_META_INTERFACE(CameraRecordingProxyListener);
-
-    virtual void dataCallbackTimestamp(nsecs_t timestamp, int32_t msgType,
-                                       const sp<IMemory>& data) = 0;
-
-    virtual void recordingFrameHandleCallbackTimestamp(nsecs_t timestamp,
-                                                       native_handle_t* handle) = 0;
-
-    virtual void recordingFrameHandleCallbackTimestampBatch(
-            const std::vector<nsecs_t>& timestamps,
-            const std::vector<native_handle_t*>& handles) = 0;
-};
-
-// ----------------------------------------------------------------------------
-
-class BnCameraRecordingProxyListener: public BnInterface<ICameraRecordingProxyListener>
-{
-public:
-    virtual status_t    onTransact( uint32_t code,
-                                    const Parcel& data,
-                                    Parcel* reply,
-                                    uint32_t flags = 0);
-};
-
-}; // namespace android
-
-#endif
diff --git a/camera/ndk/impl/ACameraMetadata.cpp b/camera/ndk/impl/ACameraMetadata.cpp
index bfa60d9..631f6cd 100644
--- a/camera/ndk/impl/ACameraMetadata.cpp
+++ b/camera/ndk/impl/ACameraMetadata.cpp
@@ -527,6 +527,7 @@
         case ACAMERA_LENS_OPTICAL_STABILIZATION_MODE:
         case ACAMERA_NOISE_REDUCTION_MODE:
         case ACAMERA_SCALER_CROP_REGION:
+        case ACAMERA_SCALER_ROTATE_AND_CROP:
         case ACAMERA_SENSOR_EXPOSURE_TIME:
         case ACAMERA_SENSOR_FRAME_DURATION:
         case ACAMERA_SENSOR_SENSITIVITY:
diff --git a/camera/ndk/include/camera/NdkCameraMetadataTags.h b/camera/ndk/include/camera/NdkCameraMetadataTags.h
index 7d78571..96dc541 100644
--- a/camera/ndk/include/camera/NdkCameraMetadataTags.h
+++ b/camera/ndk/include/camera/NdkCameraMetadataTags.h
@@ -3730,6 +3730,108 @@
     ACAMERA_SCALER_AVAILABLE_RECOMMENDED_INPUT_OUTPUT_FORMATS_MAP = 
                                                                 // int32
             ACAMERA_SCALER_START + 15,
+    /**
+     * <p>List of rotate-and-crop modes for ACAMERA_SCALER_ROTATE_AND_CROP that are supported by this camera device.</p>
+     *
+     * @see ACAMERA_SCALER_ROTATE_AND_CROP
+     *
+     * <p>Type: byte[n]</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+     * </ul></p>
+     *
+     * <p>This entry lists the valid modes for ACAMERA_SCALER_ROTATE_AND_CROP for this camera device.</p>
+     * <p>Starting with API level 30, all devices will list at least <code>ROTATE_AND_CROP_NONE</code>.
+     * Devices with support for rotate-and-crop will additionally list at least
+     * <code>ROTATE_AND_CROP_AUTO</code> and <code>ROTATE_AND_CROP_90</code>.</p>
+     *
+     * @see ACAMERA_SCALER_ROTATE_AND_CROP
+     */
+    ACAMERA_SCALER_AVAILABLE_ROTATE_AND_CROP_MODES =            // byte[n]
+            ACAMERA_SCALER_START + 16,
+    /**
+     * <p>Whether a rotation-and-crop operation is applied to processed
+     * outputs from the camera.</p>
+     *
+     * <p>Type: byte (acamera_metadata_enum_android_scaler_rotate_and_crop_t)</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
+     *   <li>ACaptureRequest</li>
+     * </ul></p>
+     *
+     * <p>This control is primarily intended to help camera applications with no support for
+     * multi-window modes to work correctly on devices where multi-window scenarios are
+     * unavoidable, such as foldables or other devices with variable display geometry or more
+     * free-form window placement (such as laptops, which often place portrait-orientation apps
+     * in landscape with pillarboxing).</p>
+     * <p>If supported, the default value is <code>ROTATE_AND_CROP_AUTO</code>, which allows the camera API
+     * to enable backwards-compatibility support for applications that do not support resizing
+     * / multi-window modes, when the device is in fact in a multi-window mode (such as inset
+     * portrait on laptops, or on a foldable device in some fold states).  In addition,
+     * <code>ROTATE_AND_CROP_NONE</code> and <code>ROTATE_AND_CROP_90</code> will always be available if this control
+     * is supported by the device.  If not supported, devices API level 30 or higher will always
+     * list only <code>ROTATE_AND_CROP_NONE</code>.</p>
+     * <p>When <code>CROP_AUTO</code> is in use, and the camera API activates backward-compatibility mode,
+     * several metadata fields will also be parsed differently to ensure that coordinates are
+     * correctly handled for features like drawing face detection boxes or passing in
+     * tap-to-focus coordinates.  The camera API will convert positions in the active array
+     * coordinate system to/from the cropped-and-rotated coordinate system to make the
+     * operation transparent for applications.  The following controls are affected:</p>
+     * <ul>
+     * <li>ACAMERA_CONTROL_AE_REGIONS</li>
+     * <li>ACAMERA_CONTROL_AF_REGIONS</li>
+     * <li>ACAMERA_CONTROL_AWB_REGIONS</li>
+     * <li>android.statistics.faces</li>
+     * </ul>
+     * <p>Capture results will contain the actual value selected by the API;
+     * <code>ROTATE_AND_CROP_AUTO</code> will never be seen in a capture result.</p>
+     * <p>Applications can also select their preferred cropping mode, either to opt out of the
+     * backwards-compatibility treatment, or to use the cropping feature themselves as needed.
+     * In this case, no coordinate translation will be done automatically, and all controls
+     * will continue to use the normal active array coordinates.</p>
+     * <p>Cropping and rotating is done after the application of digital zoom (via either
+     * ACAMERA_SCALER_CROP_REGION or ACAMERA_CONTROL_ZOOM_RATIO), but before each individual
+     * output is further cropped and scaled. It only affects processed outputs such as
+     * YUV, PRIVATE, and JPEG.  It has no effect on RAW outputs.</p>
+     * <p>When <code>CROP_90</code> or <code>CROP_270</code> are selected, there is a significant loss to the field of
+     * view. For example, with a 4:3 aspect ratio output of 1600x1200, <code>CROP_90</code> will still
+     * produce 1600x1200 output, but these buffers are cropped from a vertical 3:4 slice at the
+     * center of the 4:3 area, then rotated to be 4:3, and then upscaled to 1600x1200.  Only
+     * 56.25% of the original FOV is still visible.  In general, for an aspect ratio of <code>w:h</code>,
+     * the crop and rotate operation leaves <code>(h/w)^2</code> of the field of view visible. For 16:9,
+     * this is ~31.6%.</p>
+     * <p>As a visual example, the figure below shows the effect of <code>ROTATE_AND_CROP_90</code> on the
+     * outputs for the following parameters:</p>
+     * <ul>
+     * <li>Sensor active array: <code>2000x1500</code></li>
+     * <li>Crop region: top-left: <code>(500, 375)</code>, size: <code>(1000, 750)</code> (4:3 aspect ratio)</li>
+     * <li>Output streams: YUV <code>640x480</code> and YUV <code>1280x720</code></li>
+     * <li><code>ROTATE_AND_CROP_90</code></li>
+     * </ul>
+     * <p><img alt="Effect of ROTATE_AND_CROP_90" src="../images/camera2/metadata/android.scaler.rotateAndCrop/crop-region-rotate-90-43-ratio.png" /></p>
+     * <p>With these settings, the regions of the active array covered by the output streams are:</p>
+     * <ul>
+     * <li>640x480 stream crop: top-left: <code>(219, 375)</code>, size: <code>(562, 750)</code></li>
+     * <li>1280x720 stream crop: top-left: <code>(289, 375)</code>, size: <code>(422, 750)</code></li>
+     * </ul>
+     * <p>Since the buffers are rotated, the buffers as seen by the application are:</p>
+     * <ul>
+     * <li>640x480 stream: top-left: <code>(781, 375)</code> on active array, size: <code>(640, 480)</code>, downscaled 1.17x from sensor pixels</li>
+     * <li>1280x720 stream: top-left: <code>(711, 375)</code> on active array, size: <code>(1280, 720)</code>, upscaled 1.71x from sensor pixels</li>
+     * </ul>
+     *
+     * @see ACAMERA_CONTROL_AE_REGIONS
+     * @see ACAMERA_CONTROL_AF_REGIONS
+     * @see ACAMERA_CONTROL_AWB_REGIONS
+     * @see ACAMERA_CONTROL_ZOOM_RATIO
+     * @see ACAMERA_SCALER_CROP_REGION
+     */
+    ACAMERA_SCALER_ROTATE_AND_CROP =                            // byte (acamera_metadata_enum_android_scaler_rotate_and_crop_t)
+            ACAMERA_SCALER_START + 17,
     ACAMERA_SCALER_END,
 
     /**
@@ -8292,6 +8394,51 @@
 
 } acamera_metadata_enum_android_scaler_available_recommended_stream_configurations_t;
 
+// ACAMERA_SCALER_ROTATE_AND_CROP
+typedef enum acamera_metadata_enum_acamera_scaler_rotate_and_crop {
+    /**
+     * <p>No rotate and crop is applied. Processed outputs are in the sensor orientation.</p>
+     */
+    ACAMERA_SCALER_ROTATE_AND_CROP_NONE                              = 0,
+
+    /**
+     * <p>Processed images are rotated by 90 degrees clockwise, and then cropped
+     * to the original aspect ratio.</p>
+     */
+    ACAMERA_SCALER_ROTATE_AND_CROP_90                                = 1,
+
+    /**
+     * <p>Processed images are rotated by 180 degrees.  Since the aspect ratio does not
+     * change, no cropping is performed.</p>
+     */
+    ACAMERA_SCALER_ROTATE_AND_CROP_180                               = 2,
+
+    /**
+     * <p>Processed images are rotated by 270 degrees clockwise, and then cropped
+     * to the original aspect ratio.</p>
+     */
+    ACAMERA_SCALER_ROTATE_AND_CROP_270                               = 3,
+
+    /**
+     * <p>The camera API automatically selects the best concrete value for
+     * rotate-and-crop based on the application's support for resizability and the current
+     * multi-window mode.</p>
+     * <p>If the application does not support resizing but the display mode for its main
+     * Activity is not in a typical orientation, the camera API will set <code>ROTATE_AND_CROP_90</code>
+     * or some other supported rotation value, depending on device configuration,
+     * to ensure preview and captured images are correctly shown to the user. Otherwise,
+     * <code>ROTATE_AND_CROP_NONE</code> will be selected.</p>
+     * <p>When a value other than NONE is selected, several metadata fields will also be parsed
+     * differently to ensure that coordinates are correctly handled for features like drawing
+     * face detection boxes or passing in tap-to-focus coordinates.  The camera API will
+     * convert positions in the active array coordinate system to/from the cropped-and-rotated
+     * coordinate system to make the operation transparent for applications.</p>
+     * <p>No coordinate mapping will be done when the application selects a non-AUTO mode.</p>
+     */
+    ACAMERA_SCALER_ROTATE_AND_CROP_AUTO                              = 4,
+
+} acamera_metadata_enum_android_scaler_rotate_and_crop_t;
+
 
 // ACAMERA_SENSOR_REFERENCE_ILLUMINANT1
 typedef enum acamera_metadata_enum_acamera_sensor_reference_illuminant1 {
diff --git a/camera/ndk/include/camera/NdkCameraWindowType.h b/camera/ndk/include/camera/NdkCameraWindowType.h
index 99f67e9..df977da 100644
--- a/camera/ndk/include/camera/NdkCameraWindowType.h
+++ b/camera/ndk/include/camera/NdkCameraWindowType.h
@@ -44,7 +44,7 @@
  */
 #ifdef __ANDROID_VNDK__
 #include <cutils/native_handle.h>
-typedef native_handle_t ACameraWindowType;
+typedef const native_handle_t ACameraWindowType;
 #else
 #include <android/native_window.h>
 typedef ANativeWindow ACameraWindowType;
diff --git a/camera/ndk/ndk_vendor/impl/ACameraCaptureSessionVendor.h b/camera/ndk/ndk_vendor/impl/ACameraCaptureSessionVendor.h
index e1af8c1..5a1af79 100644
--- a/camera/ndk/ndk_vendor/impl/ACameraCaptureSessionVendor.h
+++ b/camera/ndk/ndk_vendor/impl/ACameraCaptureSessionVendor.h
@@ -18,7 +18,7 @@
 #include "utils.h"
 
 struct ACaptureSessionOutput {
-    explicit ACaptureSessionOutput(native_handle_t* window, bool isShared = false,
+    explicit ACaptureSessionOutput(const native_handle_t* window, bool isShared = false,
             const char* physicalCameraId = "") :
             mWindow(window), mIsShared(isShared), mPhysicalCameraId(physicalCameraId) {};
 
diff --git a/camera/ndk/ndk_vendor/impl/ACameraDevice.cpp b/camera/ndk/ndk_vendor/impl/ACameraDevice.cpp
index e511a3f..0fcb700 100644
--- a/camera/ndk/ndk_vendor/impl/ACameraDevice.cpp
+++ b/camera/ndk/ndk_vendor/impl/ACameraDevice.cpp
@@ -355,7 +355,7 @@
     std::vector<int32_t> requestStreamIdxList;
     std::vector<int32_t> requestSurfaceIdxList;
     for (auto outputTarget : request->targets->mOutputs) {
-        native_handle_t* anw = outputTarget.mWindow;
+        const native_handle_t* anw = outputTarget.mWindow;
         bool found = false;
         req->mSurfaceList.push_back(anw);
         // lookup stream/surface ID
@@ -434,7 +434,7 @@
     }
     pRequest->targets = new ACameraOutputTargets();
     for (size_t i = 0; i < req->mSurfaceList.size(); i++) {
-        native_handle_t* anw = req->mSurfaceList[i];
+        const native_handle_t* anw = req->mSurfaceList[i];
         ACameraOutputTarget outputTarget(anw);
         pRequest->targets->mOutputs.insert(outputTarget);
     }
@@ -611,7 +611,7 @@
 
     std::set<std::pair<native_handle_ptr_wrapper, OutputConfigurationWrapper>> outputSet;
     for (auto outConfig : outputs->mOutputs) {
-        native_handle_t* anw = outConfig.mWindow;
+        const native_handle_t* anw = outConfig.mWindow;
         OutputConfigurationWrapper outConfigInsertW;
         OutputConfiguration &outConfigInsert = outConfigInsertW.mOutputConfiguration;
         outConfigInsert.rotation = utils::convertToHidl(outConfig.mRotation);
@@ -846,8 +846,7 @@
             for (auto streamAndWindowId : request->mCaptureRequest.streamAndWindowIds) {
                 int32_t windowId = streamAndWindowId.windowId;
                 if (utils::isWindowNativeHandleEqual(windowHandles[windowId],outHandle)) {
-                    native_handle_t* anw =
-                        const_cast<native_handle_t *>(windowHandles[windowId].getNativeHandle());
+                    const native_handle_t* anw = windowHandles[windowId].getNativeHandle();
                     ALOGV("Camera %s Lost output buffer for ANW %p frame %" PRId64,
                             getId(), anw, frameNumber);
 
@@ -1244,7 +1243,7 @@
                         return;
                     }
 
-                    native_handle_t* anw;
+                    const native_handle_t* anw;
                     found = msg->findPointer(kAnwKey, (void**) &anw);
                     if (!found) {
                         ALOGE("%s: Cannot find native_handle_t!", __FUNCTION__);
diff --git a/camera/ndk/ndk_vendor/impl/ACaptureRequestVendor.h b/camera/ndk/ndk_vendor/impl/ACaptureRequestVendor.h
index ed67615..5715d77 100644
--- a/camera/ndk/ndk_vendor/impl/ACaptureRequestVendor.h
+++ b/camera/ndk/ndk_vendor/impl/ACaptureRequestVendor.h
@@ -17,7 +17,7 @@
 #include "utils.h"
 
 struct ACameraOutputTarget {
-    explicit ACameraOutputTarget(native_handle_t* window) : mWindow(window) {};
+    explicit ACameraOutputTarget(const native_handle_t* window) : mWindow(window) {};
 
     bool operator == (const ACameraOutputTarget& other) const {
         return mWindow == other.mWindow;
diff --git a/camera/ndk/ndk_vendor/impl/utils.h b/camera/ndk/ndk_vendor/impl/utils.h
index f389f03..6f5820e 100644
--- a/camera/ndk/ndk_vendor/impl/utils.h
+++ b/camera/ndk/ndk_vendor/impl/utils.h
@@ -42,7 +42,7 @@
 // Utility class so that CaptureRequest can be stored by sp<>
 struct CaptureRequest : public RefBase {
   frameworks::cameraservice::device::V2_0::CaptureRequest mCaptureRequest;
-  std::vector<native_handle_t *> mSurfaceList;
+  std::vector<const native_handle_t *> mSurfaceList;
   //Physical camera settings metadata is stored here, since the capture request
   //might not contain it. That's since, fmq might have consumed it.
   hidl_vec<PhysicalCameraSettings> mPhysicalCameraSettings;
@@ -62,13 +62,13 @@
 // Utility class so the native_handle_t can be compared with  its contents instead
 // of just raw pointer comparisons.
 struct native_handle_ptr_wrapper {
-    native_handle_t *mWindow = nullptr;
+    const native_handle_t *mWindow = nullptr;
 
-    native_handle_ptr_wrapper(native_handle_t *nh) : mWindow(nh) { }
+    native_handle_ptr_wrapper(const native_handle_t *nh) : mWindow(nh) { }
 
     native_handle_ptr_wrapper() = default;
 
-    operator native_handle_t *() const { return mWindow; }
+    operator const native_handle_t *() const { return mWindow; }
 
     bool operator ==(const native_handle_ptr_wrapper other) const {
         return isWindowNativeHandleEqual(mWindow, other.mWindow);
diff --git a/camera/ndk/ndk_vendor/tests/AImageReaderVendorTest.cpp b/camera/ndk/ndk_vendor/tests/AImageReaderVendorTest.cpp
index 938b5f5..ba14c5c 100644
--- a/camera/ndk/ndk_vendor/tests/AImageReaderVendorTest.cpp
+++ b/camera/ndk/ndk_vendor/tests/AImageReaderVendorTest.cpp
@@ -50,7 +50,7 @@
 static constexpr int kTestImageFormat = AIMAGE_FORMAT_YUV_420_888;
 
 using android::hardware::camera::common::V1_0::helper::VendorTagDescriptorCache;
-using ConfiguredWindows = std::set<native_handle_t *>;
+using ConfiguredWindows = std::set<const native_handle_t *>;
 
 class CameraHelper {
    public:
@@ -60,11 +60,11 @@
 
     struct PhysicalImgReaderInfo {
         const char* physicalCameraId;
-        native_handle_t* anw;
+        const native_handle_t* anw;
     };
 
     // Retaining the error code in case the caller needs to analyze it.
-    std::variant<int, ConfiguredWindows> initCamera(native_handle_t* imgReaderAnw,
+    std::variant<int, ConfiguredWindows> initCamera(const native_handle_t* imgReaderAnw,
             const std::vector<PhysicalImgReaderInfo>& physicalImgReaders,
             bool usePhysicalSettings) {
         ConfiguredWindows configuredWindows;
@@ -257,7 +257,7 @@
     ACameraDevice_StateCallbacks mDeviceCb{this, nullptr, nullptr};
     ACameraCaptureSession_stateCallbacks mSessionCb{ this, nullptr, nullptr, nullptr};
 
-    native_handle_t* mImgReaderAnw = nullptr;  // not owned by us.
+    const native_handle_t* mImgReaderAnw = nullptr;  // not owned by us.
 
     // Camera device
     ACameraDevice* mDevice = nullptr;
@@ -396,7 +396,7 @@
         return 0;
     }
 
-    native_handle_t* getNativeWindow() { return mImgReaderAnw; }
+    const native_handle_t* getNativeWindow() { return mImgReaderAnw; }
 
     int getAcquiredImageCount() {
         std::lock_guard<std::mutex> lock(mMutex);
diff --git a/cmds/stagefright/record.cpp b/cmds/stagefright/record.cpp
index 37091c4..098c278 100644
--- a/cmds/stagefright/record.cpp
+++ b/cmds/stagefright/record.cpp
@@ -259,31 +259,6 @@
     printf("$\n");
 #endif
 
-#if 0
-    CameraSource *source = CameraSource::Create(
-            String16(argv[0], strlen(argv[0])));
-    source->start();
-
-    printf("source = %p\n", source);
-
-    for (int i = 0; i < 100; ++i) {
-        MediaBuffer *buffer;
-        status_t err = source->read(&buffer);
-        CHECK_EQ(err, (status_t)OK);
-
-        printf("got a frame, data=%p, size=%d\n",
-               buffer->data(), buffer->range_length());
-
-        buffer->release();
-        buffer = NULL;
-    }
-
-    err = source->stop();
-
-    delete source;
-    source = NULL;
-#endif
-
     if (err != OK && err != ERROR_END_OF_STREAM) {
         fprintf(stderr, "record failed: %d\n", err);
         return 1;
diff --git a/media/TEST_MAPPING b/media/TEST_MAPPING
index a6dfb21..b006f38 100644
--- a/media/TEST_MAPPING
+++ b/media/TEST_MAPPING
@@ -1,32 +1,71 @@
+// for frameworks/av/media
 {
-  "presubmit": [
-    {
-      "name": "GtsMediaTestCases",
-      "options" : [
+    "presubmit": [
+        // runs whenever we change something in this tree
         {
-          "include-annotation": "android.platform.test.annotations.Presubmit"
+            "name": "CtsMediaTestCases",
+            "options": [
+                {
+                    "include-filter": "android.media.cts.EncodeDecodeTest"
+                }
+            ]
         },
         {
-          "include-filter": "com.google.android.media.gts.WidevineGenericOpsTests"
-        }
-      ]
-    },
-    {
-      "name": "GtsExoPlayerTestCases",
-      "options" : [
-        {
-          "include-annotation": "android.platform.test.annotations.SocPresubmit"
+            "name": "CtsMediaTestCases",
+            "options": [
+                {
+                    "include-filter": "android.media.cts.DecodeEditEncodeTest"
+                }
+            ]
         },
         {
-          "include-filter": "com.google.android.exoplayer.gts.DashTest#testWidevine23FpsH264Fixed"
+            "name": "GtsMediaTestCases",
+            "options" : [
+                {
+                    "include-annotation": "android.platform.test.annotations.Presubmit"
+                },
+                {
+                    "include-filter": "com.google.android.media.gts.WidevineGenericOpsTests"
+                }
+            ]
+        },
+        {
+            "name": "GtsExoPlayerTestCases",
+            "options" : [
+                {
+                    "include-annotation": "android.platform.test.annotations.SocPresubmit"
+                },
+                {
+                    "include-filter": "com.google.android.exoplayer.gts.DashTest#testWidevine23FpsH264Fixed"
+                }
+            ]
         }
-      ]
-    }
-  ],
-  "imports": [
-    {
-      "path": "frameworks/av/drm/mediadrm/plugins"
-    }
-  ]
-}
+    ],
 
+    "imports": [
+        {
+            "path": "frameworks/av/drm/mediadrm/plugins"
+        }
+    ],
+
+    "platinum-postsubmit": [
+        // runs regularly, independent of changes in this tree.
+        // signals if changes elsewhere break media functionality
+        {
+            "name": "CtsMediaTestCases",
+            "options": [
+                {
+                    "include-filter": "android.media.cts.EncodeDecodeTest"
+                }
+            ]
+        },
+        {
+            "name": "CtsMediaTestCases",
+            "options": [
+                {
+                    "include-filter": "android.media.cts.DecodeEditEncodeTest"
+                }
+            ]
+        }
+    ]
+}
diff --git a/media/audioserver/Android.bp b/media/audioserver/Android.bp
new file mode 100644
index 0000000..ca3c81c
--- /dev/null
+++ b/media/audioserver/Android.bp
@@ -0,0 +1,58 @@
+cc_binary {
+    name: "audioserver",
+
+    srcs: [
+        "main_audioserver.cpp",
+    ],
+
+    cflags: [
+        "-Wall",
+        "-Werror",
+    ],
+
+    header_libs: [
+        "libaudiohal_headers",
+        "libmediametrics_headers",
+    ],
+
+    shared_libs: [
+        "libaaudioservice",
+        "libaudioflinger",
+        "libaudiopolicyservice",
+        "libaudioprocessing",
+        "libbinder",
+        "libcutils",
+        "libhidlbase",
+        "liblog",
+        "libmedia",
+        "libmedialogservice",
+        "libmediautils",
+        "libnbaio",
+        "libnblog",
+        "libpowermanager",
+        "libutils",
+        "libvibrator",
+
+    ],
+
+    // TODO check if we still need all of these include directories
+    include_dirs: [
+        "external/sonic",
+        "frameworks/av/media/libaaudio/include",
+        "frameworks/av/media/libaaudio/src",
+        "frameworks/av/media/libaaudio/src/binding",
+        "frameworks/av/media/libmedia/include",
+        "frameworks/av/services/audioflinger",
+        "frameworks/av/services/audiopolicy",
+        "frameworks/av/services/audiopolicy/common/include",
+        "frameworks/av/services/audiopolicy/common/managerdefinitions/include",
+        "frameworks/av/services/audiopolicy/engine/interface",
+        "frameworks/av/services/audiopolicy/service",
+        "frameworks/av/services/medialog",
+
+        // TODO oboeservice is the old folder name for aaudioservice. It will be changed.
+        "frameworks/av/services/oboeservice",
+    ],
+
+    init_rc: ["audioserver.rc"],
+}
diff --git a/media/audioserver/Android.mk b/media/audioserver/Android.mk
deleted file mode 100644
index cf1c14c..0000000
--- a/media/audioserver/Android.mk
+++ /dev/null
@@ -1,51 +0,0 @@
-LOCAL_PATH:= $(call my-dir)
-
-include $(CLEAR_VARS)
-
-LOCAL_SRC_FILES := \
-	main_audioserver.cpp \
-
-LOCAL_SHARED_LIBRARIES := \
-	libaaudioservice \
-	libaudioflinger \
-	libaudiopolicyservice \
-	libaudioprocessing \
-	libbinder \
-	libcutils \
-	liblog \
-	libhidlbase \
-	libmedia \
-	libmedialogservice \
-	libmediautils \
-	libnbaio \
-	libnblog \
-	libutils \
-	libvibrator
-
-LOCAL_HEADER_LIBRARIES := \
-	libaudiohal_headers \
-	libmediametrics_headers \
-
-# TODO oboeservice is the old folder name for aaudioservice. It will be changed.
-LOCAL_C_INCLUDES := \
-	frameworks/av/services/audioflinger \
-	frameworks/av/services/audiopolicy \
-	frameworks/av/services/audiopolicy/common/managerdefinitions/include \
-	frameworks/av/services/audiopolicy/common/include \
-	frameworks/av/services/audiopolicy/engine/interface \
-	frameworks/av/services/audiopolicy/service \
-	frameworks/av/services/medialog \
-	frameworks/av/services/oboeservice \
-	frameworks/av/media/libaaudio/include \
-	frameworks/av/media/libaaudio/src \
-	frameworks/av/media/libaaudio/src/binding \
-	frameworks/av/media/libmedia/include \
-	external/sonic \
-
-LOCAL_MODULE := audioserver
-
-LOCAL_INIT_RC := audioserver.rc
-
-LOCAL_CFLAGS := -Werror -Wall
-
-include $(BUILD_EXECUTABLE)
diff --git a/media/audioserver/audioserver.rc b/media/audioserver/audioserver.rc
index f75e4c7..c4a6601 100644
--- a/media/audioserver/audioserver.rc
+++ b/media/audioserver/audioserver.rc
@@ -8,6 +8,7 @@
     task_profiles ProcessCapacityHigh HighPerformance
     onrestart restart vendor.audio-hal
     onrestart restart vendor.audio-hal-4-0-msd
+    onrestart restart audio_proxy_service
     # Keep the original service names for backward compatibility
     onrestart restart vendor.audio-hal-2-0
     onrestart restart audio-hal-2-0
@@ -20,6 +21,7 @@
 on property:init.svc.audioserver=stopped
     stop vendor.audio-hal
     stop vendor.audio-hal-4-0-msd
+    stop audio_proxy_service
     # Keep the original service names for backward compatibility
     stop vendor.audio-hal-2-0
     stop audio-hal-2-0
@@ -28,6 +30,7 @@
     # audioserver bringing it back into running state.
     start vendor.audio-hal
     start vendor.audio-hal-4-0-msd
+    start audio_proxy_service
     # Keep the original service names for backward compatibility
     start vendor.audio-hal-2-0
     start audio-hal-2-0
@@ -35,6 +38,7 @@
 on property:init.svc.audioserver=running
     start vendor.audio-hal
     start vendor.audio-hal-4-0-msd
+    start audio_proxy_service
     # Keep the original service names for backward compatibility
     start vendor.audio-hal-2-0
     start audio-hal-2-0
@@ -44,10 +48,12 @@
     # Keep the original service names for backward compatibility
     stop vendor.audio-hal
     stop vendor.audio-hal-4-0-msd
+    stop audio_proxy_service
     stop vendor.audio-hal-2-0
     stop audio-hal-2-0
     start vendor.audio-hal
     start vendor.audio-hal-4-0-msd
+    start audio_proxy_service
     start vendor.audio-hal-2-0
     start audio-hal-2-0
     # reset the property
diff --git a/media/audioserver/main_audioserver.cpp b/media/audioserver/main_audioserver.cpp
index f9f4f31..533d330 100644
--- a/media/audioserver/main_audioserver.cpp
+++ b/media/audioserver/main_audioserver.cpp
@@ -29,8 +29,8 @@
 #include <mediautils/LimitProcessMemory.h>
 #include <utils/Log.h>
 
-// from LOCAL_C_INCLUDES
-#include "aaudio/AAudioTesting.h"
+// from include_dirs
+#include "aaudio/AAudioTesting.h" // aaudio_policy_t, AAUDIO_PROP_MMAP_POLICY, AAUDIO_POLICY_*
 #include "AudioFlinger.h"
 #include "AudioPolicyService.h"
 #include "AAudioService.h"
diff --git a/media/bufferpool/1.0/TEST_MAPPING b/media/bufferpool/1.0/TEST_MAPPING
new file mode 100644
index 0000000..a1e6a58
--- /dev/null
+++ b/media/bufferpool/1.0/TEST_MAPPING
@@ -0,0 +1,8 @@
+// mappings for frameworks/av/media/bufferpool/1.0
+{
+  "presubmit": [
+
+    { "name": "VtsVndkHidlBufferpoolV1_0TargetSingleTest" },
+    { "name": "VtsVndkHidlBufferpoolV1_0TargetMultiTest"}
+  ]
+}
diff --git a/media/bufferpool/1.0/vts/Android.bp b/media/bufferpool/1.0/vts/Android.bp
index ee5a757..691ed40 100644
--- a/media/bufferpool/1.0/vts/Android.bp
+++ b/media/bufferpool/1.0/vts/Android.bp
@@ -16,6 +16,7 @@
 
 cc_test {
     name: "VtsVndkHidlBufferpoolV1_0TargetSingleTest",
+    test_suites: ["device-tests"],
     defaults: ["VtsHalTargetTestDefaults"],
     srcs: [
         "allocator.cpp",
@@ -34,6 +35,7 @@
 
 cc_test {
     name: "VtsVndkHidlBufferpoolV1_0TargetMultiTest",
+    test_suites: ["device-tests"],
     defaults: ["VtsHalTargetTestDefaults"],
     srcs: [
         "allocator.cpp",
diff --git a/media/bufferpool/2.0/TEST_MAPPING b/media/bufferpool/2.0/TEST_MAPPING
new file mode 100644
index 0000000..65dee2c
--- /dev/null
+++ b/media/bufferpool/2.0/TEST_MAPPING
@@ -0,0 +1,7 @@
+// mappings for frameworks/av/media/bufferpool/2.0
+{
+  "presubmit": [
+    { "name": "VtsVndkHidlBufferpoolV2_0TargetSingleTest"},
+    { "name": "VtsVndkHidlBufferpoolV2_0TargetMultiTest"}
+  ]
+}
diff --git a/media/bufferpool/2.0/tests/Android.bp b/media/bufferpool/2.0/tests/Android.bp
index 8b44f61..8492939 100644
--- a/media/bufferpool/2.0/tests/Android.bp
+++ b/media/bufferpool/2.0/tests/Android.bp
@@ -16,6 +16,7 @@
 
 cc_test {
     name: "VtsVndkHidlBufferpoolV2_0TargetSingleTest",
+    test_suites: ["device-tests"],
     defaults: ["VtsHalTargetTestDefaults"],
     srcs: [
         "allocator.cpp",
@@ -34,6 +35,7 @@
 
 cc_test {
     name: "VtsVndkHidlBufferpoolV2_0TargetMultiTest",
+    test_suites: ["device-tests"],
     defaults: ["VtsHalTargetTestDefaults"],
     srcs: [
         "allocator.cpp",
diff --git a/media/codec2/TEST_MAPPING b/media/codec2/TEST_MAPPING
index 8afa1a8..fca3477 100644
--- a/media/codec2/TEST_MAPPING
+++ b/media/codec2/TEST_MAPPING
@@ -1,5 +1,10 @@
 {
   "presubmit": [
+    // TODO failing 4 of 13
+    // { "name": "codec2_core_param_test"},
+    // TODO(b/155516524)
+    // { "name": "codec2_vndk_interface_test"},
+    { "name": "codec2_vndk_test"},
     {
       "name": "CtsMediaTestCases",
       "options": [
diff --git a/media/codec2/sfplugin/CCodecBufferChannel.cpp b/media/codec2/sfplugin/CCodecBufferChannel.cpp
index 0626c8d..369087d 100644
--- a/media/codec2/sfplugin/CCodecBufferChannel.cpp
+++ b/media/codec2/sfplugin/CCodecBufferChannel.cpp
@@ -1718,7 +1718,13 @@
         }
     }
 
-    if (notifyClient && !buffer && !flags) {
+    bool drop = false;
+    if (worklet->output.flags & C2FrameData::FLAG_DROP_FRAME) {
+        ALOGV("[%s] onWorkDone: drop buffer but keep metadata", mName);
+        drop = true;
+    }
+
+    if (notifyClient && !buffer && !flags && !(drop && outputFormat)) {
         ALOGV("[%s] onWorkDone: Not reporting output buffer (%lld)",
               mName, work->input.ordinal.frameIndex.peekull());
         notifyClient = false;
@@ -1745,7 +1751,7 @@
             return false;
         }
         output->buffers->pushToStash(
-                buffer,
+                drop ? nullptr : buffer,
                 notifyClient,
                 timestamp.peek(),
                 flags,
diff --git a/media/codec2/sfplugin/TEST_MAPPING b/media/codec2/sfplugin/TEST_MAPPING
new file mode 100644
index 0000000..045e5b5
--- /dev/null
+++ b/media/codec2/sfplugin/TEST_MAPPING
@@ -0,0 +1,12 @@
+// mappings for frameworks/av/media/codec2/sfplugin
+{
+  "presubmit": [
+    // failing 1 of 11
+    // TODO(b/156167471)
+    // { "name": "ccodec_unit_test" },
+
+    // failing 4 of 17, around max-input-size defaults & overrides
+    // TODO(b/156167471)
+    //{ "name": "mc_sanity_test"}
+  ]
+}
diff --git a/media/codec2/sfplugin/tests/Android.bp b/media/codec2/sfplugin/tests/Android.bp
index 8d1a9c3..5c774a2 100644
--- a/media/codec2/sfplugin/tests/Android.bp
+++ b/media/codec2/sfplugin/tests/Android.bp
@@ -1,5 +1,6 @@
 cc_test {
     name: "ccodec_unit_test",
+    test_suites: ["device-tests"],
 
     srcs: [
         "CCodecBuffers_test.cpp",
@@ -43,6 +44,7 @@
 
 cc_test {
     name: "mc_sanity_test",
+    test_suites: ["device-tests"],
 
     srcs: [
         "MediaCodec_sanity_test.cpp",
diff --git a/media/codec2/tests/Android.bp b/media/codec2/tests/Android.bp
index fce6e21..c9169a9 100644
--- a/media/codec2/tests/Android.bp
+++ b/media/codec2/tests/Android.bp
@@ -1,5 +1,6 @@
 cc_test {
     name: "codec2_core_param_test",
+    test_suites: ["device-tests"],
 
     srcs: [
         "C2Param_test.cpp",
@@ -28,6 +29,7 @@
 
 cc_test {
     name: "codec2_vndk_test",
+    test_suites: ["device-tests"],
 
     srcs: [
         "C2_test.cpp",
diff --git a/media/extractors/TEST_MAPPING b/media/extractors/TEST_MAPPING
index abefb0f..038b99a 100644
--- a/media/extractors/TEST_MAPPING
+++ b/media/extractors/TEST_MAPPING
@@ -1,5 +1,8 @@
 {
   "presubmit": [
+    // TODO(b/148094059): unit tests not allowed to download content
+    // { "name": "ExtractorUnitTest" },
+
     // TODO(b/153661591) enable test once the bug is fixed
     // This tests the extractor path
     // {
diff --git a/media/extractors/fuzzers/Android.bp b/media/extractors/fuzzers/Android.bp
index 31d6f83..e900e57 100644
--- a/media/extractors/fuzzers/Android.bp
+++ b/media/extractors/fuzzers/Android.bp
@@ -310,3 +310,27 @@
 
     dictionary: "flac_extractor_fuzzer.dict",
 }
+
+cc_fuzz {
+    name: "midi_extractor_fuzzer",
+    defaults: ["extractor-fuzzer-defaults"],
+
+    srcs: [
+        "midi_extractor_fuzzer.cpp",
+    ],
+
+    include_dirs: [
+        "frameworks/av/media/extractors/midi",
+    ],
+
+    static_libs: [
+        "libsonivox",
+        "libmedia_midiiowrapper",
+        "libmidiextractor",
+        "libwatchdog",
+    ],
+
+    dictionary: "midi_extractor_fuzzer.dict",
+
+    host_supported: true,
+}
diff --git a/media/extractors/fuzzers/README.md b/media/extractors/fuzzers/README.md
index 4223b5e..fb1d52f 100644
--- a/media/extractors/fuzzers/README.md
+++ b/media/extractors/fuzzers/README.md
@@ -11,6 +11,7 @@
 + [libmp3extractor](#mp3ExtractorFuzzer)
 + [libaacextractor](#aacExtractorFuzzer)
 + [libflacextractor](#flacExtractor)
++ [libmidiextractor](#midiExtractorFuzzer)
 
 # <a name="ExtractorFuzzerBase"></a> Fuzzer for libextractorfuzzerbase
 All the extractors have a common API - creating a data source, extraction
@@ -321,6 +322,41 @@
   $ adb shell /data/fuzz/arm64/flac_extractor_fuzzer/flac_extractor_fuzzer CORPUS_DIR
 ```
 
+# <a name="midiExtractorFuzzer"></a> Fuzzer for libmidiextractor
+
+## Plugin Design Considerations
+The fuzzer plugin for MIDI extractor uses the `ExtractorFuzzerBase` class and
+implements only the `createExtractor` to create the MIDI extractor class.
+
+##### Maximize code coverage
+Dict file (dictionary file) is created for MIDI to ensure that the required MIDI
+headers are present in every input file that goes to the fuzzer.
+This ensures that larger code gets covered as a range of MIDI headers will be
+present in the input data.
+
+
+## Build
+
+This describes steps to build midi_extractor_fuzzer binary.
+
+### Android
+
+#### Steps to build
+Build the fuzzer
+```
+  $ mm -j$(nproc) midi_extractor_fuzzer
+```
+
+#### Steps to run
+Create a directory CORPUS_DIR and copy some MIDI files to that folder
+Push this directory to device.
+
+To run on device
+```
+  $ adb sync data
+  $ adb shell /data/fuzz/arm64/midi_extractor_fuzzer/midi_extractor_fuzzer CORPUS_DIR
+```
+
 ## References:
  * http://llvm.org/docs/LibFuzzer.html
  * https://github.com/google/oss-fuzz
diff --git a/media/extractors/fuzzers/midi_extractor_fuzzer.cpp b/media/extractors/fuzzers/midi_extractor_fuzzer.cpp
new file mode 100644
index 0000000..e02a12b
--- /dev/null
+++ b/media/extractors/fuzzers/midi_extractor_fuzzer.cpp
@@ -0,0 +1,54 @@
+/******************************************************************************
+ *
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ *****************************************************************************
+ * Originally developed and contributed by Ittiam Systems Pvt. Ltd, Bangalore
+ */
+
+#include "ExtractorFuzzerBase.h"
+
+#include "MidiExtractor.h"
+
+using namespace android;
+
+class MIDIExtractor : public ExtractorFuzzerBase {
+ public:
+  MIDIExtractor() = default;
+  ~MIDIExtractor() = default;
+
+  bool createExtractor();
+};
+
+bool MIDIExtractor::createExtractor() {
+  mExtractor = new MidiExtractor(mDataSource->wrap());
+  if (!mExtractor) {
+    return false;
+  }
+  mExtractor->name();
+  return true;
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+  if ((!data) || (size == 0)) {
+    return 0;
+  }
+  MIDIExtractor* extractor = new MIDIExtractor();
+  if (extractor) {
+    extractor->processData(data, size);
+    delete extractor;
+  }
+  return 0;
+}
diff --git a/media/extractors/fuzzers/midi_extractor_fuzzer.dict b/media/extractors/fuzzers/midi_extractor_fuzzer.dict
new file mode 100644
index 0000000..5b6bb8b
--- /dev/null
+++ b/media/extractors/fuzzers/midi_extractor_fuzzer.dict
@@ -0,0 +1,3 @@
+# MIDI Chunks
+kw1="MThd"
+kw2="MTrk"
diff --git a/media/extractors/midi/Android.bp b/media/extractors/midi/Android.bp
index b8255fc..1c69bb8 100644
--- a/media/extractors/midi/Android.bp
+++ b/media/extractors/midi/Android.bp
@@ -5,7 +5,7 @@
     srcs: ["MidiExtractor.cpp"],
 
     header_libs: [
-        "libmedia_headers",
+        "libmedia_datasource_headers",
     ],
 
     static_libs: [
@@ -18,4 +18,12 @@
     shared_libs: [
         "libbase",
     ],
+
+    host_supported: true,
+
+    target: {
+        darwin: {
+            enabled: false,
+        },
+    },
 }
diff --git a/media/extractors/mp4/MPEG4Extractor.cpp b/media/extractors/mp4/MPEG4Extractor.cpp
index fd8f44e..019a99a 100644
--- a/media/extractors/mp4/MPEG4Extractor.cpp
+++ b/media/extractors/mp4/MPEG4Extractor.cpp
@@ -2880,6 +2880,21 @@
             break;
         }
 
+        case FOURCC("pasp"):
+        {
+            *offset += chunk_size;
+            // this must be in a VisualSampleEntry box under the Sample Description Box ('stsd')
+            // ignore otherwise
+            if (depth >= 2 && mPath[depth - 2] == FOURCC("stsd")) {
+                status_t err = parsePaspBox(data_offset, chunk_data_size);
+                if (err != OK) {
+                    return err;
+                }
+            }
+
+            break;
+        }
+
         case FOURCC("titl"):
         case FOURCC("perf"):
         case FOURCC("auth"):
@@ -4052,6 +4067,26 @@
     return OK;
 }
 
+status_t MPEG4Extractor::parsePaspBox(off64_t offset, size_t size) {
+    if (size < 8 || size == SIZE_MAX || mLastTrack == NULL) {
+        return ERROR_MALFORMED;
+    }
+
+    uint32_t data[2]; // hSpacing, vSpacing
+    if (mDataSource->readAt(offset, data, 8) < 8) {
+        return ERROR_IO;
+    }
+    uint32_t hSpacing = ntohl(data[0]);
+    uint32_t vSpacing = ntohl(data[1]);
+
+    if (hSpacing != 0 && vSpacing != 0) {
+        AMediaFormat_setInt32(mLastTrack->meta, AMEDIAFORMAT_KEY_SAR_WIDTH, hSpacing);
+        AMediaFormat_setInt32(mLastTrack->meta, AMEDIAFORMAT_KEY_SAR_HEIGHT, vSpacing);
+    }
+
+    return OK;
+}
+
 status_t MPEG4Extractor::parse3GPPMetaData(off64_t offset, size_t size, int depth) {
     if (size < 4 || size == SIZE_MAX) {
         return ERROR_MALFORMED;
diff --git a/media/extractors/mp4/MPEG4Extractor.h b/media/extractors/mp4/MPEG4Extractor.h
index 1e49d50..bafc7f5 100644
--- a/media/extractors/mp4/MPEG4Extractor.h
+++ b/media/extractors/mp4/MPEG4Extractor.h
@@ -160,6 +160,7 @@
     status_t parseChunk(off64_t *offset, int depth);
     status_t parseITunesMetaData(off64_t offset, size_t size);
     status_t parseColorInfo(off64_t offset, size_t size);
+    status_t parsePaspBox(off64_t offset, size_t size);
     status_t parse3GPPMetaData(off64_t offset, size_t size, int depth);
     void parseID3v2MetaData(off64_t offset, uint64_t size);
     status_t parseQTMetaKey(off64_t data_offset, size_t data_size);
diff --git a/media/extractors/tests/Android.bp b/media/extractors/tests/Android.bp
index b3afe2f..0bca6f5 100644
--- a/media/extractors/tests/Android.bp
+++ b/media/extractors/tests/Android.bp
@@ -17,6 +17,7 @@
 cc_test {
     name: "ExtractorUnitTest",
     gtest: true,
+    test_suites: ["device-tests"],
 
     srcs: ["ExtractorUnitTest.cpp"],
 
diff --git a/media/libaaudio/Doxyfile.orig b/media/libaaudio/Doxyfile.orig
deleted file mode 100644
index 137facb..0000000
--- a/media/libaaudio/Doxyfile.orig
+++ /dev/null
@@ -1,2303 +0,0 @@
-# Doxyfile 1.8.6
-
-# This file describes the settings to be used by the documentation system
-# doxygen (www.doxygen.org) for a project.
-#
-# All text after a double hash (##) is considered a comment and is placed in
-# front of the TAG it is preceding.
-#
-# All text after a single hash (#) is considered a comment and will be ignored.
-# The format is:
-# TAG = value [value, ...]
-# For lists, items can also be appended using:
-# TAG += value [value, ...]
-# Values that contain spaces should be placed between quotes (\" \").
-
-#---------------------------------------------------------------------------
-# Project related configuration options
-#---------------------------------------------------------------------------
-
-# This tag specifies the encoding used for all characters in the config file
-# that follow. The default is UTF-8 which is also the encoding used for all text
-# before the first occurrence of this tag. Doxygen uses libiconv (or the iconv
-# built into libc) for the transcoding. See http://www.gnu.org/software/libiconv
-# for the list of possible encodings.
-# The default value is: UTF-8.
-
-DOXYFILE_ENCODING      = UTF-8
-
-# The PROJECT_NAME tag is a single word (or a sequence of words surrounded by
-# double-quotes, unless you are using Doxywizard) that should identify the
-# project for which the documentation is generated. This name is used in the
-# title of most generated pages and in a few other places.
-# The default value is: My Project.
-
-PROJECT_NAME           = "My Project"
-
-# The PROJECT_NUMBER tag can be used to enter a project or revision number. This
-# could be handy for archiving the generated documentation or if some version
-# control system is used.
-
-PROJECT_NUMBER         =
-
-# Using the PROJECT_BRIEF tag one can provide an optional one line description
-# for a project that appears at the top of each page and should give viewer a
-# quick idea about the purpose of the project. Keep the description short.
-
-PROJECT_BRIEF          =
-
-# With the PROJECT_LOGO tag one can specify an logo or icon that is included in
-# the documentation. The maximum height of the logo should not exceed 55 pixels
-# and the maximum width should not exceed 200 pixels. Doxygen will copy the logo
-# to the output directory.
-
-PROJECT_LOGO           =
-
-# The OUTPUT_DIRECTORY tag is used to specify the (relative or absolute) path
-# into which the generated documentation will be written. If a relative path is
-# entered, it will be relative to the location where doxygen was started. If
-# left blank the current directory will be used.
-
-OUTPUT_DIRECTORY       =
-
-# If the CREATE_SUBDIRS tag is set to YES, then doxygen will create 4096 sub-
-# directories (in 2 levels) under the output directory of each output format and
-# will distribute the generated files over these directories. Enabling this
-# option can be useful when feeding doxygen a huge amount of source files, where
-# putting all generated files in the same directory would otherwise causes
-# performance problems for the file system.
-# The default value is: NO.
-
-CREATE_SUBDIRS         = NO
-
-# The OUTPUT_LANGUAGE tag is used to specify the language in which all
-# documentation generated by doxygen is written. Doxygen will use this
-# information to generate all constant output in the proper language.
-# Possible values are: Afrikaans, Arabic, Armenian, Brazilian, Catalan, Chinese,
-# Chinese-Traditional, Croatian, Czech, Danish, Dutch, English (United States),
-# Esperanto, Farsi (Persian), Finnish, French, German, Greek, Hungarian,
-# Indonesian, Italian, Japanese, Japanese-en (Japanese with English messages),
-# Korean, Korean-en (Korean with English messages), Latvian, Lithuanian,
-# Macedonian, Norwegian, Persian (Farsi), Polish, Portuguese, Romanian, Russian,
-# Serbian, Serbian-Cyrillic, Slovak, Slovene, Spanish, Swedish, Turkish,
-# Ukrainian and Vietnamese.
-# The default value is: English.
-
-OUTPUT_LANGUAGE        = English
-
-# If the BRIEF_MEMBER_DESC tag is set to YES doxygen will include brief member
-# descriptions after the members that are listed in the file and class
-# documentation (similar to Javadoc). Set to NO to disable this.
-# The default value is: YES.
-
-BRIEF_MEMBER_DESC      = YES
-
-# If the REPEAT_BRIEF tag is set to YES doxygen will prepend the brief
-# description of a member or function before the detailed description
-#
-# Note: If both HIDE_UNDOC_MEMBERS and BRIEF_MEMBER_DESC are set to NO, the
-# brief descriptions will be completely suppressed.
-# The default value is: YES.
-
-REPEAT_BRIEF           = YES
-
-# This tag implements a quasi-intelligent brief description abbreviator that is
-# used to form the text in various listings. Each string in this list, if found
-# as the leading text of the brief description, will be stripped from the text
-# and the result, after processing the whole list, is used as the annotated
-# text. Otherwise, the brief description is used as-is. If left blank, the
-# following values are used ($name is automatically replaced with the name of
-# the entity):The $name class, The $name widget, The $name file, is, provides,
-# specifies, contains, represents, a, an and the.
-
-ABBREVIATE_BRIEF       =
-
-# If the ALWAYS_DETAILED_SEC and REPEAT_BRIEF tags are both set to YES then
-# doxygen will generate a detailed section even if there is only a brief
-# description.
-# The default value is: NO.
-
-ALWAYS_DETAILED_SEC    = NO
-
-# If the INLINE_INHERITED_MEMB tag is set to YES, doxygen will show all
-# inherited members of a class in the documentation of that class as if those
-# members were ordinary class members. Constructors, destructors and assignment
-# operators of the base classes will not be shown.
-# The default value is: NO.
-
-INLINE_INHERITED_MEMB  = NO
-
-# If the FULL_PATH_NAMES tag is set to YES doxygen will prepend the full path
-# before files name in the file list and in the header files. If set to NO the
-# shortest path that makes the file name unique will be used
-# The default value is: YES.
-
-FULL_PATH_NAMES        = YES
-
-# The STRIP_FROM_PATH tag can be used to strip a user-defined part of the path.
-# Stripping is only done if one of the specified strings matches the left-hand
-# part of the path. The tag can be used to show relative paths in the file list.
-# If left blank the directory from which doxygen is run is used as the path to
-# strip.
-#
-# Note that you can specify absolute paths here, but also relative paths, which
-# will be relative from the directory where doxygen is started.
-# This tag requires that the tag FULL_PATH_NAMES is set to YES.
-
-STRIP_FROM_PATH        =
-
-# The STRIP_FROM_INC_PATH tag can be used to strip a user-defined part of the
-# path mentioned in the documentation of a class, which tells the reader which
-# header file to include in order to use a class. If left blank only the name of
-# the header file containing the class definition is used. Otherwise one should
-# specify the list of include paths that are normally passed to the compiler
-# using the -I flag.
-
-STRIP_FROM_INC_PATH    =
-
-# If the SHORT_NAMES tag is set to YES, doxygen will generate much shorter (but
-# less readable) file names. This can be useful is your file systems doesn't
-# support long names like on DOS, Mac, or CD-ROM.
-# The default value is: NO.
-
-SHORT_NAMES            = NO
-
-# If the JAVADOC_AUTOBRIEF tag is set to YES then doxygen will interpret the
-# first line (until the first dot) of a Javadoc-style comment as the brief
-# description. If set to NO, the Javadoc-style will behave just like regular Qt-
-# style comments (thus requiring an explicit @brief command for a brief
-# description.)
-# The default value is: NO.
-
-JAVADOC_AUTOBRIEF      = NO
-
-# If the QT_AUTOBRIEF tag is set to YES then doxygen will interpret the first
-# line (until the first dot) of a Qt-style comment as the brief description. If
-# set to NO, the Qt-style will behave just like regular Qt-style comments (thus
-# requiring an explicit \brief command for a brief description.)
-# The default value is: NO.
-
-QT_AUTOBRIEF           = NO
-
-# The MULTILINE_CPP_IS_BRIEF tag can be set to YES to make doxygen treat a
-# multi-line C++ special comment block (i.e. a block of //! or /// comments) as
-# a brief description. This used to be the default behavior. The new default is
-# to treat a multi-line C++ comment block as a detailed description. Set this
-# tag to YES if you prefer the old behavior instead.
-#
-# Note that setting this tag to YES also means that rational rose comments are
-# not recognized any more.
-# The default value is: NO.
-
-MULTILINE_CPP_IS_BRIEF = NO
-
-# If the INHERIT_DOCS tag is set to YES then an undocumented member inherits the
-# documentation from any documented member that it re-implements.
-# The default value is: YES.
-
-INHERIT_DOCS           = YES
-
-# If the SEPARATE_MEMBER_PAGES tag is set to YES, then doxygen will produce a
-# new page for each member. If set to NO, the documentation of a member will be
-# part of the file/class/namespace that contains it.
-# The default value is: NO.
-
-SEPARATE_MEMBER_PAGES  = NO
-
-# The TAB_SIZE tag can be used to set the number of spaces in a tab. Doxygen
-# uses this value to replace tabs by spaces in code fragments.
-# Minimum value: 1, maximum value: 16, default value: 4.
-
-TAB_SIZE               = 4
-
-# This tag can be used to specify a number of aliases that act as commands in
-# the documentation. An alias has the form:
-# name=value
-# For example adding
-# "sideeffect=@par Side Effects:\n"
-# will allow you to put the command \sideeffect (or @sideeffect) in the
-# documentation, which will result in a user-defined paragraph with heading
-# "Side Effects:". You can put \n's in the value part of an alias to insert
-# newlines.
-
-ALIASES                =
-
-# This tag can be used to specify a number of word-keyword mappings (TCL only).
-# A mapping has the form "name=value". For example adding "class=itcl::class"
-# will allow you to use the command class in the itcl::class meaning.
-
-TCL_SUBST              =
-
-# Set the OPTIMIZE_OUTPUT_FOR_C tag to YES if your project consists of C sources
-# only. Doxygen will then generate output that is more tailored for C. For
-# instance, some of the names that are used will be different. The list of all
-# members will be omitted, etc.
-# The default value is: NO.
-
-OPTIMIZE_OUTPUT_FOR_C  = NO
-
-# Set the OPTIMIZE_OUTPUT_JAVA tag to YES if your project consists of Java or
-# Python sources only. Doxygen will then generate output that is more tailored
-# for that language. For instance, namespaces will be presented as packages,
-# qualified scopes will look different, etc.
-# The default value is: NO.
-
-OPTIMIZE_OUTPUT_JAVA   = NO
-
-# Set the OPTIMIZE_FOR_FORTRAN tag to YES if your project consists of Fortran
-# sources. Doxygen will then generate output that is tailored for Fortran.
-# The default value is: NO.
-
-OPTIMIZE_FOR_FORTRAN   = NO
-
-# Set the OPTIMIZE_OUTPUT_VHDL tag to YES if your project consists of VHDL
-# sources. Doxygen will then generate output that is tailored for VHDL.
-# The default value is: NO.
-
-OPTIMIZE_OUTPUT_VHDL   = NO
-
-# Doxygen selects the parser to use depending on the extension of the files it
-# parses. With this tag you can assign which parser to use for a given
-# extension. Doxygen has a built-in mapping, but you can override or extend it
-# using this tag. The format is ext=language, where ext is a file extension, and
-# language is one of the parsers supported by doxygen: IDL, Java, Javascript,
-# C#, C, C++, D, PHP, Objective-C, Python, Fortran, VHDL. For instance to make
-# doxygen treat .inc files as Fortran files (default is PHP), and .f files as C
-# (default is Fortran), use: inc=Fortran f=C.
-#
-# Note For files without extension you can use no_extension as a placeholder.
-#
-# Note that for custom extensions you also need to set FILE_PATTERNS otherwise
-# the files are not read by doxygen.
-
-EXTENSION_MAPPING      =
-
-# If the MARKDOWN_SUPPORT tag is enabled then doxygen pre-processes all comments
-# according to the Markdown format, which allows for more readable
-# documentation. See http://daringfireball.net/projects/markdown/ for details.
-# The output of markdown processing is further processed by doxygen, so you can
-# mix doxygen, HTML, and XML commands with Markdown formatting. Disable only in
-# case of backward compatibilities issues.
-# The default value is: YES.
-
-MARKDOWN_SUPPORT       = YES
-
-# When enabled doxygen tries to link words that correspond to documented
-# classes, or namespaces to their corresponding documentation. Such a link can
-# be prevented in individual cases by by putting a % sign in front of the word
-# or globally by setting AUTOLINK_SUPPORT to NO.
-# The default value is: YES.
-
-AUTOLINK_SUPPORT       = YES
-
-# If you use STL classes (i.e. std::string, std::vector, etc.) but do not want
-# to include (a tag file for) the STL sources as input, then you should set this
-# tag to YES in order to let doxygen match functions declarations and
-# definitions whose arguments contain STL classes (e.g. func(std::string);
-# versus func(std::string) {}). This also make the inheritance and collaboration
-# diagrams that involve STL classes more complete and accurate.
-# The default value is: NO.
-
-BUILTIN_STL_SUPPORT    = NO
-
-# If you use Microsoft's C++/CLI language, you should set this option to YES to
-# enable parsing support.
-# The default value is: NO.
-
-CPP_CLI_SUPPORT        = NO
-
-# Set the SIP_SUPPORT tag to YES if your project consists of sip (see:
-# http://www.riverbankcomputing.co.uk/software/sip/intro) sources only. Doxygen
-# will parse them like normal C++ but will assume all classes use public instead
-# of private inheritance when no explicit protection keyword is present.
-# The default value is: NO.
-
-SIP_SUPPORT            = NO
-
-# For Microsoft's IDL there are propget and propput attributes to indicate
-# getter and setter methods for a property. Setting this option to YES will make
-# doxygen to replace the get and set methods by a property in the documentation.
-# This will only work if the methods are indeed getting or setting a simple
-# type. If this is not the case, or you want to show the methods anyway, you
-# should set this option to NO.
-# The default value is: YES.
-
-IDL_PROPERTY_SUPPORT   = YES
-
-# If member grouping is used in the documentation and the DISTRIBUTE_GROUP_DOC
-# tag is set to YES, then doxygen will reuse the documentation of the first
-# member in the group (if any) for the other members of the group. By default
-# all members of a group must be documented explicitly.
-# The default value is: NO.
-
-DISTRIBUTE_GROUP_DOC   = NO
-
-# Set the SUBGROUPING tag to YES to allow class member groups of the same type
-# (for instance a group of public functions) to be put as a subgroup of that
-# type (e.g. under the Public Functions section). Set it to NO to prevent
-# subgrouping. Alternatively, this can be done per class using the
-# \nosubgrouping command.
-# The default value is: YES.
-
-SUBGROUPING            = YES
-
-# When the INLINE_GROUPED_CLASSES tag is set to YES, classes, structs and unions
-# are shown inside the group in which they are included (e.g. using \ingroup)
-# instead of on a separate page (for HTML and Man pages) or section (for LaTeX
-# and RTF).
-#
-# Note that this feature does not work in combination with
-# SEPARATE_MEMBER_PAGES.
-# The default value is: NO.
-
-INLINE_GROUPED_CLASSES = NO
-
-# When the INLINE_SIMPLE_STRUCTS tag is set to YES, structs, classes, and unions
-# with only public data fields or simple typedef fields will be shown inline in
-# the documentation of the scope in which they are defined (i.e. file,
-# namespace, or group documentation), provided this scope is documented. If set
-# to NO, structs, classes, and unions are shown on a separate page (for HTML and
-# Man pages) or section (for LaTeX and RTF).
-# The default value is: NO.
-
-INLINE_SIMPLE_STRUCTS  = NO
-
-# When TYPEDEF_HIDES_STRUCT tag is enabled, a typedef of a struct, union, or
-# enum is documented as struct, union, or enum with the name of the typedef. So
-# typedef struct TypeS {} TypeT, will appear in the documentation as a struct
-# with name TypeT. When disabled the typedef will appear as a member of a file,
-# namespace, or class. And the struct will be named TypeS. This can typically be
-# useful for C code in case the coding convention dictates that all compound
-# types are typedef'ed and only the typedef is referenced, never the tag name.
-# The default value is: NO.
-
-TYPEDEF_HIDES_STRUCT   = NO
-
-# The size of the symbol lookup cache can be set using LOOKUP_CACHE_SIZE. This
-# cache is used to resolve symbols given their name and scope. Since this can be
-# an expensive process and often the same symbol appears multiple times in the
-# code, doxygen keeps a cache of pre-resolved symbols. If the cache is too small
-# doxygen will become slower. If the cache is too large, memory is wasted. The
-# cache size is given by this formula: 2^(16+LOOKUP_CACHE_SIZE). The valid range
-# is 0..9, the default is 0, corresponding to a cache size of 2^16=65536
-# symbols. At the end of a run doxygen will report the cache usage and suggest
-# the optimal cache size from a speed point of view.
-# Minimum value: 0, maximum value: 9, default value: 0.
-
-LOOKUP_CACHE_SIZE      = 0
-
-#---------------------------------------------------------------------------
-# Build related configuration options
-#---------------------------------------------------------------------------
-
-# If the EXTRACT_ALL tag is set to YES doxygen will assume all entities in
-# documentation are documented, even if no documentation was available. Private
-# class members and static file members will be hidden unless the
-# EXTRACT_PRIVATE respectively EXTRACT_STATIC tags are set to YES.
-# Note: This will also disable the warnings about undocumented members that are
-# normally produced when WARNINGS is set to YES.
-# The default value is: NO.
-
-EXTRACT_ALL            = NO
-
-# If the EXTRACT_PRIVATE tag is set to YES all private members of a class will
-# be included in the documentation.
-# The default value is: NO.
-
-EXTRACT_PRIVATE        = NO
-
-# If the EXTRACT_PACKAGE tag is set to YES all members with package or internal
-# scope will be included in the documentation.
-# The default value is: NO.
-
-EXTRACT_PACKAGE        = NO
-
-# If the EXTRACT_STATIC tag is set to YES all static members of a file will be
-# included in the documentation.
-# The default value is: NO.
-
-EXTRACT_STATIC         = NO
-
-# If the EXTRACT_LOCAL_CLASSES tag is set to YES classes (and structs) defined
-# locally in source files will be included in the documentation. If set to NO
-# only classes defined in header files are included. Does not have any effect
-# for Java sources.
-# The default value is: YES.
-
-EXTRACT_LOCAL_CLASSES  = YES
-
-# This flag is only useful for Objective-C code. When set to YES local methods,
-# which are defined in the implementation section but not in the interface are
-# included in the documentation. If set to NO only methods in the interface are
-# included.
-# The default value is: NO.
-
-EXTRACT_LOCAL_METHODS  = NO
-
-# If this flag is set to YES, the members of anonymous namespaces will be
-# extracted and appear in the documentation as a namespace called
-# 'anonymous_namespace{file}', where file will be replaced with the base name of
-# the file that contains the anonymous namespace. By default anonymous namespace
-# are hidden.
-# The default value is: NO.
-
-EXTRACT_ANON_NSPACES   = NO
-
-# If the HIDE_UNDOC_MEMBERS tag is set to YES, doxygen will hide all
-# undocumented members inside documented classes or files. If set to NO these
-# members will be included in the various overviews, but no documentation
-# section is generated. This option has no effect if EXTRACT_ALL is enabled.
-# The default value is: NO.
-
-HIDE_UNDOC_MEMBERS     = NO
-
-# If the HIDE_UNDOC_CLASSES tag is set to YES, doxygen will hide all
-# undocumented classes that are normally visible in the class hierarchy. If set
-# to NO these classes will be included in the various overviews. This option has
-# no effect if EXTRACT_ALL is enabled.
-# The default value is: NO.
-
-HIDE_UNDOC_CLASSES     = NO
-
-# If the HIDE_FRIEND_COMPOUNDS tag is set to YES, doxygen will hide all friend
-# (class|struct|union) declarations. If set to NO these declarations will be
-# included in the documentation.
-# The default value is: NO.
-
-HIDE_FRIEND_COMPOUNDS  = NO
-
-# If the HIDE_IN_BODY_DOCS tag is set to YES, doxygen will hide any
-# documentation blocks found inside the body of a function. If set to NO these
-# blocks will be appended to the function's detailed documentation block.
-# The default value is: NO.
-
-HIDE_IN_BODY_DOCS      = NO
-
-# The INTERNAL_DOCS tag determines if documentation that is typed after a
-# \internal command is included. If the tag is set to NO then the documentation
-# will be excluded. Set it to YES to include the internal documentation.
-# The default value is: NO.
-
-INTERNAL_DOCS          = NO
-
-# If the CASE_SENSE_NAMES tag is set to NO then doxygen will only generate file
-# names in lower-case letters. If set to YES upper-case letters are also
-# allowed. This is useful if you have classes or files whose names only differ
-# in case and if your file system supports case sensitive file names. Windows
-# and Mac users are advised to set this option to NO.
-# The default value is: system dependent.
-
-CASE_SENSE_NAMES       = YES
-
-# If the HIDE_SCOPE_NAMES tag is set to NO then doxygen will show members with
-# their full class and namespace scopes in the documentation. If set to YES the
-# scope will be hidden.
-# The default value is: NO.
-
-HIDE_SCOPE_NAMES       = NO
-
-# If the SHOW_INCLUDE_FILES tag is set to YES then doxygen will put a list of
-# the files that are included by a file in the documentation of that file.
-# The default value is: YES.
-
-SHOW_INCLUDE_FILES     = YES
-
-# If the SHOW_GROUPED_MEMB_INC tag is set to YES then Doxygen will add for each
-# grouped member an include statement to the documentation, telling the reader
-# which file to include in order to use the member.
-# The default value is: NO.
-
-SHOW_GROUPED_MEMB_INC  = NO
-
-# If the FORCE_LOCAL_INCLUDES tag is set to YES then doxygen will list include
-# files with double quotes in the documentation rather than with sharp brackets.
-# The default value is: NO.
-
-FORCE_LOCAL_INCLUDES   = NO
-
-# If the INLINE_INFO tag is set to YES then a tag [inline] is inserted in the
-# documentation for inline members.
-# The default value is: YES.
-
-INLINE_INFO            = YES
-
-# If the SORT_MEMBER_DOCS tag is set to YES then doxygen will sort the
-# (detailed) documentation of file and class members alphabetically by member
-# name. If set to NO the members will appear in declaration order.
-# The default value is: YES.
-
-SORT_MEMBER_DOCS       = YES
-
-# If the SORT_BRIEF_DOCS tag is set to YES then doxygen will sort the brief
-# descriptions of file, namespace and class members alphabetically by member
-# name. If set to NO the members will appear in declaration order. Note that
-# this will also influence the order of the classes in the class list.
-# The default value is: NO.
-
-SORT_BRIEF_DOCS        = NO
-
-# If the SORT_MEMBERS_CTORS_1ST tag is set to YES then doxygen will sort the
-# (brief and detailed) documentation of class members so that constructors and
-# destructors are listed first. If set to NO the constructors will appear in the
-# respective orders defined by SORT_BRIEF_DOCS and SORT_MEMBER_DOCS.
-# Note: If SORT_BRIEF_DOCS is set to NO this option is ignored for sorting brief
-# member documentation.
-# Note: If SORT_MEMBER_DOCS is set to NO this option is ignored for sorting
-# detailed member documentation.
-# The default value is: NO.
-
-SORT_MEMBERS_CTORS_1ST = NO
-
-# If the SORT_GROUP_NAMES tag is set to YES then doxygen will sort the hierarchy
-# of group names into alphabetical order. If set to NO the group names will
-# appear in their defined order.
-# The default value is: NO.
-
-SORT_GROUP_NAMES       = NO
-
-# If the SORT_BY_SCOPE_NAME tag is set to YES, the class list will be sorted by
-# fully-qualified names, including namespaces. If set to NO, the class list will
-# be sorted only by class name, not including the namespace part.
-# Note: This option is not very useful if HIDE_SCOPE_NAMES is set to YES.
-# Note: This option applies only to the class list, not to the alphabetical
-# list.
-# The default value is: NO.
-
-SORT_BY_SCOPE_NAME     = NO
-
-# If the STRICT_PROTO_MATCHING option is enabled and doxygen fails to do proper
-# type resolution of all parameters of a function it will reject a match between
-# the prototype and the implementation of a member function even if there is
-# only one candidate or it is obvious which candidate to choose by doing a
-# simple string match. By disabling STRICT_PROTO_MATCHING doxygen will still
-# accept a match between prototype and implementation in such cases.
-# The default value is: NO.
-
-STRICT_PROTO_MATCHING  = NO
-
-# The GENERATE_TODOLIST tag can be used to enable ( YES) or disable ( NO) the
-# todo list. This list is created by putting \todo commands in the
-# documentation.
-# The default value is: YES.
-
-GENERATE_TODOLIST      = YES
-
-# The GENERATE_TESTLIST tag can be used to enable ( YES) or disable ( NO) the
-# test list. This list is created by putting \test commands in the
-# documentation.
-# The default value is: YES.
-
-GENERATE_TESTLIST      = YES
-
-# The GENERATE_BUGLIST tag can be used to enable ( YES) or disable ( NO) the bug
-# list. This list is created by putting \bug commands in the documentation.
-# The default value is: YES.
-
-GENERATE_BUGLIST       = YES
-
-# The GENERATE_DEPRECATEDLIST tag can be used to enable ( YES) or disable ( NO)
-# the deprecated list. This list is created by putting \deprecated commands in
-# the documentation.
-# The default value is: YES.
-
-GENERATE_DEPRECATEDLIST= YES
-
-# The ENABLED_SECTIONS tag can be used to enable conditional documentation
-# sections, marked by \if <section_label> ... \endif and \cond <section_label>
-# ... \endcond blocks.
-
-ENABLED_SECTIONS       =
-
-# The MAX_INITIALIZER_LINES tag determines the maximum number of lines that the
-# initial value of a variable or macro / define can have for it to appear in the
-# documentation. If the initializer consists of more lines than specified here
-# it will be hidden. Use a value of 0 to hide initializers completely. The
-# appearance of the value of individual variables and macros / defines can be
-# controlled using \showinitializer or \hideinitializer command in the
-# documentation regardless of this setting.
-# Minimum value: 0, maximum value: 10000, default value: 30.
-
-MAX_INITIALIZER_LINES  = 30
-
-# Set the SHOW_USED_FILES tag to NO to disable the list of files generated at
-# the bottom of the documentation of classes and structs. If set to YES the list
-# will mention the files that were used to generate the documentation.
-# The default value is: YES.
-
-SHOW_USED_FILES        = YES
-
-# Set the SHOW_FILES tag to NO to disable the generation of the Files page. This
-# will remove the Files entry from the Quick Index and from the Folder Tree View
-# (if specified).
-# The default value is: YES.
-
-SHOW_FILES             = YES
-
-# Set the SHOW_NAMESPACES tag to NO to disable the generation of the Namespaces
-# page. This will remove the Namespaces entry from the Quick Index and from the
-# Folder Tree View (if specified).
-# The default value is: YES.
-
-SHOW_NAMESPACES        = YES
-
-# The FILE_VERSION_FILTER tag can be used to specify a program or script that
-# doxygen should invoke to get the current version for each file (typically from
-# the version control system). Doxygen will invoke the program by executing (via
-# popen()) the command command input-file, where command is the value of the
-# FILE_VERSION_FILTER tag, and input-file is the name of an input file provided
-# by doxygen. Whatever the program writes to standard output is used as the file
-# version. For an example see the documentation.
-
-FILE_VERSION_FILTER    =
-
-# The LAYOUT_FILE tag can be used to specify a layout file which will be parsed
-# by doxygen. The layout file controls the global structure of the generated
-# output files in an output format independent way. To create the layout file
-# that represents doxygen's defaults, run doxygen with the -l option. You can
-# optionally specify a file name after the option, if omitted DoxygenLayout.xml
-# will be used as the name of the layout file.
-#
-# Note that if you run doxygen from a directory containing a file called
-# DoxygenLayout.xml, doxygen will parse it automatically even if the LAYOUT_FILE
-# tag is left empty.
-
-LAYOUT_FILE            =
-
-# The CITE_BIB_FILES tag can be used to specify one or more bib files containing
-# the reference definitions. This must be a list of .bib files. The .bib
-# extension is automatically appended if omitted. This requires the bibtex tool
-# to be installed. See also http://en.wikipedia.org/wiki/BibTeX for more info.
-# For LaTeX the style of the bibliography can be controlled using
-# LATEX_BIB_STYLE. To use this feature you need bibtex and perl available in the
-# search path. Do not use file names with spaces, bibtex cannot handle them. See
-# also \cite for info how to create references.
-
-CITE_BIB_FILES         =
-
-#---------------------------------------------------------------------------
-# Configuration options related to warning and progress messages
-#---------------------------------------------------------------------------
-
-# The QUIET tag can be used to turn on/off the messages that are generated to
-# standard output by doxygen. If QUIET is set to YES this implies that the
-# messages are off.
-# The default value is: NO.
-
-QUIET                  = NO
-
-# The WARNINGS tag can be used to turn on/off the warning messages that are
-# generated to standard error ( stderr) by doxygen. If WARNINGS is set to YES
-# this implies that the warnings are on.
-#
-# Tip: Turn warnings on while writing the documentation.
-# The default value is: YES.
-
-WARNINGS               = YES
-
-# If the WARN_IF_UNDOCUMENTED tag is set to YES, then doxygen will generate
-# warnings for undocumented members. If EXTRACT_ALL is set to YES then this flag
-# will automatically be disabled.
-# The default value is: YES.
-
-WARN_IF_UNDOCUMENTED   = YES
-
-# If the WARN_IF_DOC_ERROR tag is set to YES, doxygen will generate warnings for
-# potential errors in the documentation, such as not documenting some parameters
-# in a documented function, or documenting parameters that don't exist or using
-# markup commands wrongly.
-# The default value is: YES.
-
-WARN_IF_DOC_ERROR      = YES
-
-# This WARN_NO_PARAMDOC option can be enabled to get warnings for functions that
-# are documented, but have no documentation for their parameters or return
-# value. If set to NO doxygen will only warn about wrong or incomplete parameter
-# documentation, but not about the absence of documentation.
-# The default value is: NO.
-
-WARN_NO_PARAMDOC       = NO
-
-# The WARN_FORMAT tag determines the format of the warning messages that doxygen
-# can produce. The string should contain the $file, $line, and $text tags, which
-# will be replaced by the file and line number from which the warning originated
-# and the warning text. Optionally the format may contain $version, which will
-# be replaced by the version of the file (if it could be obtained via
-# FILE_VERSION_FILTER)
-# The default value is: $file:$line: $text.
-
-WARN_FORMAT            = "$file:$line: $text"
-
-# The WARN_LOGFILE tag can be used to specify a file to which warning and error
-# messages should be written. If left blank the output is written to standard
-# error (stderr).
-
-WARN_LOGFILE           =
-
-#---------------------------------------------------------------------------
-# Configuration options related to the input files
-#---------------------------------------------------------------------------
-
-# The INPUT tag is used to specify the files and/or directories that contain
-# documented source files. You may enter file names like myfile.cpp or
-# directories like /usr/src/myproject. Separate the files or directories with
-# spaces.
-# Note: If this tag is empty the current directory is searched.
-
-INPUT                  =
-
-# This tag can be used to specify the character encoding of the source files
-# that doxygen parses. Internally doxygen uses the UTF-8 encoding. Doxygen uses
-# libiconv (or the iconv built into libc) for the transcoding. See the libiconv
-# documentation (see: http://www.gnu.org/software/libiconv) for the list of
-# possible encodings.
-# The default value is: UTF-8.
-
-INPUT_ENCODING         = UTF-8
-
-# If the value of the INPUT tag contains directories, you can use the
-# FILE_PATTERNS tag to specify one or more wildcard patterns (like *.cpp and
-# *.h) to filter out the source-files in the directories. If left blank the
-# following patterns are tested:*.c, *.cc, *.cxx, *.cpp, *.c++, *.java, *.ii,
-# *.ixx, *.ipp, *.i++, *.inl, *.idl, *.ddl, *.odl, *.h, *.hh, *.hxx, *.hpp,
-# *.h++, *.cs, *.d, *.php, *.php4, *.php5, *.phtml, *.inc, *.m, *.markdown,
-# *.md, *.mm, *.dox, *.py, *.f90, *.f, *.for, *.tcl, *.vhd, *.vhdl, *.ucf,
-# *.qsf, *.as and *.js.
-
-FILE_PATTERNS          =
-
-# The RECURSIVE tag can be used to specify whether or not subdirectories should
-# be searched for input files as well.
-# The default value is: NO.
-
-RECURSIVE              = NO
-
-# The EXCLUDE tag can be used to specify files and/or directories that should be
-# excluded from the INPUT source files. This way you can easily exclude a
-# subdirectory from a directory tree whose root is specified with the INPUT tag.
-#
-# Note that relative paths are relative to the directory from which doxygen is
-# run.
-
-EXCLUDE                =
-
-# The EXCLUDE_SYMLINKS tag can be used to select whether or not files or
-# directories that are symbolic links (a Unix file system feature) are excluded
-# from the input.
-# The default value is: NO.
-
-EXCLUDE_SYMLINKS       = NO
-
-# If the value of the INPUT tag contains directories, you can use the
-# EXCLUDE_PATTERNS tag to specify one or more wildcard patterns to exclude
-# certain files from those directories.
-#
-# Note that the wildcards are matched against the file with absolute path, so to
-# exclude all test directories for example use the pattern */test/*
-
-EXCLUDE_PATTERNS       =
-
-# The EXCLUDE_SYMBOLS tag can be used to specify one or more symbol names
-# (namespaces, classes, functions, etc.) that should be excluded from the
-# output. The symbol name can be a fully qualified name, a word, or if the
-# wildcard * is used, a substring. Examples: ANamespace, AClass,
-# AClass::ANamespace, ANamespace::*Test
-#
-# Note that the wildcards are matched against the file with absolute path, so to
-# exclude all test directories use the pattern */test/*
-
-EXCLUDE_SYMBOLS        =
-
-# The EXAMPLE_PATH tag can be used to specify one or more files or directories
-# that contain example code fragments that are included (see the \include
-# command).
-
-EXAMPLE_PATH           =
-
-# If the value of the EXAMPLE_PATH tag contains directories, you can use the
-# EXAMPLE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp and
-# *.h) to filter out the source-files in the directories. If left blank all
-# files are included.
-
-EXAMPLE_PATTERNS       =
-
-# If the EXAMPLE_RECURSIVE tag is set to YES then subdirectories will be
-# searched for input files to be used with the \include or \dontinclude commands
-# irrespective of the value of the RECURSIVE tag.
-# The default value is: NO.
-
-EXAMPLE_RECURSIVE      = NO
-
-# The IMAGE_PATH tag can be used to specify one or more files or directories
-# that contain images that are to be included in the documentation (see the
-# \image command).
-
-IMAGE_PATH             =
-
-# The INPUT_FILTER tag can be used to specify a program that doxygen should
-# invoke to filter for each input file. Doxygen will invoke the filter program
-# by executing (via popen()) the command:
-#
-# <filter> <input-file>
-#
-# where <filter> is the value of the INPUT_FILTER tag, and <input-file> is the
-# name of an input file. Doxygen will then use the output that the filter
-# program writes to standard output. If FILTER_PATTERNS is specified, this tag
-# will be ignored.
-#
-# Note that the filter must not add or remove lines; it is applied before the
-# code is scanned, but not when the output code is generated. If lines are added
-# or removed, the anchors will not be placed correctly.
-
-INPUT_FILTER           =
-
-# The FILTER_PATTERNS tag can be used to specify filters on a per file pattern
-# basis. Doxygen will compare the file name with each pattern and apply the
-# filter if there is a match. The filters are a list of the form: pattern=filter
-# (like *.cpp=my_cpp_filter). See INPUT_FILTER for further information on how
-# filters are used. If the FILTER_PATTERNS tag is empty or if none of the
-# patterns match the file name, INPUT_FILTER is applied.
-
-FILTER_PATTERNS        =
-
-# If the FILTER_SOURCE_FILES tag is set to YES, the input filter (if set using
-# INPUT_FILTER ) will also be used to filter the input files that are used for
-# producing the source files to browse (i.e. when SOURCE_BROWSER is set to YES).
-# The default value is: NO.
-
-FILTER_SOURCE_FILES    = NO
-
-# The FILTER_SOURCE_PATTERNS tag can be used to specify source filters per file
-# pattern. A pattern will override the setting for FILTER_PATTERN (if any) and
-# it is also possible to disable source filtering for a specific pattern using
-# *.ext= (so without naming a filter).
-# This tag requires that the tag FILTER_SOURCE_FILES is set to YES.
-
-FILTER_SOURCE_PATTERNS =
-
-# If the USE_MDFILE_AS_MAINPAGE tag refers to the name of a markdown file that
-# is part of the input, its contents will be placed on the main page
-# (index.html). This can be useful if you have a project on for instance GitHub
-# and want to reuse the introduction page also for the doxygen output.
-
-USE_MDFILE_AS_MAINPAGE =
-
-#---------------------------------------------------------------------------
-# Configuration options related to source browsing
-#---------------------------------------------------------------------------
-
-# If the SOURCE_BROWSER tag is set to YES then a list of source files will be
-# generated. Documented entities will be cross-referenced with these sources.
-#
-# Note: To get rid of all source code in the generated output, make sure that
-# also VERBATIM_HEADERS is set to NO.
-# The default value is: NO.
-
-SOURCE_BROWSER         = NO
-
-# Setting the INLINE_SOURCES tag to YES will include the body of functions,
-# classes and enums directly into the documentation.
-# The default value is: NO.
-
-INLINE_SOURCES         = NO
-
-# Setting the STRIP_CODE_COMMENTS tag to YES will instruct doxygen to hide any
-# special comment blocks from generated source code fragments. Normal C, C++ and
-# Fortran comments will always remain visible.
-# The default value is: YES.
-
-STRIP_CODE_COMMENTS    = YES
-
-# If the REFERENCED_BY_RELATION tag is set to YES then for each documented
-# function all documented functions referencing it will be listed.
-# The default value is: NO.
-
-REFERENCED_BY_RELATION = NO
-
-# If the REFERENCES_RELATION tag is set to YES then for each documented function
-# all documented entities called/used by that function will be listed.
-# The default value is: NO.
-
-REFERENCES_RELATION    = NO
-
-# If the REFERENCES_LINK_SOURCE tag is set to YES and SOURCE_BROWSER tag is set
-# to YES, then the hyperlinks from functions in REFERENCES_RELATION and
-# REFERENCED_BY_RELATION lists will link to the source code. Otherwise they will
-# link to the documentation.
-# The default value is: YES.
-
-REFERENCES_LINK_SOURCE = YES
-
-# If SOURCE_TOOLTIPS is enabled (the default) then hovering a hyperlink in the
-# source code will show a tooltip with additional information such as prototype,
-# brief description and links to the definition and documentation. Since this
-# will make the HTML file larger and loading of large files a bit slower, you
-# can opt to disable this feature.
-# The default value is: YES.
-# This tag requires that the tag SOURCE_BROWSER is set to YES.
-
-SOURCE_TOOLTIPS        = YES
-
-# If the USE_HTAGS tag is set to YES then the references to source code will
-# point to the HTML generated by the htags(1) tool instead of doxygen built-in
-# source browser. The htags tool is part of GNU's global source tagging system
-# (see http://www.gnu.org/software/global/global.html). You will need version
-# 4.8.6 or higher.
-#
-# To use it do the following:
-# - Install the latest version of global
-# - Enable SOURCE_BROWSER and USE_HTAGS in the config file
-# - Make sure the INPUT points to the root of the source tree
-# - Run doxygen as normal
-#
-# Doxygen will invoke htags (and that will in turn invoke gtags), so these
-# tools must be available from the command line (i.e. in the search path).
-#
-# The result: instead of the source browser generated by doxygen, the links to
-# source code will now point to the output of htags.
-# The default value is: NO.
-# This tag requires that the tag SOURCE_BROWSER is set to YES.
-
-USE_HTAGS              = NO
-
-# If the VERBATIM_HEADERS tag is set the YES then doxygen will generate a
-# verbatim copy of the header file for each class for which an include is
-# specified. Set to NO to disable this.
-# See also: Section \class.
-# The default value is: YES.
-
-VERBATIM_HEADERS       = YES
-
-#---------------------------------------------------------------------------
-# Configuration options related to the alphabetical class index
-#---------------------------------------------------------------------------
-
-# If the ALPHABETICAL_INDEX tag is set to YES, an alphabetical index of all
-# compounds will be generated. Enable this if the project contains a lot of
-# classes, structs, unions or interfaces.
-# The default value is: YES.
-
-ALPHABETICAL_INDEX     = YES
-
-# The COLS_IN_ALPHA_INDEX tag can be used to specify the number of columns in
-# which the alphabetical index list will be split.
-# Minimum value: 1, maximum value: 20, default value: 5.
-# This tag requires that the tag ALPHABETICAL_INDEX is set to YES.
-
-COLS_IN_ALPHA_INDEX    = 5
-
-# In case all classes in a project start with a common prefix, all classes will
-# be put under the same header in the alphabetical index. The IGNORE_PREFIX tag
-# can be used to specify a prefix (or a list of prefixes) that should be ignored
-# while generating the index headers.
-# This tag requires that the tag ALPHABETICAL_INDEX is set to YES.
-
-IGNORE_PREFIX          =
-
-#---------------------------------------------------------------------------
-# Configuration options related to the HTML output
-#---------------------------------------------------------------------------
-
-# If the GENERATE_HTML tag is set to YES doxygen will generate HTML output
-# The default value is: YES.
-
-GENERATE_HTML          = YES
-
-# The HTML_OUTPUT tag is used to specify where the HTML docs will be put. If a
-# relative path is entered the value of OUTPUT_DIRECTORY will be put in front of
-# it.
-# The default directory is: html.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-HTML_OUTPUT            = html
-
-# The HTML_FILE_EXTENSION tag can be used to specify the file extension for each
-# generated HTML page (for example: .htm, .php, .asp).
-# The default value is: .html.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-HTML_FILE_EXTENSION    = .html
-
-# The HTML_HEADER tag can be used to specify a user-defined HTML header file for
-# each generated HTML page. If the tag is left blank doxygen will generate a
-# standard header.
-#
-# To get valid HTML the header file that includes any scripts and style sheets
-# that doxygen needs, which is dependent on the configuration options used (e.g.
-# the setting GENERATE_TREEVIEW). It is highly recommended to start with a
-# default header using
-# doxygen -w html new_header.html new_footer.html new_stylesheet.css
-# YourConfigFile
-# and then modify the file new_header.html. See also section "Doxygen usage"
-# for information on how to generate the default header that doxygen normally
-# uses.
-# Note: The header is subject to change so you typically have to regenerate the
-# default header when upgrading to a newer version of doxygen. For a description
-# of the possible markers and block names see the documentation.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-HTML_HEADER            =
-
-# The HTML_FOOTER tag can be used to specify a user-defined HTML footer for each
-# generated HTML page. If the tag is left blank doxygen will generate a standard
-# footer. See HTML_HEADER for more information on how to generate a default
-# footer and what special commands can be used inside the footer. See also
-# section "Doxygen usage" for information on how to generate the default footer
-# that doxygen normally uses.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-HTML_FOOTER            =
-
-# The HTML_STYLESHEET tag can be used to specify a user-defined cascading style
-# sheet that is used by each HTML page. It can be used to fine-tune the look of
-# the HTML output. If left blank doxygen will generate a default style sheet.
-# See also section "Doxygen usage" for information on how to generate the style
-# sheet that doxygen normally uses.
-# Note: It is recommended to use HTML_EXTRA_STYLESHEET instead of this tag, as
-# it is more robust and this tag (HTML_STYLESHEET) will in the future become
-# obsolete.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-HTML_STYLESHEET        =
-
-# The HTML_EXTRA_STYLESHEET tag can be used to specify an additional user-
-# defined cascading style sheet that is included after the standard style sheets
-# created by doxygen. Using this option one can overrule certain style aspects.
-# This is preferred over using HTML_STYLESHEET since it does not replace the
-# standard style sheet and is therefor more robust against future updates.
-# Doxygen will copy the style sheet file to the output directory. For an example
-# see the documentation.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-HTML_EXTRA_STYLESHEET  =
-
-# The HTML_EXTRA_FILES tag can be used to specify one or more extra images or
-# other source files which should be copied to the HTML output directory. Note
-# that these files will be copied to the base HTML output directory. Use the
-# $relpath^ marker in the HTML_HEADER and/or HTML_FOOTER files to load these
-# files. In the HTML_STYLESHEET file, use the file name only. Also note that the
-# files will be copied as-is; there are no commands or markers available.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-HTML_EXTRA_FILES       =
-
-# The HTML_COLORSTYLE_HUE tag controls the color of the HTML output. Doxygen
-# will adjust the colors in the stylesheet and background images according to
-# this color. Hue is specified as an angle on a colorwheel, see
-# http://en.wikipedia.org/wiki/Hue for more information. For instance the value
-# 0 represents red, 60 is yellow, 120 is green, 180 is cyan, 240 is blue, 300
-# purple, and 360 is red again.
-# Minimum value: 0, maximum value: 359, default value: 220.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-HTML_COLORSTYLE_HUE    = 220
-
-# The HTML_COLORSTYLE_SAT tag controls the purity (or saturation) of the colors
-# in the HTML output. For a value of 0 the output will use grayscales only. A
-# value of 255 will produce the most vivid colors.
-# Minimum value: 0, maximum value: 255, default value: 100.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-HTML_COLORSTYLE_SAT    = 100
-
-# The HTML_COLORSTYLE_GAMMA tag controls the gamma correction applied to the
-# luminance component of the colors in the HTML output. Values below 100
-# gradually make the output lighter, whereas values above 100 make the output
-# darker. The value divided by 100 is the actual gamma applied, so 80 represents
-# a gamma of 0.8, The value 220 represents a gamma of 2.2, and 100 does not
-# change the gamma.
-# Minimum value: 40, maximum value: 240, default value: 80.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-HTML_COLORSTYLE_GAMMA  = 80
-
-# If the HTML_TIMESTAMP tag is set to YES then the footer of each generated HTML
-# page will contain the date and time when the page was generated. Setting this
-# to NO can help when comparing the output of multiple runs.
-# The default value is: YES.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-HTML_TIMESTAMP         = YES
-
-# If the HTML_DYNAMIC_SECTIONS tag is set to YES then the generated HTML
-# documentation will contain sections that can be hidden and shown after the
-# page has loaded.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-HTML_DYNAMIC_SECTIONS  = NO
-
-# With HTML_INDEX_NUM_ENTRIES one can control the preferred number of entries
-# shown in the various tree structured indices initially; the user can expand
-# and collapse entries dynamically later on. Doxygen will expand the tree to
-# such a level that at most the specified number of entries are visible (unless
-# a fully collapsed tree already exceeds this amount). So setting the number of
-# entries 1 will produce a full collapsed tree by default. 0 is a special value
-# representing an infinite number of entries and will result in a full expanded
-# tree by default.
-# Minimum value: 0, maximum value: 9999, default value: 100.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-HTML_INDEX_NUM_ENTRIES = 100
-
-# If the GENERATE_DOCSET tag is set to YES, additional index files will be
-# generated that can be used as input for Apple's Xcode 3 integrated development
-# environment (see: http://developer.apple.com/tools/xcode/), introduced with
-# OSX 10.5 (Leopard). To create a documentation set, doxygen will generate a
-# Makefile in the HTML output directory. Running make will produce the docset in
-# that directory and running make install will install the docset in
-# ~/Library/Developer/Shared/Documentation/DocSets so that Xcode will find it at
-# startup. See http://developer.apple.com/tools/creatingdocsetswithdoxygen.html
-# for more information.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-GENERATE_DOCSET        = NO
-
-# This tag determines the name of the docset feed. A documentation feed provides
-# an umbrella under which multiple documentation sets from a single provider
-# (such as a company or product suite) can be grouped.
-# The default value is: Doxygen generated docs.
-# This tag requires that the tag GENERATE_DOCSET is set to YES.
-
-DOCSET_FEEDNAME        = "Doxygen generated docs"
-
-# This tag specifies a string that should uniquely identify the documentation
-# set bundle. This should be a reverse domain-name style string, e.g.
-# com.mycompany.MyDocSet. Doxygen will append .docset to the name.
-# The default value is: org.doxygen.Project.
-# This tag requires that the tag GENERATE_DOCSET is set to YES.
-
-DOCSET_BUNDLE_ID       = org.doxygen.Project
-
-# The DOCSET_PUBLISHER_ID tag specifies a string that should uniquely identify
-# the documentation publisher. This should be a reverse domain-name style
-# string, e.g. com.mycompany.MyDocSet.documentation.
-# The default value is: org.doxygen.Publisher.
-# This tag requires that the tag GENERATE_DOCSET is set to YES.
-
-DOCSET_PUBLISHER_ID    = org.doxygen.Publisher
-
-# The DOCSET_PUBLISHER_NAME tag identifies the documentation publisher.
-# The default value is: Publisher.
-# This tag requires that the tag GENERATE_DOCSET is set to YES.
-
-DOCSET_PUBLISHER_NAME  = Publisher
-
-# If the GENERATE_HTMLHELP tag is set to YES then doxygen generates three
-# additional HTML index files: index.hhp, index.hhc, and index.hhk. The
-# index.hhp is a project file that can be read by Microsoft's HTML Help Workshop
-# (see: http://www.microsoft.com/en-us/download/details.aspx?id=21138) on
-# Windows.
-#
-# The HTML Help Workshop contains a compiler that can convert all HTML output
-# generated by doxygen into a single compiled HTML file (.chm). Compiled HTML
-# files are now used as the Windows 98 help format, and will replace the old
-# Windows help format (.hlp) on all Windows platforms in the future. Compressed
-# HTML files also contain an index, a table of contents, and you can search for
-# words in the documentation. The HTML workshop also contains a viewer for
-# compressed HTML files.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-GENERATE_HTMLHELP      = NO
-
-# The CHM_FILE tag can be used to specify the file name of the resulting .chm
-# file. You can add a path in front of the file if the result should not be
-# written to the html output directory.
-# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
-
-CHM_FILE               =
-
-# The HHC_LOCATION tag can be used to specify the location (absolute path
-# including file name) of the HTML help compiler ( hhc.exe). If non-empty
-# doxygen will try to run the HTML help compiler on the generated index.hhp.
-# The file has to be specified with full path.
-# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
-
-HHC_LOCATION           =
-
-# The GENERATE_CHI flag controls if a separate .chi index file is generated (
-# YES) or that it should be included in the master .chm file ( NO).
-# The default value is: NO.
-# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
-
-GENERATE_CHI           = NO
-
-# The CHM_INDEX_ENCODING is used to encode HtmlHelp index ( hhk), content ( hhc)
-# and project file content.
-# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
-
-CHM_INDEX_ENCODING     =
-
-# The BINARY_TOC flag controls whether a binary table of contents is generated (
-# YES) or a normal table of contents ( NO) in the .chm file.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
-
-BINARY_TOC             = NO
-
-# The TOC_EXPAND flag can be set to YES to add extra items for group members to
-# the table of contents of the HTML help documentation and to the tree view.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
-
-TOC_EXPAND             = NO
-
-# If the GENERATE_QHP tag is set to YES and both QHP_NAMESPACE and
-# QHP_VIRTUAL_FOLDER are set, an additional index file will be generated that
-# can be used as input for Qt's qhelpgenerator to generate a Qt Compressed Help
-# (.qch) of the generated HTML documentation.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-GENERATE_QHP           = NO
-
-# If the QHG_LOCATION tag is specified, the QCH_FILE tag can be used to specify
-# the file name of the resulting .qch file. The path specified is relative to
-# the HTML output folder.
-# This tag requires that the tag GENERATE_QHP is set to YES.
-
-QCH_FILE               =
-
-# The QHP_NAMESPACE tag specifies the namespace to use when generating Qt Help
-# Project output. For more information please see Qt Help Project / Namespace
-# (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#namespace).
-# The default value is: org.doxygen.Project.
-# This tag requires that the tag GENERATE_QHP is set to YES.
-
-QHP_NAMESPACE          = org.doxygen.Project
-
-# The QHP_VIRTUAL_FOLDER tag specifies the namespace to use when generating Qt
-# Help Project output. For more information please see Qt Help Project / Virtual
-# Folders (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#virtual-
-# folders).
-# The default value is: doc.
-# This tag requires that the tag GENERATE_QHP is set to YES.
-
-QHP_VIRTUAL_FOLDER     = doc
-
-# If the QHP_CUST_FILTER_NAME tag is set, it specifies the name of a custom
-# filter to add. For more information please see Qt Help Project / Custom
-# Filters (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#custom-
-# filters).
-# This tag requires that the tag GENERATE_QHP is set to YES.
-
-QHP_CUST_FILTER_NAME   =
-
-# The QHP_CUST_FILTER_ATTRS tag specifies the list of the attributes of the
-# custom filter to add. For more information please see Qt Help Project / Custom
-# Filters (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#custom-
-# filters).
-# This tag requires that the tag GENERATE_QHP is set to YES.
-
-QHP_CUST_FILTER_ATTRS  =
-
-# The QHP_SECT_FILTER_ATTRS tag specifies the list of the attributes this
-# project's filter section matches. Qt Help Project / Filter Attributes (see:
-# http://qt-project.org/doc/qt-4.8/qthelpproject.html#filter-attributes).
-# This tag requires that the tag GENERATE_QHP is set to YES.
-
-QHP_SECT_FILTER_ATTRS  =
-
-# The QHG_LOCATION tag can be used to specify the location of Qt's
-# qhelpgenerator. If non-empty doxygen will try to run qhelpgenerator on the
-# generated .qhp file.
-# This tag requires that the tag GENERATE_QHP is set to YES.
-
-QHG_LOCATION           =
-
-# If the GENERATE_ECLIPSEHELP tag is set to YES, additional index files will be
-# generated, together with the HTML files, they form an Eclipse help plugin. To
-# install this plugin and make it available under the help contents menu in
-# Eclipse, the contents of the directory containing the HTML and XML files needs
-# to be copied into the plugins directory of eclipse. The name of the directory
-# within the plugins directory should be the same as the ECLIPSE_DOC_ID value.
-# After copying Eclipse needs to be restarted before the help appears.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-GENERATE_ECLIPSEHELP   = NO
-
-# A unique identifier for the Eclipse help plugin. When installing the plugin
-# the directory name containing the HTML and XML files should also have this
-# name. Each documentation set should have its own identifier.
-# The default value is: org.doxygen.Project.
-# This tag requires that the tag GENERATE_ECLIPSEHELP is set to YES.
-
-ECLIPSE_DOC_ID         = org.doxygen.Project
-
-# If you want full control over the layout of the generated HTML pages it might
-# be necessary to disable the index and replace it with your own. The
-# DISABLE_INDEX tag can be used to turn on/off the condensed index (tabs) at top
-# of each HTML page. A value of NO enables the index and the value YES disables
-# it. Since the tabs in the index contain the same information as the navigation
-# tree, you can set this option to YES if you also set GENERATE_TREEVIEW to YES.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-DISABLE_INDEX          = NO
-
-# The GENERATE_TREEVIEW tag is used to specify whether a tree-like index
-# structure should be generated to display hierarchical information. If the tag
-# value is set to YES, a side panel will be generated containing a tree-like
-# index structure (just like the one that is generated for HTML Help). For this
-# to work a browser that supports JavaScript, DHTML, CSS and frames is required
-# (i.e. any modern browser). Windows users are probably better off using the
-# HTML help feature. Via custom stylesheets (see HTML_EXTRA_STYLESHEET) one can
-# further fine-tune the look of the index. As an example, the default style
-# sheet generated by doxygen has an example that shows how to put an image at
-# the root of the tree instead of the PROJECT_NAME. Since the tree basically has
-# the same information as the tab index, you could consider setting
-# DISABLE_INDEX to YES when enabling this option.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-GENERATE_TREEVIEW      = NO
-
-# The ENUM_VALUES_PER_LINE tag can be used to set the number of enum values that
-# doxygen will group on one line in the generated HTML documentation.
-#
-# Note that a value of 0 will completely suppress the enum values from appearing
-# in the overview section.
-# Minimum value: 0, maximum value: 20, default value: 4.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-ENUM_VALUES_PER_LINE   = 4
-
-# If the treeview is enabled (see GENERATE_TREEVIEW) then this tag can be used
-# to set the initial width (in pixels) of the frame in which the tree is shown.
-# Minimum value: 0, maximum value: 1500, default value: 250.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-TREEVIEW_WIDTH         = 250
-
-# When the EXT_LINKS_IN_WINDOW option is set to YES doxygen will open links to
-# external symbols imported via tag files in a separate window.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-EXT_LINKS_IN_WINDOW    = NO
-
-# Use this tag to change the font size of LaTeX formulas included as images in
-# the HTML documentation. When you change the font size after a successful
-# doxygen run you need to manually remove any form_*.png images from the HTML
-# output directory to force them to be regenerated.
-# Minimum value: 8, maximum value: 50, default value: 10.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-FORMULA_FONTSIZE       = 10
-
-# Use the FORMULA_TRANPARENT tag to determine whether or not the images
-# generated for formulas are transparent PNGs. Transparent PNGs are not
-# supported properly for IE 6.0, but are supported on all modern browsers.
-#
-# Note that when changing this option you need to delete any form_*.png files in
-# the HTML output directory before the changes have effect.
-# The default value is: YES.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-FORMULA_TRANSPARENT    = YES
-
-# Enable the USE_MATHJAX option to render LaTeX formulas using MathJax (see
-# http://www.mathjax.org) which uses client side Javascript for the rendering
-# instead of using prerendered bitmaps. Use this if you do not have LaTeX
-# installed or if you want to formulas look prettier in the HTML output. When
-# enabled you may also need to install MathJax separately and configure the path
-# to it using the MATHJAX_RELPATH option.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-USE_MATHJAX            = NO
-
-# When MathJax is enabled you can set the default output format to be used for
-# the MathJax output. See the MathJax site (see:
-# http://docs.mathjax.org/en/latest/output.html) for more details.
-# Possible values are: HTML-CSS (which is slower, but has the best
-# compatibility), NativeMML (i.e. MathML) and SVG.
-# The default value is: HTML-CSS.
-# This tag requires that the tag USE_MATHJAX is set to YES.
-
-MATHJAX_FORMAT         = HTML-CSS
-
-# When MathJax is enabled you need to specify the location relative to the HTML
-# output directory using the MATHJAX_RELPATH option. The destination directory
-# should contain the MathJax.js script. For instance, if the mathjax directory
-# is located at the same level as the HTML output directory, then
-# MATHJAX_RELPATH should be ../mathjax. The default value points to the MathJax
-# Content Delivery Network so you can quickly see the result without installing
-# MathJax. However, it is strongly recommended to install a local copy of
-# MathJax from http://www.mathjax.org before deployment.
-# The default value is: http://cdn.mathjax.org/mathjax/latest.
-# This tag requires that the tag USE_MATHJAX is set to YES.
-
-MATHJAX_RELPATH        = http://cdn.mathjax.org/mathjax/latest
-
-# The MATHJAX_EXTENSIONS tag can be used to specify one or more MathJax
-# extension names that should be enabled during MathJax rendering. For example
-# MATHJAX_EXTENSIONS = TeX/AMSmath TeX/AMSsymbols
-# This tag requires that the tag USE_MATHJAX is set to YES.
-
-MATHJAX_EXTENSIONS     =
-
-# The MATHJAX_CODEFILE tag can be used to specify a file with javascript pieces
-# of code that will be used on startup of the MathJax code. See the MathJax site
-# (see: http://docs.mathjax.org/en/latest/output.html) for more details. For an
-# example see the documentation.
-# This tag requires that the tag USE_MATHJAX is set to YES.
-
-MATHJAX_CODEFILE       =
-
-# When the SEARCHENGINE tag is enabled doxygen will generate a search box for
-# the HTML output. The underlying search engine uses javascript and DHTML and
-# should work on any modern browser. Note that when using HTML help
-# (GENERATE_HTMLHELP), Qt help (GENERATE_QHP), or docsets (GENERATE_DOCSET)
-# there is already a search function so this one should typically be disabled.
-# For large projects the javascript based search engine can be slow, then
-# enabling SERVER_BASED_SEARCH may provide a better solution. It is possible to
-# search using the keyboard; to jump to the search box use <access key> + S
-# (what the <access key> is depends on the OS and browser, but it is typically
-# <CTRL>, <ALT>/<option>, or both). Inside the search box use the <cursor down
-# key> to jump into the search results window, the results can be navigated
-# using the <cursor keys>. Press <Enter> to select an item or <escape> to cancel
-# the search. The filter options can be selected when the cursor is inside the
-# search box by pressing <Shift>+<cursor down>. Also here use the <cursor keys>
-# to select a filter and <Enter> or <escape> to activate or cancel the filter
-# option.
-# The default value is: YES.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-SEARCHENGINE           = YES
-
-# When the SERVER_BASED_SEARCH tag is enabled the search engine will be
-# implemented using a web server instead of a web client using Javascript. There
-# are two flavours of web server based searching depending on the
-# EXTERNAL_SEARCH setting. When disabled, doxygen will generate a PHP script for
-# searching and an index file used by the script. When EXTERNAL_SEARCH is
-# enabled the indexing and searching needs to be provided by external tools. See
-# the section "External Indexing and Searching" for details.
-# The default value is: NO.
-# This tag requires that the tag SEARCHENGINE is set to YES.
-
-SERVER_BASED_SEARCH    = NO
-
-# When EXTERNAL_SEARCH tag is enabled doxygen will no longer generate the PHP
-# script for searching. Instead the search results are written to an XML file
-# which needs to be processed by an external indexer. Doxygen will invoke an
-# external search engine pointed to by the SEARCHENGINE_URL option to obtain the
-# search results.
-#
-# Doxygen ships with an example indexer ( doxyindexer) and search engine
-# (doxysearch.cgi) which are based on the open source search engine library
-# Xapian (see: http://xapian.org/).
-#
-# See the section "External Indexing and Searching" for details.
-# The default value is: NO.
-# This tag requires that the tag SEARCHENGINE is set to YES.
-
-EXTERNAL_SEARCH        = NO
-
-# The SEARCHENGINE_URL should point to a search engine hosted by a web server
-# which will return the search results when EXTERNAL_SEARCH is enabled.
-#
-# Doxygen ships with an example indexer ( doxyindexer) and search engine
-# (doxysearch.cgi) which are based on the open source search engine library
-# Xapian (see: http://xapian.org/). See the section "External Indexing and
-# Searching" for details.
-# This tag requires that the tag SEARCHENGINE is set to YES.
-
-SEARCHENGINE_URL       =
-
-# When SERVER_BASED_SEARCH and EXTERNAL_SEARCH are both enabled the unindexed
-# search data is written to a file for indexing by an external tool. With the
-# SEARCHDATA_FILE tag the name of this file can be specified.
-# The default file is: searchdata.xml.
-# This tag requires that the tag SEARCHENGINE is set to YES.
-
-SEARCHDATA_FILE        = searchdata.xml
-
-# When SERVER_BASED_SEARCH and EXTERNAL_SEARCH are both enabled the
-# EXTERNAL_SEARCH_ID tag can be used as an identifier for the project. This is
-# useful in combination with EXTRA_SEARCH_MAPPINGS to search through multiple
-# projects and redirect the results back to the right project.
-# This tag requires that the tag SEARCHENGINE is set to YES.
-
-EXTERNAL_SEARCH_ID     =
-
-# The EXTRA_SEARCH_MAPPINGS tag can be used to enable searching through doxygen
-# projects other than the one defined by this configuration file, but that are
-# all added to the same external search index. Each project needs to have a
-# unique id set via EXTERNAL_SEARCH_ID. The search mapping then maps the id of
-# to a relative location where the documentation can be found. The format is:
-# EXTRA_SEARCH_MAPPINGS = tagname1=loc1 tagname2=loc2 ...
-# This tag requires that the tag SEARCHENGINE is set to YES.
-
-EXTRA_SEARCH_MAPPINGS  =
-
-#---------------------------------------------------------------------------
-# Configuration options related to the LaTeX output
-#---------------------------------------------------------------------------
-
-# If the GENERATE_LATEX tag is set to YES doxygen will generate LaTeX output.
-# The default value is: YES.
-
-GENERATE_LATEX         = YES
-
-# The LATEX_OUTPUT tag is used to specify where the LaTeX docs will be put. If a
-# relative path is entered the value of OUTPUT_DIRECTORY will be put in front of
-# it.
-# The default directory is: latex.
-# This tag requires that the tag GENERATE_LATEX is set to YES.
-
-LATEX_OUTPUT           = latex
-
-# The LATEX_CMD_NAME tag can be used to specify the LaTeX command name to be
-# invoked.
-#
-# Note that when enabling USE_PDFLATEX this option is only used for generating
-# bitmaps for formulas in the HTML output, but not in the Makefile that is
-# written to the output directory.
-# The default file is: latex.
-# This tag requires that the tag GENERATE_LATEX is set to YES.
-
-LATEX_CMD_NAME         = latex
-
-# The MAKEINDEX_CMD_NAME tag can be used to specify the command name to generate
-# index for LaTeX.
-# The default file is: makeindex.
-# This tag requires that the tag GENERATE_LATEX is set to YES.
-
-MAKEINDEX_CMD_NAME     = makeindex
-
-# If the COMPACT_LATEX tag is set to YES doxygen generates more compact LaTeX
-# documents. This may be useful for small projects and may help to save some
-# trees in general.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_LATEX is set to YES.
-
-COMPACT_LATEX          = NO
-
-# The PAPER_TYPE tag can be used to set the paper type that is used by the
-# printer.
-# Possible values are: a4 (210 x 297 mm), letter (8.5 x 11 inches), legal (8.5 x
-# 14 inches) and executive (7.25 x 10.5 inches).
-# The default value is: a4.
-# This tag requires that the tag GENERATE_LATEX is set to YES.
-
-PAPER_TYPE             = a4
-
-# The EXTRA_PACKAGES tag can be used to specify one or more LaTeX package names
-# that should be included in the LaTeX output. To get the times font for
-# instance you can specify
-# EXTRA_PACKAGES=times
-# If left blank no extra packages will be included.
-# This tag requires that the tag GENERATE_LATEX is set to YES.
-
-EXTRA_PACKAGES         =
-
-# The LATEX_HEADER tag can be used to specify a personal LaTeX header for the
-# generated LaTeX document. The header should contain everything until the first
-# chapter. If it is left blank doxygen will generate a standard header. See
-# section "Doxygen usage" for information on how to let doxygen write the
-# default header to a separate file.
-#
-# Note: Only use a user-defined header if you know what you are doing! The
-# following commands have a special meaning inside the header: $title,
-# $datetime, $date, $doxygenversion, $projectname, $projectnumber. Doxygen will
-# replace them by respectively the title of the page, the current date and time,
-# only the current date, the version number of doxygen, the project name (see
-# PROJECT_NAME), or the project number (see PROJECT_NUMBER).
-# This tag requires that the tag GENERATE_LATEX is set to YES.
-
-LATEX_HEADER           =
-
-# The LATEX_FOOTER tag can be used to specify a personal LaTeX footer for the
-# generated LaTeX document. The footer should contain everything after the last
-# chapter. If it is left blank doxygen will generate a standard footer.
-#
-# Note: Only use a user-defined footer if you know what you are doing!
-# This tag requires that the tag GENERATE_LATEX is set to YES.
-
-LATEX_FOOTER           =
-
-# The LATEX_EXTRA_FILES tag can be used to specify one or more extra images or
-# other source files which should be copied to the LATEX_OUTPUT output
-# directory. Note that the files will be copied as-is; there are no commands or
-# markers available.
-# This tag requires that the tag GENERATE_LATEX is set to YES.
-
-LATEX_EXTRA_FILES      =
-
-# If the PDF_HYPERLINKS tag is set to YES, the LaTeX that is generated is
-# prepared for conversion to PDF (using ps2pdf or pdflatex). The PDF file will
-# contain links (just like the HTML output) instead of page references. This
-# makes the output suitable for online browsing using a PDF viewer.
-# The default value is: YES.
-# This tag requires that the tag GENERATE_LATEX is set to YES.
-
-PDF_HYPERLINKS         = YES
-
-# If the LATEX_PDFLATEX tag is set to YES, doxygen will use pdflatex to generate
-# the PDF file directly from the LaTeX files. Set this option to YES to get a
-# higher quality PDF documentation.
-# The default value is: YES.
-# This tag requires that the tag GENERATE_LATEX is set to YES.
-
-USE_PDFLATEX           = YES
-
-# If the LATEX_BATCHMODE tag is set to YES, doxygen will add the \batchmode
-# command to the generated LaTeX files. This will instruct LaTeX to keep running
-# if errors occur, instead of asking the user for help. This option is also used
-# when generating formulas in HTML.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_LATEX is set to YES.
-
-LATEX_BATCHMODE        = NO
-
-# If the LATEX_HIDE_INDICES tag is set to YES then doxygen will not include the
-# index chapters (such as File Index, Compound Index, etc.) in the output.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_LATEX is set to YES.
-
-LATEX_HIDE_INDICES     = NO
-
-# If the LATEX_SOURCE_CODE tag is set to YES then doxygen will include source
-# code with syntax highlighting in the LaTeX output.
-#
-# Note that which sources are shown also depends on other settings such as
-# SOURCE_BROWSER.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_LATEX is set to YES.
-
-LATEX_SOURCE_CODE      = NO
-
-# The LATEX_BIB_STYLE tag can be used to specify the style to use for the
-# bibliography, e.g. plainnat, or ieeetr. See
-# http://en.wikipedia.org/wiki/BibTeX and \cite for more info.
-# The default value is: plain.
-# This tag requires that the tag GENERATE_LATEX is set to YES.
-
-LATEX_BIB_STYLE        = plain
-
-#---------------------------------------------------------------------------
-# Configuration options related to the RTF output
-#---------------------------------------------------------------------------
-
-# If the GENERATE_RTF tag is set to YES doxygen will generate RTF output. The
-# RTF output is optimized for Word 97 and may not look too pretty with other RTF
-# readers/editors.
-# The default value is: NO.
-
-GENERATE_RTF           = NO
-
-# The RTF_OUTPUT tag is used to specify where the RTF docs will be put. If a
-# relative path is entered the value of OUTPUT_DIRECTORY will be put in front of
-# it.
-# The default directory is: rtf.
-# This tag requires that the tag GENERATE_RTF is set to YES.
-
-RTF_OUTPUT             = rtf
-
-# If the COMPACT_RTF tag is set to YES doxygen generates more compact RTF
-# documents. This may be useful for small projects and may help to save some
-# trees in general.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_RTF is set to YES.
-
-COMPACT_RTF            = NO
-
-# If the RTF_HYPERLINKS tag is set to YES, the RTF that is generated will
-# contain hyperlink fields. The RTF file will contain links (just like the HTML
-# output) instead of page references. This makes the output suitable for online
-# browsing using Word or some other Word compatible readers that support those
-# fields.
-#
-# Note: WordPad (write) and others do not support links.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_RTF is set to YES.
-
-RTF_HYPERLINKS         = NO
-
-# Load stylesheet definitions from file. Syntax is similar to doxygen's config
-# file, i.e. a series of assignments. You only have to provide replacements,
-# missing definitions are set to their default value.
-#
-# See also section "Doxygen usage" for information on how to generate the
-# default style sheet that doxygen normally uses.
-# This tag requires that the tag GENERATE_RTF is set to YES.
-
-RTF_STYLESHEET_FILE    =
-
-# Set optional variables used in the generation of an RTF document. Syntax is
-# similar to doxygen's config file. A template extensions file can be generated
-# using doxygen -e rtf extensionFile.
-# This tag requires that the tag GENERATE_RTF is set to YES.
-
-RTF_EXTENSIONS_FILE    =
-
-#---------------------------------------------------------------------------
-# Configuration options related to the man page output
-#---------------------------------------------------------------------------
-
-# If the GENERATE_MAN tag is set to YES doxygen will generate man pages for
-# classes and files.
-# The default value is: NO.
-
-GENERATE_MAN           = NO
-
-# The MAN_OUTPUT tag is used to specify where the man pages will be put. If a
-# relative path is entered the value of OUTPUT_DIRECTORY will be put in front of
-# it. A directory man3 will be created inside the directory specified by
-# MAN_OUTPUT.
-# The default directory is: man.
-# This tag requires that the tag GENERATE_MAN is set to YES.
-
-MAN_OUTPUT             = man
-
-# The MAN_EXTENSION tag determines the extension that is added to the generated
-# man pages. In case the manual section does not start with a number, the number
-# 3 is prepended. The dot (.) at the beginning of the MAN_EXTENSION tag is
-# optional.
-# The default value is: .3.
-# This tag requires that the tag GENERATE_MAN is set to YES.
-
-MAN_EXTENSION          = .3
-
-# If the MAN_LINKS tag is set to YES and doxygen generates man output, then it
-# will generate one additional man file for each entity documented in the real
-# man page(s). These additional files only source the real man page, but without
-# them the man command would be unable to find the correct page.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_MAN is set to YES.
-
-MAN_LINKS              = NO
-
-#---------------------------------------------------------------------------
-# Configuration options related to the XML output
-#---------------------------------------------------------------------------
-
-# If the GENERATE_XML tag is set to YES doxygen will generate an XML file that
-# captures the structure of the code including all documentation.
-# The default value is: NO.
-
-GENERATE_XML           = NO
-
-# The XML_OUTPUT tag is used to specify where the XML pages will be put. If a
-# relative path is entered the value of OUTPUT_DIRECTORY will be put in front of
-# it.
-# The default directory is: xml.
-# This tag requires that the tag GENERATE_XML is set to YES.
-
-XML_OUTPUT             = xml
-
-# The XML_SCHEMA tag can be used to specify a XML schema, which can be used by a
-# validating XML parser to check the syntax of the XML files.
-# This tag requires that the tag GENERATE_XML is set to YES.
-
-XML_SCHEMA             =
-
-# The XML_DTD tag can be used to specify a XML DTD, which can be used by a
-# validating XML parser to check the syntax of the XML files.
-# This tag requires that the tag GENERATE_XML is set to YES.
-
-XML_DTD                =
-
-# If the XML_PROGRAMLISTING tag is set to YES doxygen will dump the program
-# listings (including syntax highlighting and cross-referencing information) to
-# the XML output. Note that enabling this will significantly increase the size
-# of the XML output.
-# The default value is: YES.
-# This tag requires that the tag GENERATE_XML is set to YES.
-
-XML_PROGRAMLISTING     = YES
-
-#---------------------------------------------------------------------------
-# Configuration options related to the DOCBOOK output
-#---------------------------------------------------------------------------
-
-# If the GENERATE_DOCBOOK tag is set to YES doxygen will generate Docbook files
-# that can be used to generate PDF.
-# The default value is: NO.
-
-GENERATE_DOCBOOK       = NO
-
-# The DOCBOOK_OUTPUT tag is used to specify where the Docbook pages will be put.
-# If a relative path is entered the value of OUTPUT_DIRECTORY will be put in
-# front of it.
-# The default directory is: docbook.
-# This tag requires that the tag GENERATE_DOCBOOK is set to YES.
-
-DOCBOOK_OUTPUT         = docbook
-
-#---------------------------------------------------------------------------
-# Configuration options for the AutoGen Definitions output
-#---------------------------------------------------------------------------
-
-# If the GENERATE_AUTOGEN_DEF tag is set to YES doxygen will generate an AutoGen
-# Definitions (see http://autogen.sf.net) file that captures the structure of
-# the code including all documentation. Note that this feature is still
-# experimental and incomplete at the moment.
-# The default value is: NO.
-
-GENERATE_AUTOGEN_DEF   = NO
-
-#---------------------------------------------------------------------------
-# Configuration options related to the Perl module output
-#---------------------------------------------------------------------------
-
-# If the GENERATE_PERLMOD tag is set to YES doxygen will generate a Perl module
-# file that captures the structure of the code including all documentation.
-#
-# Note that this feature is still experimental and incomplete at the moment.
-# The default value is: NO.
-
-GENERATE_PERLMOD       = NO
-
-# If the PERLMOD_LATEX tag is set to YES doxygen will generate the necessary
-# Makefile rules, Perl scripts and LaTeX code to be able to generate PDF and DVI
-# output from the Perl module output.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_PERLMOD is set to YES.
-
-PERLMOD_LATEX          = NO
-
-# If the PERLMOD_PRETTY tag is set to YES the Perl module output will be nicely
-# formatted so it can be parsed by a human reader. This is useful if you want to
-# understand what is going on. On the other hand, if this tag is set to NO the
-# size of the Perl module output will be much smaller and Perl will parse it
-# just the same.
-# The default value is: YES.
-# This tag requires that the tag GENERATE_PERLMOD is set to YES.
-
-PERLMOD_PRETTY         = YES
-
-# The names of the make variables in the generated doxyrules.make file are
-# prefixed with the string contained in PERLMOD_MAKEVAR_PREFIX. This is useful
-# so different doxyrules.make files included by the same Makefile don't
-# overwrite each other's variables.
-# This tag requires that the tag GENERATE_PERLMOD is set to YES.
-
-PERLMOD_MAKEVAR_PREFIX =
-
-#---------------------------------------------------------------------------
-# Configuration options related to the preprocessor
-#---------------------------------------------------------------------------
-
-# If the ENABLE_PREPROCESSING tag is set to YES doxygen will evaluate all
-# C-preprocessor directives found in the sources and include files.
-# The default value is: YES.
-
-ENABLE_PREPROCESSING   = YES
-
-# If the MACRO_EXPANSION tag is set to YES doxygen will expand all macro names
-# in the source code. If set to NO only conditional compilation will be
-# performed. Macro expansion can be done in a controlled way by setting
-# EXPAND_ONLY_PREDEF to YES.
-# The default value is: NO.
-# This tag requires that the tag ENABLE_PREPROCESSING is set to YES.
-
-MACRO_EXPANSION        = NO
-
-# If the EXPAND_ONLY_PREDEF and MACRO_EXPANSION tags are both set to YES then
-# the macro expansion is limited to the macros specified with the PREDEFINED and
-# EXPAND_AS_DEFINED tags.
-# The default value is: NO.
-# This tag requires that the tag ENABLE_PREPROCESSING is set to YES.
-
-EXPAND_ONLY_PREDEF     = NO
-
-# If the SEARCH_INCLUDES tag is set to YES the includes files in the
-# INCLUDE_PATH will be searched if a #include is found.
-# The default value is: YES.
-# This tag requires that the tag ENABLE_PREPROCESSING is set to YES.
-
-SEARCH_INCLUDES        = YES
-
-# The INCLUDE_PATH tag can be used to specify one or more directories that
-# contain include files that are not input files but should be processed by the
-# preprocessor.
-# This tag requires that the tag SEARCH_INCLUDES is set to YES.
-
-INCLUDE_PATH           =
-
-# You can use the INCLUDE_FILE_PATTERNS tag to specify one or more wildcard
-# patterns (like *.h and *.hpp) to filter out the header-files in the
-# directories. If left blank, the patterns specified with FILE_PATTERNS will be
-# used.
-# This tag requires that the tag ENABLE_PREPROCESSING is set to YES.
-
-INCLUDE_FILE_PATTERNS  =
-
-# The PREDEFINED tag can be used to specify one or more macro names that are
-# defined before the preprocessor is started (similar to the -D option of e.g.
-# gcc). The argument of the tag is a list of macros of the form: name or
-# name=definition (no spaces). If the definition and the "=" are omitted, "=1"
-# is assumed. To prevent a macro definition from being undefined via #undef or
-# recursively expanded use the := operator instead of the = operator.
-# This tag requires that the tag ENABLE_PREPROCESSING is set to YES.
-
-PREDEFINED             =
-
-# If the MACRO_EXPANSION and EXPAND_ONLY_PREDEF tags are set to YES then this
-# tag can be used to specify a list of macro names that should be expanded. The
-# macro definition that is found in the sources will be used. Use the PREDEFINED
-# tag if you want to use a different macro definition that overrules the
-# definition found in the source code.
-# This tag requires that the tag ENABLE_PREPROCESSING is set to YES.
-
-EXPAND_AS_DEFINED      =
-
-# If the SKIP_FUNCTION_MACROS tag is set to YES then doxygen's preprocessor will
-# remove all refrences to function-like macros that are alone on a line, have an
-# all uppercase name, and do not end with a semicolon. Such function macros are
-# typically used for boiler-plate code, and will confuse the parser if not
-# removed.
-# The default value is: YES.
-# This tag requires that the tag ENABLE_PREPROCESSING is set to YES.
-
-SKIP_FUNCTION_MACROS   = YES
-
-#---------------------------------------------------------------------------
-# Configuration options related to external references
-#---------------------------------------------------------------------------
-
-# The TAGFILES tag can be used to specify one or more tag files. For each tag
-# file the location of the external documentation should be added. The format of
-# a tag file without this location is as follows:
-# TAGFILES = file1 file2 ...
-# Adding location for the tag files is done as follows:
-# TAGFILES = file1=loc1 "file2 = loc2" ...
-# where loc1 and loc2 can be relative or absolute paths or URLs. See the
-# section "Linking to external documentation" for more information about the use
-# of tag files.
-# Note: Each tag file must have an unique name (where the name does NOT include
-# the path). If a tag file is not located in the directory in which doxygen is
-# run, you must also specify the path to the tagfile here.
-
-TAGFILES               =
-
-# When a file name is specified after GENERATE_TAGFILE, doxygen will create a
-# tag file that is based on the input files it reads. See section "Linking to
-# external documentation" for more information about the usage of tag files.
-
-GENERATE_TAGFILE       =
-
-# If the ALLEXTERNALS tag is set to YES all external class will be listed in the
-# class index. If set to NO only the inherited external classes will be listed.
-# The default value is: NO.
-
-ALLEXTERNALS           = NO
-
-# If the EXTERNAL_GROUPS tag is set to YES all external groups will be listed in
-# the modules index. If set to NO, only the current project's groups will be
-# listed.
-# The default value is: YES.
-
-EXTERNAL_GROUPS        = YES
-
-# If the EXTERNAL_PAGES tag is set to YES all external pages will be listed in
-# the related pages index. If set to NO, only the current project's pages will
-# be listed.
-# The default value is: YES.
-
-EXTERNAL_PAGES         = YES
-
-# The PERL_PATH should be the absolute path and name of the perl script
-# interpreter (i.e. the result of 'which perl').
-# The default file (with absolute path) is: /usr/bin/perl.
-
-PERL_PATH              = /usr/bin/perl
-
-#---------------------------------------------------------------------------
-# Configuration options related to the dot tool
-#---------------------------------------------------------------------------
-
-# If the CLASS_DIAGRAMS tag is set to YES doxygen will generate a class diagram
-# (in HTML and LaTeX) for classes with base or super classes. Setting the tag to
-# NO turns the diagrams off. Note that this option also works with HAVE_DOT
-# disabled, but it is recommended to install and use dot, since it yields more
-# powerful graphs.
-# The default value is: YES.
-
-CLASS_DIAGRAMS         = YES
-
-# You can define message sequence charts within doxygen comments using the \msc
-# command. Doxygen will then run the mscgen tool (see:
-# http://www.mcternan.me.uk/mscgen/)) to produce the chart and insert it in the
-# documentation. The MSCGEN_PATH tag allows you to specify the directory where
-# the mscgen tool resides. If left empty the tool is assumed to be found in the
-# default search path.
-
-MSCGEN_PATH            =
-
-# You can include diagrams made with dia in doxygen documentation. Doxygen will
-# then run dia to produce the diagram and insert it in the documentation. The
-# DIA_PATH tag allows you to specify the directory where the dia binary resides.
-# If left empty dia is assumed to be found in the default search path.
-
-DIA_PATH               =
-
-# If set to YES, the inheritance and collaboration graphs will hide inheritance
-# and usage relations if the target is undocumented or is not a class.
-# The default value is: YES.
-
-HIDE_UNDOC_RELATIONS   = YES
-
-# If you set the HAVE_DOT tag to YES then doxygen will assume the dot tool is
-# available from the path. This tool is part of Graphviz (see:
-# http://www.graphviz.org/), a graph visualization toolkit from AT&T and Lucent
-# Bell Labs. The other options in this section have no effect if this option is
-# set to NO
-# The default value is: NO.
-
-HAVE_DOT               = NO
-
-# The DOT_NUM_THREADS specifies the number of dot invocations doxygen is allowed
-# to run in parallel. When set to 0 doxygen will base this on the number of
-# processors available in the system. You can set it explicitly to a value
-# larger than 0 to get control over the balance between CPU load and processing
-# speed.
-# Minimum value: 0, maximum value: 32, default value: 0.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-DOT_NUM_THREADS        = 0
-
-# When you want a differently looking font n the dot files that doxygen
-# generates you can specify the font name using DOT_FONTNAME. You need to make
-# sure dot is able to find the font, which can be done by putting it in a
-# standard location or by setting the DOTFONTPATH environment variable or by
-# setting DOT_FONTPATH to the directory containing the font.
-# The default value is: Helvetica.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-DOT_FONTNAME           = Helvetica
-
-# The DOT_FONTSIZE tag can be used to set the size (in points) of the font of
-# dot graphs.
-# Minimum value: 4, maximum value: 24, default value: 10.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-DOT_FONTSIZE           = 10
-
-# By default doxygen will tell dot to use the default font as specified with
-# DOT_FONTNAME. If you specify a different font using DOT_FONTNAME you can set
-# the path where dot can find it using this tag.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-DOT_FONTPATH           =
-
-# If the CLASS_GRAPH tag is set to YES then doxygen will generate a graph for
-# each documented class showing the direct and indirect inheritance relations.
-# Setting this tag to YES will force the CLASS_DIAGRAMS tag to NO.
-# The default value is: YES.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-CLASS_GRAPH            = YES
-
-# If the COLLABORATION_GRAPH tag is set to YES then doxygen will generate a
-# graph for each documented class showing the direct and indirect implementation
-# dependencies (inheritance, containment, and class references variables) of the
-# class with other documented classes.
-# The default value is: YES.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-COLLABORATION_GRAPH    = YES
-
-# If the GROUP_GRAPHS tag is set to YES then doxygen will generate a graph for
-# groups, showing the direct groups dependencies.
-# The default value is: YES.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-GROUP_GRAPHS           = YES
-
-# If the UML_LOOK tag is set to YES doxygen will generate inheritance and
-# collaboration diagrams in a style similar to the OMG's Unified Modeling
-# Language.
-# The default value is: NO.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-UML_LOOK               = NO
-
-# If the UML_LOOK tag is enabled, the fields and methods are shown inside the
-# class node. If there are many fields or methods and many nodes the graph may
-# become too big to be useful. The UML_LIMIT_NUM_FIELDS threshold limits the
-# number of items for each type to make the size more manageable. Set this to 0
-# for no limit. Note that the threshold may be exceeded by 50% before the limit
-# is enforced. So when you set the threshold to 10, up to 15 fields may appear,
-# but if the number exceeds 15, the total amount of fields shown is limited to
-# 10.
-# Minimum value: 0, maximum value: 100, default value: 10.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-UML_LIMIT_NUM_FIELDS   = 10
-
-# If the TEMPLATE_RELATIONS tag is set to YES then the inheritance and
-# collaboration graphs will show the relations between templates and their
-# instances.
-# The default value is: NO.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-TEMPLATE_RELATIONS     = NO
-
-# If the INCLUDE_GRAPH, ENABLE_PREPROCESSING and SEARCH_INCLUDES tags are set to
-# YES then doxygen will generate a graph for each documented file showing the
-# direct and indirect include dependencies of the file with other documented
-# files.
-# The default value is: YES.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-INCLUDE_GRAPH          = YES
-
-# If the INCLUDED_BY_GRAPH, ENABLE_PREPROCESSING and SEARCH_INCLUDES tags are
-# set to YES then doxygen will generate a graph for each documented file showing
-# the direct and indirect include dependencies of the file with other documented
-# files.
-# The default value is: YES.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-INCLUDED_BY_GRAPH      = YES
-
-# If the CALL_GRAPH tag is set to YES then doxygen will generate a call
-# dependency graph for every global function or class method.
-#
-# Note that enabling this option will significantly increase the time of a run.
-# So in most cases it will be better to enable call graphs for selected
-# functions only using the \callgraph command.
-# The default value is: NO.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-CALL_GRAPH             = NO
-
-# If the CALLER_GRAPH tag is set to YES then doxygen will generate a caller
-# dependency graph for every global function or class method.
-#
-# Note that enabling this option will significantly increase the time of a run.
-# So in most cases it will be better to enable caller graphs for selected
-# functions only using the \callergraph command.
-# The default value is: NO.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-CALLER_GRAPH           = NO
-
-# If the GRAPHICAL_HIERARCHY tag is set to YES then doxygen will graphical
-# hierarchy of all classes instead of a textual one.
-# The default value is: YES.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-GRAPHICAL_HIERARCHY    = YES
-
-# If the DIRECTORY_GRAPH tag is set to YES then doxygen will show the
-# dependencies a directory has on other directories in a graphical way. The
-# dependency relations are determined by the #include relations between the
-# files in the directories.
-# The default value is: YES.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-DIRECTORY_GRAPH        = YES
-
-# The DOT_IMAGE_FORMAT tag can be used to set the image format of the images
-# generated by dot.
-# Note: If you choose svg you need to set HTML_FILE_EXTENSION to xhtml in order
-# to make the SVG files visible in IE 9+ (other browsers do not have this
-# requirement).
-# Possible values are: png, jpg, gif and svg.
-# The default value is: png.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-DOT_IMAGE_FORMAT       = png
-
-# If DOT_IMAGE_FORMAT is set to svg, then this option can be set to YES to
-# enable generation of interactive SVG images that allow zooming and panning.
-#
-# Note that this requires a modern browser other than Internet Explorer. Tested
-# and working are Firefox, Chrome, Safari, and Opera.
-# Note: For IE 9+ you need to set HTML_FILE_EXTENSION to xhtml in order to make
-# the SVG files visible. Older versions of IE do not have SVG support.
-# The default value is: NO.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-INTERACTIVE_SVG        = NO
-
-# The DOT_PATH tag can be used to specify the path where the dot tool can be
-# found. If left blank, it is assumed the dot tool can be found in the path.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-DOT_PATH               =
-
-# The DOTFILE_DIRS tag can be used to specify one or more directories that
-# contain dot files that are included in the documentation (see the \dotfile
-# command).
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-DOTFILE_DIRS           =
-
-# The MSCFILE_DIRS tag can be used to specify one or more directories that
-# contain msc files that are included in the documentation (see the \mscfile
-# command).
-
-MSCFILE_DIRS           =
-
-# The DIAFILE_DIRS tag can be used to specify one or more directories that
-# contain dia files that are included in the documentation (see the \diafile
-# command).
-
-DIAFILE_DIRS           =
-
-# The DOT_GRAPH_MAX_NODES tag can be used to set the maximum number of nodes
-# that will be shown in the graph. If the number of nodes in a graph becomes
-# larger than this value, doxygen will truncate the graph, which is visualized
-# by representing a node as a red box. Note that doxygen if the number of direct
-# children of the root node in a graph is already larger than
-# DOT_GRAPH_MAX_NODES then the graph will not be shown at all. Also note that
-# the size of a graph can be further restricted by MAX_DOT_GRAPH_DEPTH.
-# Minimum value: 0, maximum value: 10000, default value: 50.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-DOT_GRAPH_MAX_NODES    = 50
-
-# The MAX_DOT_GRAPH_DEPTH tag can be used to set the maximum depth of the graphs
-# generated by dot. A depth value of 3 means that only nodes reachable from the
-# root by following a path via at most 3 edges will be shown. Nodes that lay
-# further from the root node will be omitted. Note that setting this option to 1
-# or 2 may greatly reduce the computation time needed for large code bases. Also
-# note that the size of a graph can be further restricted by
-# DOT_GRAPH_MAX_NODES. Using a depth of 0 means no depth restriction.
-# Minimum value: 0, maximum value: 1000, default value: 0.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-MAX_DOT_GRAPH_DEPTH    = 0
-
-# Set the DOT_TRANSPARENT tag to YES to generate images with a transparent
-# background. This is disabled by default, because dot on Windows does not seem
-# to support this out of the box.
-#
-# Warning: Depending on the platform used, enabling this option may lead to
-# badly anti-aliased labels on the edges of a graph (i.e. they become hard to
-# read).
-# The default value is: NO.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-DOT_TRANSPARENT        = NO
-
-# Set the DOT_MULTI_TARGETS tag to YES allow dot to generate multiple output
-# files in one run (i.e. multiple -o and -T options on the command line). This
-# makes dot run faster, but since only newer versions of dot (>1.8.10) support
-# this, this feature is disabled by default.
-# The default value is: NO.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-DOT_MULTI_TARGETS      = YES
-
-# If the GENERATE_LEGEND tag is set to YES doxygen will generate a legend page
-# explaining the meaning of the various boxes and arrows in the dot generated
-# graphs.
-# The default value is: YES.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-GENERATE_LEGEND        = YES
-
-# If the DOT_CLEANUP tag is set to YES doxygen will remove the intermediate dot
-# files that are used to generate the various graphs.
-# The default value is: YES.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-DOT_CLEANUP            = YES
diff --git a/media/libaaudio/src/client/AudioStreamInternal.cpp b/media/libaaudio/src/client/AudioStreamInternal.cpp
index 4520823..ac7ad9a 100644
--- a/media/libaaudio/src/client/AudioStreamInternal.cpp
+++ b/media/libaaudio/src/client/AudioStreamInternal.cpp
@@ -353,6 +353,8 @@
     // Clear any stale timestamps from the previous run.
     drainTimestampsFromService();
 
+    prepareBuffersForStart(); // tell subclasses to get ready
+
     aaudio_result_t result = mServiceInterface.startStream(mServiceStreamHandle);
     if (result == AAUDIO_ERROR_INVALID_HANDLE) {
         ALOGD("%s() INVALID_HANDLE, stream was probably stolen", __func__);
diff --git a/media/libaaudio/src/client/AudioStreamInternal.h b/media/libaaudio/src/client/AudioStreamInternal.h
index 61591b3..63be978 100644
--- a/media/libaaudio/src/client/AudioStreamInternal.h
+++ b/media/libaaudio/src/client/AudioStreamInternal.h
@@ -123,7 +123,9 @@
 
     aaudio_result_t stopCallback();
 
-    virtual void advanceClientToMatchServerPosition() = 0;
+    virtual void prepareBuffersForStart() {}
+
+    virtual void advanceClientToMatchServerPosition(int32_t serverMargin = 0) = 0;
 
     virtual void onFlushFromServer() {}
 
diff --git a/media/libaaudio/src/client/AudioStreamInternalCapture.cpp b/media/libaaudio/src/client/AudioStreamInternalCapture.cpp
index 9fa2e40..d014608 100644
--- a/media/libaaudio/src/client/AudioStreamInternalCapture.cpp
+++ b/media/libaaudio/src/client/AudioStreamInternalCapture.cpp
@@ -41,9 +41,9 @@
 
 AudioStreamInternalCapture::~AudioStreamInternalCapture() {}
 
-void AudioStreamInternalCapture::advanceClientToMatchServerPosition() {
+void AudioStreamInternalCapture::advanceClientToMatchServerPosition(int32_t serverMargin) {
     int64_t readCounter = mAudioEndpoint->getDataReadCounter();
-    int64_t writeCounter = mAudioEndpoint->getDataWriteCounter();
+    int64_t writeCounter = mAudioEndpoint->getDataWriteCounter() + serverMargin;
 
     // Bump offset so caller does not see the retrograde motion in getFramesRead().
     int64_t offset = readCounter - writeCounter;
diff --git a/media/libaaudio/src/client/AudioStreamInternalCapture.h b/media/libaaudio/src/client/AudioStreamInternalCapture.h
index 6436a53..1d65d87 100644
--- a/media/libaaudio/src/client/AudioStreamInternalCapture.h
+++ b/media/libaaudio/src/client/AudioStreamInternalCapture.h
@@ -46,7 +46,7 @@
     }
 protected:
 
-    void advanceClientToMatchServerPosition() override;
+    void advanceClientToMatchServerPosition(int32_t serverOffset = 0) override;
 
 /**
  * Low level data processing that will not block. It will just read or write as much as it can.
diff --git a/media/libaaudio/src/client/AudioStreamInternalPlay.cpp b/media/libaaudio/src/client/AudioStreamInternalPlay.cpp
index 1303daf..6337b53 100644
--- a/media/libaaudio/src/client/AudioStreamInternalPlay.cpp
+++ b/media/libaaudio/src/client/AudioStreamInternalPlay.cpp
@@ -86,8 +86,13 @@
     return mServiceInterface.flushStream(mServiceStreamHandle);
 }
 
-void AudioStreamInternalPlay::advanceClientToMatchServerPosition() {
-    int64_t readCounter = mAudioEndpoint->getDataReadCounter();
+void AudioStreamInternalPlay::prepareBuffersForStart() {
+    // Prevent stale data from being played.
+    mAudioEndpoint->eraseDataMemory();
+}
+
+void AudioStreamInternalPlay::advanceClientToMatchServerPosition(int32_t serverMargin) {
+    int64_t readCounter = mAudioEndpoint->getDataReadCounter() + serverMargin;
     int64_t writeCounter = mAudioEndpoint->getDataWriteCounter();
 
     // Bump offset so caller does not see the retrograde motion in getFramesRead().
@@ -145,7 +150,9 @@
     if (mNeedCatchUp.isRequested()) {
         // Catch an MMAP pointer that is already advancing.
         // This will avoid initial underruns caused by a slow cold start.
-        advanceClientToMatchServerPosition();
+        // We add a one burst margin in case the DSP advances before we can write the data.
+        // This can help prevent the beginning of the stream from being skipped.
+        advanceClientToMatchServerPosition(getFramesPerBurst());
         mNeedCatchUp.acknowledge();
     }
 
diff --git a/media/libaaudio/src/client/AudioStreamInternalPlay.h b/media/libaaudio/src/client/AudioStreamInternalPlay.h
index 2e93157..be95da6 100644
--- a/media/libaaudio/src/client/AudioStreamInternalPlay.h
+++ b/media/libaaudio/src/client/AudioStreamInternalPlay.h
@@ -65,7 +65,9 @@
 
 protected:
 
-    void advanceClientToMatchServerPosition() override;
+    void prepareBuffersForStart() override;
+
+    void advanceClientToMatchServerPosition(int32_t serverMargin = 0) override;
 
     void onFlushFromServer() override;
 
diff --git a/media/libaaudio/src/core/AudioStream.cpp b/media/libaaudio/src/core/AudioStream.cpp
index f5c75ca..983887b 100644
--- a/media/libaaudio/src/core/AudioStream.cpp
+++ b/media/libaaudio/src/core/AudioStream.cpp
@@ -315,8 +315,11 @@
 
 void AudioStream::setState(aaudio_stream_state_t state) {
     ALOGD("%s(s#%d) from %d to %d", __func__, getId(), mState, state);
+    if (state == mState) {
+        return; // no change
+    }
     // Track transition to DISCONNECTED state.
-    if (state == AAUDIO_STREAM_STATE_DISCONNECTED && mState != state) {
+    if (state == AAUDIO_STREAM_STATE_DISCONNECTED) {
         android::mediametrics::LogItem(mMetricsId)
                 .set(AMEDIAMETRICS_PROP_EVENT, AMEDIAMETRICS_PROP_EVENT_VALUE_DISCONNECT)
                 .set(AMEDIAMETRICS_PROP_STATE, AudioGlobal_convertStreamStateToText(getState()))
@@ -324,18 +327,18 @@
     }
     // CLOSED is a final state
     if (mState == AAUDIO_STREAM_STATE_CLOSED) {
-        ALOGE("%s(%d) tried to set to %d but already CLOSED", __func__, getId(), state);
+        ALOGW("%s(%d) tried to set to %d but already CLOSED", __func__, getId(), state);
 
     // Once CLOSING, we can only move to CLOSED state.
     } else if (mState == AAUDIO_STREAM_STATE_CLOSING
                && state != AAUDIO_STREAM_STATE_CLOSED) {
-        ALOGE("%s(%d) tried to set to %d but already CLOSING", __func__, getId(), state);
+        ALOGW("%s(%d) tried to set to %d but already CLOSING", __func__, getId(), state);
 
     // Once DISCONNECTED, we can only move to CLOSING or CLOSED state.
     } else if (mState == AAUDIO_STREAM_STATE_DISCONNECTED
                && !(state == AAUDIO_STREAM_STATE_CLOSING
                    || state == AAUDIO_STREAM_STATE_CLOSED)) {
-        ALOGE("%s(%d) tried to set to %d but already DISCONNECTED", __func__, getId(), state);
+        ALOGW("%s(%d) tried to set to %d but already DISCONNECTED", __func__, getId(), state);
 
     } else {
         mState = state;
diff --git a/media/libaaudio/src/legacy/AudioStreamRecord.cpp b/media/libaaudio/src/legacy/AudioStreamRecord.cpp
index b0dc59e..3bfa2b7 100644
--- a/media/libaaudio/src/legacy/AudioStreamRecord.cpp
+++ b/media/libaaudio/src/legacy/AudioStreamRecord.cpp
@@ -212,9 +212,6 @@
     setSamplesPerFrame(mAudioRecord->channelCount());
 
     int32_t actualSampleRate = mAudioRecord->getSampleRate();
-    ALOGW_IF(actualSampleRate != getSampleRate(),
-             "open() sampleRate changed from %d to %d",
-             getSampleRate(), actualSampleRate);
     setSampleRate(actualSampleRate);
 
     // We may need to pass the data through a block size adapter to guarantee constant size.
diff --git a/media/libaaudio/src/legacy/AudioStreamTrack.cpp b/media/libaaudio/src/legacy/AudioStreamTrack.cpp
index 4869480..0427220 100644
--- a/media/libaaudio/src/legacy/AudioStreamTrack.cpp
+++ b/media/libaaudio/src/legacy/AudioStreamTrack.cpp
@@ -195,9 +195,6 @@
     setDeviceFormat(mAudioTrack->format());
 
     int32_t actualSampleRate = mAudioTrack->getSampleRate();
-    ALOGW_IF(actualSampleRate != getSampleRate(),
-             "open() sampleRate changed from %d to %d",
-             getSampleRate(), actualSampleRate);
     setSampleRate(actualSampleRate);
 
     // We may need to pass the data through a block size adapter to guarantee constant size.
@@ -240,11 +237,11 @@
 
     setSharingMode(AAUDIO_SHARING_MODE_SHARED); // EXCLUSIVE mode not supported in legacy
 
-    // Log warning if we did not get what we asked for.
-    ALOGW_IF(actualFlags != flags,
+    // Log if we did not get what we asked for.
+    ALOGD_IF(actualFlags != flags,
              "open() flags changed from 0x%08X to 0x%08X",
              flags, actualFlags);
-    ALOGW_IF(actualPerformanceMode != perfMode,
+    ALOGD_IF(actualPerformanceMode != perfMode,
              "open() perfMode changed from %d to %d",
              perfMode, actualPerformanceMode);
 
diff --git a/media/libaudioclient/IAudioFlinger.cpp b/media/libaudioclient/IAudioFlinger.cpp
index 16d2232..6d79aba 100644
--- a/media/libaudioclient/IAudioFlinger.cpp
+++ b/media/libaudioclient/IAudioFlinger.cpp
@@ -1001,7 +1001,7 @@
             break;
     }
 
-    // Whitelist of relevant events to trigger log merging.
+    // List of relevant events that trigger log merging.
     // Log merging should activate during audio activity of any kind. This are considered the
     // most relevant events.
     // TODO should select more wisely the items from the list
diff --git a/media/libaudioprocessing/AudioMixer.cpp b/media/libaudioprocessing/AudioMixer.cpp
index 1a31420..2ea215f 100644
--- a/media/libaudioprocessing/AudioMixer.cpp
+++ b/media/libaudioprocessing/AudioMixer.cpp
@@ -423,7 +423,7 @@
             }
             } break;
         case HAPTIC_INTENSITY: {
-            const haptic_intensity_t hapticIntensity = static_cast<haptic_intensity_t>(valueInt);
+            const os::HapticScale hapticIntensity = static_cast<os::HapticScale>(valueInt);
             if (track->mHapticIntensity != hapticIntensity) {
                 track->mHapticIntensity = hapticIntensity;
             }
@@ -545,7 +545,7 @@
     t->mPlaybackRate = AUDIO_PLAYBACK_RATE_DEFAULT;
     // haptic
     t->mHapticPlaybackEnabled = false;
-    t->mHapticIntensity = HAPTIC_SCALE_NONE;
+    t->mHapticIntensity = os::HapticScale::NONE;
     t->mMixerHapticChannelMask = AUDIO_CHANNEL_NONE;
     t->mMixerHapticChannelCount = 0;
     t->mAdjustInChannelCount = t->channelCount + t->mHapticChannelCount;
@@ -590,19 +590,12 @@
             const std::shared_ptr<Track> &t = getTrack(name);
             if (t->mHapticPlaybackEnabled) {
                 size_t sampleCount = mFrameCount * t->mMixerHapticChannelCount;
-                float gamma = t->getHapticScaleGamma();
-                float maxAmplitudeRatio = t->getHapticMaxAmplitudeRatio();
                 uint8_t* buffer = (uint8_t*)pair.first + mFrameCount * audio_bytes_per_frame(
                         t->mMixerChannelCount, t->mMixerFormat);
                 switch (t->mMixerFormat) {
                 // Mixer format should be AUDIO_FORMAT_PCM_FLOAT.
                 case AUDIO_FORMAT_PCM_FLOAT: {
-                    float* fout = (float*) buffer;
-                    for (size_t i = 0; i < sampleCount; i++) {
-                        float mul = fout[i] >= 0 ? 1.0 : -1.0;
-                        fout[i] = powf(fabsf(fout[i] / HAPTIC_MAX_AMPLITUDE_FLOAT), gamma)
-                                * maxAmplitudeRatio * HAPTIC_MAX_AMPLITUDE_FLOAT * mul;
-                    }
+                    os::scaleHapticData((float*) buffer, sampleCount, t->mHapticIntensity);
                 } break;
                 default:
                     LOG_ALWAYS_FATAL("bad mMixerFormat: %#x", t->mMixerFormat);
diff --git a/media/libaudioprocessing/AudioMixerOps.h b/media/libaudioprocessing/AudioMixerOps.h
index 80bd093..8d374c9 100644
--- a/media/libaudioprocessing/AudioMixerOps.h
+++ b/media/libaudioprocessing/AudioMixerOps.h
@@ -234,17 +234,20 @@
     static_assert(NCHAN > 0 && NCHAN <= 8);
     static_assert(MIXTYPE == MIXTYPE_MULTI_STEREOVOL
             || MIXTYPE == MIXTYPE_MULTI_SAVEONLY_STEREOVOL
-            || MIXTYPE == MIXTYPE_STEREOEXPAND);
+            || MIXTYPE == MIXTYPE_STEREOEXPAND
+            || MIXTYPE == MIXTYPE_MONOEXPAND);
     auto proc = [](auto& a, const auto& b) {
         if constexpr (MIXTYPE == MIXTYPE_MULTI_STEREOVOL
-                || MIXTYPE == MIXTYPE_STEREOEXPAND) {
+                || MIXTYPE == MIXTYPE_STEREOEXPAND
+                || MIXTYPE == MIXTYPE_MONOEXPAND) {
             a += b;
         } else {
             a = b;
         }
     };
     auto inp = [&in]() -> const TI& {
-        if constexpr (MIXTYPE == MIXTYPE_STEREOEXPAND) {
+        if constexpr (MIXTYPE == MIXTYPE_STEREOEXPAND
+                || MIXTYPE == MIXTYPE_MONOEXPAND) {
             return *in;
         } else {
             return *in++;
@@ -312,6 +315,8 @@
  *   TV/TAV: int32_t (U4.28) or int16_t (U4.12) or float
  *   Input channel count is 1.
  *   vol: represents volume array.
+ *   This uses stereo balanced volume vol[0] and vol[1].
+ *   Before R, this was a full volume array but was called only for channels <= 2.
  *
  *   This accumulates into the out pointer.
  *
@@ -356,17 +361,13 @@
         do {
             TA auxaccum = 0;
             if constexpr (MIXTYPE == MIXTYPE_MULTI) {
+                static_assert(NCHAN <= 2);
                 for (int i = 0; i < NCHAN; ++i) {
                     *out++ += MixMulAux<TO, TI, TV, TA>(*in++, vol[i], &auxaccum);
                     vol[i] += volinc[i];
                 }
-            } else if constexpr (MIXTYPE == MIXTYPE_MONOEXPAND) {
-                for (int i = 0; i < NCHAN; ++i) {
-                    *out++ += MixMulAux<TO, TI, TV, TA>(*in, vol[i], &auxaccum);
-                    vol[i] += volinc[i];
-                }
-                in++;
             } else if constexpr (MIXTYPE == MIXTYPE_MULTI_SAVEONLY) {
+                static_assert(NCHAN <= 2);
                 for (int i = 0; i < NCHAN; ++i) {
                     *out++ = MixMulAux<TO, TI, TV, TA>(*in++, vol[i], &auxaccum);
                     vol[i] += volinc[i];
@@ -383,11 +384,13 @@
                 vol[0] += volinc[0];
             } else if constexpr (MIXTYPE == MIXTYPE_MULTI_STEREOVOL
                     || MIXTYPE == MIXTYPE_MULTI_SAVEONLY_STEREOVOL
+                    || MIXTYPE == MIXTYPE_MONOEXPAND
                     || MIXTYPE == MIXTYPE_STEREOEXPAND) {
                 stereoVolumeHelper<MIXTYPE, NCHAN>(
                         out, in, vol, [&auxaccum] (auto &a, const auto &b) {
                     return MixMulAux<TO, TI, TV, TA>(a, b, &auxaccum);
                 });
+                if constexpr (MIXTYPE == MIXTYPE_MONOEXPAND) in += 1;
                 if constexpr (MIXTYPE == MIXTYPE_STEREOEXPAND) in += 2;
                 vol[0] += volinc[0];
                 vol[1] += volinc[1];
@@ -401,17 +404,13 @@
     } else {
         do {
             if constexpr (MIXTYPE == MIXTYPE_MULTI) {
+                static_assert(NCHAN <= 2);
                 for (int i = 0; i < NCHAN; ++i) {
                     *out++ += MixMul<TO, TI, TV>(*in++, vol[i]);
                     vol[i] += volinc[i];
                 }
-            } else if constexpr (MIXTYPE == MIXTYPE_MONOEXPAND) {
-                for (int i = 0; i < NCHAN; ++i) {
-                    *out++ += MixMul<TO, TI, TV>(*in, vol[i]);
-                    vol[i] += volinc[i];
-                }
-                in++;
             } else if constexpr (MIXTYPE == MIXTYPE_MULTI_SAVEONLY) {
+                static_assert(NCHAN <= 2);
                 for (int i = 0; i < NCHAN; ++i) {
                     *out++ = MixMul<TO, TI, TV>(*in++, vol[i]);
                     vol[i] += volinc[i];
@@ -428,10 +427,12 @@
                 vol[0] += volinc[0];
             } else if constexpr (MIXTYPE == MIXTYPE_MULTI_STEREOVOL
                     || MIXTYPE == MIXTYPE_MULTI_SAVEONLY_STEREOVOL
+                    || MIXTYPE == MIXTYPE_MONOEXPAND
                     || MIXTYPE == MIXTYPE_STEREOEXPAND) {
                 stereoVolumeHelper<MIXTYPE, NCHAN>(out, in, vol, [] (auto &a, const auto &b) {
                     return MixMul<TO, TI, TV>(a, b);
                 });
+                if constexpr (MIXTYPE == MIXTYPE_MONOEXPAND) in += 1;
                 if constexpr (MIXTYPE == MIXTYPE_STEREOEXPAND) in += 2;
                 vol[0] += volinc[0];
                 vol[1] += volinc[1];
@@ -454,15 +455,12 @@
         do {
             TA auxaccum = 0;
             if constexpr (MIXTYPE == MIXTYPE_MULTI) {
+                static_assert(NCHAN <= 2);
                 for (int i = 0; i < NCHAN; ++i) {
                     *out++ += MixMulAux<TO, TI, TV, TA>(*in++, vol[i], &auxaccum);
                 }
-            } else if constexpr (MIXTYPE == MIXTYPE_MONOEXPAND) {
-                for (int i = 0; i < NCHAN; ++i) {
-                    *out++ += MixMulAux<TO, TI, TV, TA>(*in, vol[i], &auxaccum);
-                }
-                in++;
             } else if constexpr (MIXTYPE == MIXTYPE_MULTI_SAVEONLY) {
+                static_assert(NCHAN <= 2);
                 for (int i = 0; i < NCHAN; ++i) {
                     *out++ = MixMulAux<TO, TI, TV, TA>(*in++, vol[i], &auxaccum);
                 }
@@ -476,11 +474,13 @@
                 }
             } else if constexpr (MIXTYPE == MIXTYPE_MULTI_STEREOVOL
                     || MIXTYPE == MIXTYPE_MULTI_SAVEONLY_STEREOVOL
+                    || MIXTYPE == MIXTYPE_MONOEXPAND
                     || MIXTYPE == MIXTYPE_STEREOEXPAND) {
                 stereoVolumeHelper<MIXTYPE, NCHAN>(
                         out, in, vol, [&auxaccum] (auto &a, const auto &b) {
                     return MixMulAux<TO, TI, TV, TA>(a, b, &auxaccum);
                 });
+                if constexpr (MIXTYPE == MIXTYPE_MONOEXPAND) in += 1;
                 if constexpr (MIXTYPE == MIXTYPE_STEREOEXPAND) in += 2;
             } else /* constexpr */ {
                 static_assert(dependent_false<MIXTYPE>, "invalid mixtype");
@@ -490,16 +490,14 @@
         } while (--frameCount);
     } else {
         do {
+            // ALOGD("Mixtype:%d NCHAN:%d", MIXTYPE, NCHAN);
             if constexpr (MIXTYPE == MIXTYPE_MULTI) {
+                static_assert(NCHAN <= 2);
                 for (int i = 0; i < NCHAN; ++i) {
                     *out++ += MixMul<TO, TI, TV>(*in++, vol[i]);
                 }
-            } else if constexpr (MIXTYPE == MIXTYPE_MONOEXPAND) {
-                for (int i = 0; i < NCHAN; ++i) {
-                    *out++ += MixMul<TO, TI, TV>(*in, vol[i]);
-                }
-                in++;
             } else if constexpr (MIXTYPE == MIXTYPE_MULTI_SAVEONLY) {
+                static_assert(NCHAN <= 2);
                 for (int i = 0; i < NCHAN; ++i) {
                     *out++ = MixMul<TO, TI, TV>(*in++, vol[i]);
                 }
@@ -513,10 +511,12 @@
                 }
             } else if constexpr (MIXTYPE == MIXTYPE_MULTI_STEREOVOL
                     || MIXTYPE == MIXTYPE_MULTI_SAVEONLY_STEREOVOL
+                    || MIXTYPE == MIXTYPE_MONOEXPAND
                     || MIXTYPE == MIXTYPE_STEREOEXPAND) {
                 stereoVolumeHelper<MIXTYPE, NCHAN>(out, in, vol, [] (auto &a, const auto &b) {
                     return MixMul<TO, TI, TV>(a, b);
                 });
+                if constexpr (MIXTYPE == MIXTYPE_MONOEXPAND) in += 1;
                 if constexpr (MIXTYPE == MIXTYPE_STEREOEXPAND) in += 2;
             } else /* constexpr */ {
                 static_assert(dependent_false<MIXTYPE>, "invalid mixtype");
diff --git a/media/libaudioprocessing/include/media/AudioMixer.h b/media/libaudioprocessing/include/media/AudioMixer.h
index 3f7cd48..70eafe3 100644
--- a/media/libaudioprocessing/include/media/AudioMixer.h
+++ b/media/libaudioprocessing/include/media/AudioMixer.h
@@ -22,10 +22,10 @@
 #include <stdint.h>
 #include <sys/types.h>
 
-#include <android/os/IExternalVibratorService.h>
 #include <media/AudioMixerBase.h>
 #include <media/BufferProviders.h>
 #include <utils/threads.h>
+#include <vibrator/ExternalVibrationUtils.h>
 
 // FIXME This is actually unity gain, which might not be max in future, expressed in U.12
 #define MAX_GAIN_INT AudioMixerBase::UNITY_GAIN_INT
@@ -55,32 +55,6 @@
                                   // parameter 'value' is a pointer to the new playback rate.
     };
 
-    typedef enum { // Haptic intensity, should keep consistent with VibratorService
-        HAPTIC_SCALE_MUTE = os::IExternalVibratorService::SCALE_MUTE,
-        HAPTIC_SCALE_VERY_LOW = os::IExternalVibratorService::SCALE_VERY_LOW,
-        HAPTIC_SCALE_LOW = os::IExternalVibratorService::SCALE_LOW,
-        HAPTIC_SCALE_NONE = os::IExternalVibratorService::SCALE_NONE,
-        HAPTIC_SCALE_HIGH = os::IExternalVibratorService::SCALE_HIGH,
-        HAPTIC_SCALE_VERY_HIGH = os::IExternalVibratorService::SCALE_VERY_HIGH,
-    } haptic_intensity_t;
-    static constexpr float HAPTIC_SCALE_VERY_LOW_RATIO = 2.0f / 3.0f;
-    static constexpr float HAPTIC_SCALE_LOW_RATIO = 3.0f / 4.0f;
-    static const constexpr float HAPTIC_MAX_AMPLITUDE_FLOAT = 1.0f;
-
-    static inline bool isValidHapticIntensity(haptic_intensity_t hapticIntensity) {
-        switch (hapticIntensity) {
-        case HAPTIC_SCALE_MUTE:
-        case HAPTIC_SCALE_VERY_LOW:
-        case HAPTIC_SCALE_LOW:
-        case HAPTIC_SCALE_NONE:
-        case HAPTIC_SCALE_HIGH:
-        case HAPTIC_SCALE_VERY_HIGH:
-            return true;
-        default:
-            return false;
-        }
-    }
-
     AudioMixer(size_t frameCount, uint32_t sampleRate)
             : AudioMixerBase(frameCount, sampleRate) {
         pthread_once(&sOnceControl, &sInitRoutine);
@@ -170,7 +144,7 @@
 
         // Haptic
         bool                 mHapticPlaybackEnabled;
-        haptic_intensity_t   mHapticIntensity;
+        os::HapticScale      mHapticIntensity;
         audio_channel_mask_t mHapticChannelMask;
         uint32_t             mHapticChannelCount;
         audio_channel_mask_t mMixerHapticChannelMask;
@@ -180,38 +154,6 @@
         uint32_t             mAdjustNonDestructiveInChannelCount;
         uint32_t             mAdjustNonDestructiveOutChannelCount;
         bool                 mKeepContractedChannels;
-
-        float getHapticScaleGamma() const {
-        // Need to keep consistent with the value in VibratorService.
-        switch (mHapticIntensity) {
-        case HAPTIC_SCALE_VERY_LOW:
-            return 2.0f;
-        case HAPTIC_SCALE_LOW:
-            return 1.5f;
-        case HAPTIC_SCALE_HIGH:
-            return 0.5f;
-        case HAPTIC_SCALE_VERY_HIGH:
-            return 0.25f;
-        default:
-            return 1.0f;
-        }
-        }
-
-        float getHapticMaxAmplitudeRatio() const {
-        // Need to keep consistent with the value in VibratorService.
-        switch (mHapticIntensity) {
-        case HAPTIC_SCALE_VERY_LOW:
-            return HAPTIC_SCALE_VERY_LOW_RATIO;
-        case HAPTIC_SCALE_LOW:
-            return HAPTIC_SCALE_LOW_RATIO;
-        case HAPTIC_SCALE_NONE:
-        case HAPTIC_SCALE_HIGH:
-        case HAPTIC_SCALE_VERY_HIGH:
-            return 1.0f;
-        default:
-            return 0.0f;
-        }
-        }
     };
 
     inline std::shared_ptr<Track> getTrack(int name) {
diff --git a/media/libaudioprocessing/tests/mixerops_benchmark.cpp b/media/libaudioprocessing/tests/mixerops_benchmark.cpp
index 86f5429..7a4c5c7 100644
--- a/media/libaudioprocessing/tests/mixerops_benchmark.cpp
+++ b/media/libaudioprocessing/tests/mixerops_benchmark.cpp
@@ -74,28 +74,32 @@
     }
 }
 
-BENCHMARK_TEMPLATE(BM_VolumeRampMulti, MIXTYPE_MULTI, 2);
-BENCHMARK_TEMPLATE(BM_VolumeRampMulti, MIXTYPE_MULTI_SAVEONLY, 2);
+// MULTI mode and MULTI_SAVEONLY mode are not used by AudioMixer for channels > 2,
+// which is ensured by a static_assert (won't compile for those configurations).
+// So we benchmark MIXTYPE_MULTI_MONOVOL and MIXTYPE_MULTI_SAVEONLY_MONOVOL compared
+// with MIXTYPE_MULTI_STEREOVOL and MIXTYPE_MULTI_SAVEONLY_STEREOVOL.
+BENCHMARK_TEMPLATE(BM_VolumeRampMulti, MIXTYPE_MULTI_MONOVOL, 2);
+BENCHMARK_TEMPLATE(BM_VolumeRampMulti, MIXTYPE_MULTI_SAVEONLY_MONOVOL, 2);
 BENCHMARK_TEMPLATE(BM_VolumeRampMulti, MIXTYPE_MULTI_STEREOVOL, 2);
 BENCHMARK_TEMPLATE(BM_VolumeRampMulti, MIXTYPE_MULTI_SAVEONLY_STEREOVOL, 2);
 
-BENCHMARK_TEMPLATE(BM_VolumeRampMulti, MIXTYPE_MULTI, 4);
-BENCHMARK_TEMPLATE(BM_VolumeRampMulti, MIXTYPE_MULTI_SAVEONLY, 4);
+BENCHMARK_TEMPLATE(BM_VolumeRampMulti, MIXTYPE_MULTI_MONOVOL, 4);
+BENCHMARK_TEMPLATE(BM_VolumeRampMulti, MIXTYPE_MULTI_SAVEONLY_MONOVOL, 4);
 BENCHMARK_TEMPLATE(BM_VolumeRampMulti, MIXTYPE_MULTI_STEREOVOL, 4);
 BENCHMARK_TEMPLATE(BM_VolumeRampMulti, MIXTYPE_MULTI_SAVEONLY_STEREOVOL, 4);
 
-BENCHMARK_TEMPLATE(BM_VolumeRampMulti, MIXTYPE_MULTI, 5);
-BENCHMARK_TEMPLATE(BM_VolumeRampMulti, MIXTYPE_MULTI_SAVEONLY, 5);
+BENCHMARK_TEMPLATE(BM_VolumeRampMulti, MIXTYPE_MULTI_MONOVOL, 5);
+BENCHMARK_TEMPLATE(BM_VolumeRampMulti, MIXTYPE_MULTI_SAVEONLY_MONOVOL, 5);
 BENCHMARK_TEMPLATE(BM_VolumeRampMulti, MIXTYPE_MULTI_STEREOVOL, 5);
 BENCHMARK_TEMPLATE(BM_VolumeRampMulti, MIXTYPE_MULTI_SAVEONLY_STEREOVOL, 5);
 
-BENCHMARK_TEMPLATE(BM_VolumeRampMulti, MIXTYPE_MULTI, 8);
-BENCHMARK_TEMPLATE(BM_VolumeRampMulti, MIXTYPE_MULTI_SAVEONLY, 8);
+BENCHMARK_TEMPLATE(BM_VolumeRampMulti, MIXTYPE_MULTI_MONOVOL, 8);
+BENCHMARK_TEMPLATE(BM_VolumeRampMulti, MIXTYPE_MULTI_SAVEONLY_MONOVOL, 8);
 BENCHMARK_TEMPLATE(BM_VolumeRampMulti, MIXTYPE_MULTI_STEREOVOL, 8);
 BENCHMARK_TEMPLATE(BM_VolumeRampMulti, MIXTYPE_MULTI_SAVEONLY_STEREOVOL, 8);
 
-BENCHMARK_TEMPLATE(BM_VolumeMulti, MIXTYPE_MULTI, 8);
-BENCHMARK_TEMPLATE(BM_VolumeMulti, MIXTYPE_MULTI_SAVEONLY, 8);
+BENCHMARK_TEMPLATE(BM_VolumeMulti, MIXTYPE_MULTI_MONOVOL, 8);
+BENCHMARK_TEMPLATE(BM_VolumeMulti, MIXTYPE_MULTI_SAVEONLY_MONOVOL, 8);
 BENCHMARK_TEMPLATE(BM_VolumeMulti, MIXTYPE_MULTI_STEREOVOL, 8);
 BENCHMARK_TEMPLATE(BM_VolumeMulti, MIXTYPE_MULTI_SAVEONLY_STEREOVOL, 8);
 
diff --git a/media/libeffects/data/audio_effects.xml b/media/libeffects/data/audio_effects.xml
index 2e5f529..93a2181 100644
--- a/media/libeffects/data/audio_effects.xml
+++ b/media/libeffects/data/audio_effects.xml
@@ -21,6 +21,7 @@
         <library name="downmix" path="libdownmix.so"/>
         <library name="loudness_enhancer" path="libldnhncr.so"/>
         <library name="dynamics_processing" path="libdynproc.so"/>
+        <library name="haptic_generator" path="libhapticgenerator.so"/>
     </libraries>
 
     <!-- list of effects to load.
@@ -58,6 +59,7 @@
         <effect name="downmix" library="downmix" uuid="93f04452-e4fe-41cc-91f9-e475b6d1d69f"/>
         <effect name="loudness_enhancer" library="loudness_enhancer" uuid="fa415329-2034-4bea-b5dc-5b381c8d1e2c"/>
         <effect name="dynamics_processing" library="dynamics_processing" uuid="e0e6539b-1781-7261-676f-6d7573696340"/>
+        <effect name="haptic_generator" library="haptic_generator" uuid="97c4acd1-8b82-4f2f-832e-c2fe5d7a9931"/>
     </effects>
 
     <!-- Audio pre processor configurations.
diff --git a/media/libeffects/hapticgenerator/Android.bp b/media/libeffects/hapticgenerator/Android.bp
new file mode 100644
index 0000000..f947339
--- /dev/null
+++ b/media/libeffects/hapticgenerator/Android.bp
@@ -0,0 +1,51 @@
+// Copyright (C) 2020 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// HapticGenerator library
+cc_library_shared {
+    name: "libhapticgenerator",
+
+    vendor: true,
+
+    srcs: [
+        "EffectHapticGenerator.cpp",
+        "Processors.cpp",
+    ],
+
+    cflags: [
+        "-O2", // Turning on the optimization in order to reduce effect processing time.
+               // The latency is around 1/5 less than without the optimization.
+        "-Wall",
+        "-Werror",
+        "-ffast-math", // This is needed for the non-zero coefficients optimization for
+                       // BiquadFilter. Try the biquad_filter_benchmark test in audio_utils
+                       // with/without `-ffast-math` for more context.
+        "-fvisibility=hidden",
+    ],
+
+    shared_libs: [
+        "libaudioutils",
+        "libbinder",
+        "liblog",
+        "libutils",
+        "libvibrator",
+    ],
+
+    relative_install_path: "soundfx",
+
+    header_libs: [
+        "libaudioeffects",
+    ],
+}
+
diff --git a/media/libeffects/hapticgenerator/EffectHapticGenerator.cpp b/media/libeffects/hapticgenerator/EffectHapticGenerator.cpp
new file mode 100644
index 0000000..311e5ba
--- /dev/null
+++ b/media/libeffects/hapticgenerator/EffectHapticGenerator.cpp
@@ -0,0 +1,519 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "EffectHG"
+//#define LOG_NDEBUG 0
+#include <utils/Log.h>
+
+#include "EffectHapticGenerator.h"
+
+#include <algorithm>
+#include <memory>
+#include <utility>
+
+#include <errno.h>
+#include <inttypes.h>
+
+#include <audio_effects/effect_hapticgenerator.h>
+#include <audio_utils/format.h>
+#include <system/audio.h>
+
+// This is the only symbol that needs to be exported
+__attribute__ ((visibility ("default")))
+audio_effect_library_t AUDIO_EFFECT_LIBRARY_INFO_SYM = {
+        .tag = AUDIO_EFFECT_LIBRARY_TAG,
+        .version = EFFECT_LIBRARY_API_VERSION,
+        .name = "HapticGenerator Library",
+        .implementor = "The Android Open Source Project",
+        .create_effect = android::audio_effect::haptic_generator::HapticGeneratorLib_Create,
+        .release_effect = android::audio_effect::haptic_generator::HapticGeneratorLib_Release,
+        .get_descriptor = android::audio_effect::haptic_generator::HapticGeneratorLib_GetDescriptor,
+};
+
+namespace android::audio_effect::haptic_generator {
+
+// effect_handle_t interface implementation for haptic generator effect
+const struct effect_interface_s gHapticGeneratorInterface = {
+        HapticGenerator_Process,
+        HapticGenerator_Command,
+        HapticGenerator_GetDescriptor,
+        nullptr /* no process_reverse function, no reference stream needed */
+};
+
+//-----------------------------------------------------------------------------
+// Effect Descriptor
+//-----------------------------------------------------------------------------
+
+// UUIDs for effect types have been generated from http://www.itu.int/ITU-T/asn1/uuid.html
+// Haptic Generator
+static const effect_descriptor_t gHgDescriptor = {
+        FX_IID_HAPTICGENERATOR_, // type
+        {0x97c4acd1, 0x8b82, 0x4f2f, 0x832e, {0xc2, 0xfe, 0x5d, 0x7a, 0x99, 0x31}}, // uuid
+        EFFECT_CONTROL_API_VERSION,
+        EFFECT_FLAG_TYPE_INSERT | EFFECT_FLAG_INSERT_FIRST,
+        0, // FIXME what value should be reported? // cpu load
+        0, // FIXME what value should be reported? // memory usage
+        "Haptic Generator",
+        "The Android Open Source Project"
+};
+
+//-----------------------------------------------------------------------------
+// Internal functions
+//-----------------------------------------------------------------------------
+
+namespace {
+
+int HapticGenerator_Init(struct HapticGeneratorContext *context) {
+    context->itfe = &gHapticGeneratorInterface;
+
+    context->config.inputCfg.accessMode = EFFECT_BUFFER_ACCESS_READ;
+    context->config.inputCfg.channels = AUDIO_CHANNEL_OUT_STEREO;
+    context->config.inputCfg.format = AUDIO_FORMAT_PCM_FLOAT;
+    context->config.inputCfg.samplingRate = 0;
+    context->config.inputCfg.bufferProvider.getBuffer = nullptr;
+    context->config.inputCfg.bufferProvider.releaseBuffer = nullptr;
+    context->config.inputCfg.bufferProvider.cookie = nullptr;
+    context->config.inputCfg.mask = EFFECT_CONFIG_ALL;
+    context->config.outputCfg.accessMode = EFFECT_BUFFER_ACCESS_ACCUMULATE;
+    context->config.outputCfg.channels = AUDIO_CHANNEL_OUT_STEREO;
+    context->config.outputCfg.format = AUDIO_FORMAT_PCM_FLOAT;
+    context->config.outputCfg.samplingRate = 0;
+    context->config.outputCfg.bufferProvider.getBuffer = nullptr;
+    context->config.outputCfg.bufferProvider.releaseBuffer = nullptr;
+    context->config.outputCfg.bufferProvider.cookie = nullptr;
+    context->config.outputCfg.mask = EFFECT_CONFIG_ALL;
+
+    memset(context->param.hapticChannelSource, 0, sizeof(context->param.hapticChannelSource));
+    context->param.hapticChannelCount = 0;
+    context->param.audioChannelCount = 0;
+    context->param.maxHapticIntensity = os::HapticScale::MUTE;
+
+    context->state = HAPTICGENERATOR_STATE_INITIALIZED;
+    return 0;
+}
+
+void addBiquadFilter(
+        std::vector<std::function<void(float *, const float *, size_t)>> &processingChain,
+        struct HapticGeneratorProcessorsRecord &processorsRecord,
+        std::shared_ptr<BiquadFilter> filter) {
+    // The process chain captures the shared pointer of the filter in lambda.
+    // The process record will keep a shared pointer to the filter so that it is possible to access
+    // the filter outside of the process chain.
+    processorsRecord.filters.push_back(filter);
+    processingChain.push_back([filter](float *out, const float *in, size_t frameCount) {
+            filter->process(out, in, frameCount);
+    });
+}
+
+/**
+ * \brief build haptic generator processing chain.
+ *
+ * \param processingChain
+ * \param processorsRecord a structure to cache all the shared pointers for processors
+ * \param sampleRate the audio sampling rate. Use a float here as it may be used to create filters
+ * \param channelCount haptic channel count
+ */
+void HapticGenerator_buildProcessingChain(
+        std::vector<std::function<void(float*, const float*, size_t)>>& processingChain,
+        struct HapticGeneratorProcessorsRecord& processorsRecord,
+        float sampleRate, size_t channelCount) {
+    float highPassCornerFrequency = 100.0f;
+    auto hpf = createHPF2(highPassCornerFrequency, sampleRate, channelCount);
+    addBiquadFilter(processingChain, processorsRecord, hpf);
+    float lowPassCornerFrequency = 3000.0f;
+    auto lpf = createLPF2(lowPassCornerFrequency, sampleRate, channelCount);
+    addBiquadFilter(processingChain, processorsRecord, lpf);
+
+    auto ramp = std::make_shared<Ramp>(channelCount);
+    // The process chain captures the shared pointer of the ramp in lambda. It will be the only
+    // reference to the ramp.
+    // The process record will keep a weak pointer to the ramp so that it is possible to access
+    // the ramp outside of the process chain.
+    processorsRecord.ramps.push_back(ramp);
+    processingChain.push_back([ramp](float *out, const float *in, size_t frameCount) {
+            ramp->process(out, in, frameCount);
+    });
+
+    highPassCornerFrequency = 60.0f;
+    hpf = createHPF2(highPassCornerFrequency, sampleRate, channelCount);
+    addBiquadFilter(processingChain, processorsRecord, hpf);
+    lowPassCornerFrequency = 700.0f;
+    lpf = createLPF2(lowPassCornerFrequency, sampleRate, channelCount);
+    addBiquadFilter(processingChain, processorsRecord, lpf);
+
+    lowPassCornerFrequency = 5.0f;
+    float normalizationPower = -0.3f;
+    // The process chain captures the shared pointer of the slow envelope in lambda. It will
+    // be the only reference to the slow envelope.
+    // The process record will keep a weak pointer to the slow envelope so that it is possible
+    // to access the slow envelope outside of the process chain.
+    auto slowEnv = std::make_shared<SlowEnvelope>(
+            lowPassCornerFrequency, sampleRate, normalizationPower, channelCount);
+    processorsRecord.slowEnvs.push_back(slowEnv);
+    processingChain.push_back([slowEnv](float *out, const float *in, size_t frameCount) {
+            slowEnv->process(out, in, frameCount);
+    });
+
+    lowPassCornerFrequency = 400.0f;
+    lpf = createLPF2(lowPassCornerFrequency, sampleRate, channelCount);
+    addBiquadFilter(processingChain, processorsRecord, lpf);
+    lowPassCornerFrequency = 500.0f;
+    lpf = createLPF2(lowPassCornerFrequency, sampleRate, channelCount);
+    addBiquadFilter(processingChain, processorsRecord, lpf);
+
+    auto apf = createAPF2(400.0f, 200.0f, sampleRate, channelCount);
+    addBiquadFilter(processingChain, processorsRecord, apf);
+    apf = createAPF2(100.0f, 50.0f, sampleRate, channelCount);
+    addBiquadFilter(processingChain, processorsRecord, apf);
+    float allPassCornerFrequency = 25.0f;
+    apf = createAPF(allPassCornerFrequency, sampleRate, channelCount);
+    addBiquadFilter(processingChain, processorsRecord, apf);
+
+    float resonantFrequency = 150.0f;
+    float bandpassQ = 1.0f;
+    auto bpf = createBPF(resonantFrequency, bandpassQ, sampleRate, channelCount);
+    addBiquadFilter(processingChain, processorsRecord, bpf);
+
+    float zeroQ = 8.0f;
+    float poleQ = 4.0f;
+    auto bsf = createBSF(resonantFrequency, zeroQ, poleQ, sampleRate, channelCount);
+    addBiquadFilter(processingChain, processorsRecord, bsf);
+}
+
+int HapticGenerator_Configure(struct HapticGeneratorContext *context, effect_config_t *config) {
+    if (config->inputCfg.samplingRate != config->outputCfg.samplingRate ||
+        config->inputCfg.format != config->outputCfg.format ||
+        config->inputCfg.format != AUDIO_FORMAT_PCM_FLOAT ||
+        config->inputCfg.channels != config->outputCfg.channels ||
+        config->inputCfg.buffer.frameCount != config->outputCfg.buffer.frameCount) {
+        return -EINVAL;
+    }
+    if (&context->config != config) {
+        context->processingChain.clear();
+        context->processorsRecord.filters.clear();
+        context->processorsRecord.ramps.clear();
+        context->processorsRecord.slowEnvs.clear();
+        memcpy(&context->config, config, sizeof(effect_config_t));
+        context->param.audioChannelCount = audio_channel_count_from_out_mask(
+                ((audio_channel_mask_t) config->inputCfg.channels) & ~AUDIO_CHANNEL_HAPTIC_ALL);
+        context->param.hapticChannelCount = audio_channel_count_from_out_mask(
+                ((audio_channel_mask_t) config->outputCfg.channels) & AUDIO_CHANNEL_HAPTIC_ALL);
+        ALOG_ASSERT(context->param.hapticChannelCount <= 2,
+                    "haptic channel count(%zu) is too large",
+                    context->param.hapticChannelCount);
+        context->audioDataBytesPerFrame = audio_bytes_per_frame(
+                context->param.audioChannelCount, (audio_format_t) config->inputCfg.format);
+        for (size_t i = 0; i < context->param.hapticChannelCount; ++i) {
+            // By default, use the first audio channel to generate haptic channels.
+            context->param.hapticChannelSource[i] = 0;
+        }
+
+        HapticGenerator_buildProcessingChain(context->processingChain,
+                                             context->processorsRecord,
+                                             config->inputCfg.samplingRate,
+                                             context->param.hapticChannelCount);
+    }
+    return 0;
+}
+
+int HapticGenerator_Reset(struct HapticGeneratorContext *context) {
+    for (auto& filter : context->processorsRecord.filters) {
+        filter->clear();
+    }
+    for (auto& slowEnv : context->processorsRecord.slowEnvs) {
+        slowEnv->clear();
+    }
+    return 0;
+}
+
+int HapticGenerator_SetParameter(struct HapticGeneratorContext *context,
+                                 int32_t param,
+                                 uint32_t size,
+                                 void *value) {
+    switch (param) {
+    case HG_PARAM_HAPTIC_INTENSITY: {
+        if (value == nullptr || size != (uint32_t) (2 * sizeof(int))) {
+            return -EINVAL;
+        }
+        int id = *(int *) value;
+        os::HapticScale hapticIntensity = static_cast<os::HapticScale>(*((int *) value + 1));
+        if (hapticIntensity == os::HapticScale::MUTE) {
+            context->param.id2Intensity.erase(id);
+        } else {
+            context->param.id2Intensity.emplace(id, hapticIntensity);
+        }
+        context->param.maxHapticIntensity = hapticIntensity;
+        for (const auto&[id, intensity] : context->param.id2Intensity) {
+            context->param.maxHapticIntensity = std::max(
+                    context->param.maxHapticIntensity, intensity);
+        }
+        break;
+    }
+
+    default:
+        ALOGW("Unknown param: %d", param);
+        return -EINVAL;
+    }
+
+    return 0;
+}
+
+/**
+ * \brief run the processing chain to generate haptic data from audio data
+ *
+ * \param processingChain the processing chain for generating haptic data
+ * \param buf1 a buffer contains raw audio data
+ * \param buf2 a buffer that is large enough to keep all the data
+ * \param frameCount frame count of the data
+ * \return a pointer to the output buffer
+ */
+float* HapticGenerator_runProcessingChain(
+        const std::vector<std::function<void(float*, const float*, size_t)>>& processingChain,
+        float* buf1, float* buf2, size_t frameCount) {
+    float *in = buf1;
+    float *out = buf2;
+    for (const auto processingFunc : processingChain) {
+        processingFunc(out, in, frameCount);
+        std::swap(in, out);
+    }
+    return in;
+}
+
+} // namespace (anonymous)
+
+//-----------------------------------------------------------------------------
+// Effect API Implementation
+//-----------------------------------------------------------------------------
+
+/*--- Effect Library Interface Implementation ---*/
+
+int32_t HapticGeneratorLib_Create(const effect_uuid_t *uuid,
+                                  int32_t sessionId __unused,
+                                  int32_t ioId __unused,
+                                  effect_handle_t *handle) {
+    if (handle == nullptr || uuid == nullptr) {
+        return -EINVAL;
+    }
+
+    if (memcmp(uuid, &gHgDescriptor.uuid, sizeof(*uuid)) != 0) {
+        return -EINVAL;
+    }
+
+    HapticGeneratorContext *context = new HapticGeneratorContext;
+    HapticGenerator_Init(context);
+
+    *handle = (effect_handle_t) context;
+    ALOGV("%s context is %p", __func__, context);
+    return 0;
+}
+
+int32_t HapticGeneratorLib_Release(effect_handle_t handle) {
+    HapticGeneratorContext *context = (HapticGeneratorContext *) handle;
+    delete context;
+    return 0;
+}
+
+int32_t HapticGeneratorLib_GetDescriptor(const effect_uuid_t *uuid,
+                                         effect_descriptor_t *descriptor) {
+
+    if (descriptor == nullptr || uuid == nullptr) {
+        ALOGE("%s() called with NULL pointer", __func__);
+        return -EINVAL;
+    }
+
+    if (memcmp(uuid, &gHgDescriptor.uuid, sizeof(*uuid)) == 0) {
+        *descriptor = gHgDescriptor;
+        return 0;
+    }
+
+    return -EINVAL;
+}
+
+/*--- Effect Control Interface Implementation ---*/
+
+int32_t HapticGenerator_Process(effect_handle_t self,
+                                audio_buffer_t *inBuffer, audio_buffer_t *outBuffer) {
+    HapticGeneratorContext *context = (HapticGeneratorContext *) self;
+
+    if (inBuffer == nullptr || inBuffer->raw == nullptr
+            || outBuffer == nullptr || outBuffer->raw == nullptr) {
+        return 0;
+    }
+
+    // The audio data must not be modified but just written to
+    // output buffer according the access mode.
+    size_t audioBytes = context->audioDataBytesPerFrame * inBuffer->frameCount;
+    size_t audioSampleCount = inBuffer->frameCount * context->param.audioChannelCount;
+    if (inBuffer->raw != outBuffer->raw) {
+        if (context->config.outputCfg.accessMode == EFFECT_BUFFER_ACCESS_ACCUMULATE) {
+            for (size_t i = 0; i < audioSampleCount; ++i) {
+                outBuffer->f32[i] += inBuffer->f32[i];
+            }
+        } else {
+            memcpy(outBuffer->raw, inBuffer->raw, audioBytes);
+        }
+    }
+
+    if (context->state != HAPTICGENERATOR_STATE_ACTIVE) {
+        ALOGE("State(%d) is not HAPTICGENERATOR_STATE_ACTIVE when calling %s",
+                context->state, __func__);
+        return -ENODATA;
+    }
+
+    if (context->param.maxHapticIntensity == os::HapticScale::MUTE) {
+        // Haptic channels are muted, not need to generate haptic data.
+        return 0;
+    }
+
+    // Resize buffer if the haptic sample count is greater than buffer size.
+    size_t hapticSampleCount = inBuffer->frameCount * context->param.hapticChannelCount;
+    if (hapticSampleCount > context->inputBuffer.size()) {
+        // The context->inputBuffer and context->outputBuffer must have the same size,
+        // which must be at least the haptic sample count.
+        context->inputBuffer.resize(hapticSampleCount);
+        context->outputBuffer.resize(hapticSampleCount);
+    }
+
+    // Construct input buffer according to haptic channel source
+    for (size_t i = 0; i < inBuffer->frameCount; ++i) {
+        for (size_t j = 0; j < context->param.hapticChannelCount; ++j) {
+            context->inputBuffer[i * context->param.hapticChannelCount + j] =
+                    inBuffer->f32[i * context->param.audioChannelCount
+                            + context->param.hapticChannelSource[j]];
+        }
+    }
+
+    float* hapticOutBuffer = HapticGenerator_runProcessingChain(
+            context->processingChain, context->inputBuffer.data(),
+            context->outputBuffer.data(), inBuffer->frameCount);
+    os::scaleHapticData(hapticOutBuffer, hapticSampleCount, context->param.maxHapticIntensity);
+
+    // For haptic data, the haptic playback thread will copy the data from effect input buffer,
+    // which contains haptic data at the end of the buffer, directly to sink buffer.
+    // In that case, copy haptic data to input buffer instead of output buffer.
+    // Note: this may not work with rpc/binder calls
+    memcpy_by_audio_format(static_cast<char*>(inBuffer->raw) + audioBytes,
+                           static_cast<audio_format_t>(context->config.outputCfg.format),
+                           hapticOutBuffer,
+                           AUDIO_FORMAT_PCM_FLOAT,
+                           hapticSampleCount);
+
+    return 0;
+}
+
+int32_t HapticGenerator_Command(effect_handle_t self, uint32_t cmdCode, uint32_t cmdSize,
+                                void *cmdData, uint32_t *replySize, void *replyData) {
+    HapticGeneratorContext *context = (HapticGeneratorContext *) self;
+
+    if (context == nullptr || context->state == HAPTICGENERATOR_STATE_UNINITIALIZED) {
+        return -EINVAL;
+    }
+
+    ALOGV("HapticGenerator_Command command %u cmdSize %u", cmdCode, cmdSize);
+
+    switch (cmdCode) {
+        case EFFECT_CMD_INIT:
+            if (replyData == nullptr || replySize == nullptr || *replySize != sizeof(int)) {
+                return -EINVAL;
+            }
+            *(int *) replyData = HapticGenerator_Init(context);
+            break;
+
+        case EFFECT_CMD_SET_CONFIG:
+            if (cmdData == nullptr || cmdSize != sizeof(effect_config_t)
+                || replyData == nullptr || replySize == nullptr || *replySize != sizeof(int)) {
+                return -EINVAL;
+            }
+            *(int *) replyData = HapticGenerator_Configure(
+                    context, (effect_config_t *) cmdData);
+            break;
+
+        case EFFECT_CMD_RESET:
+            HapticGenerator_Reset(context);
+            break;
+
+        case EFFECT_CMD_GET_PARAM:
+            ALOGV("HapticGenerator_Command EFFECT_CMD_GET_PARAM cmdData %p,"
+                  "*replySize %u, replyData: %p",
+                  cmdData, *replySize, replyData);
+            break;
+
+        case EFFECT_CMD_SET_PARAM: {
+            ALOGV("HapticGenerator_Command EFFECT_CMD_SET_PARAM cmdSize %d cmdData %p, "
+                  "*replySize %u, replyData %p", cmdSize, cmdData,
+                  replySize ? *replySize : 0, replyData);
+            if (cmdData == nullptr || (cmdSize < (int) (sizeof(effect_param_t) + sizeof(int32_t)))
+                || replyData == nullptr || replySize == nullptr ||
+                *replySize != (int) sizeof(int32_t)) {
+                return -EINVAL;
+            }
+            effect_param_t *cmd = (effect_param_t *) cmdData;
+            *(int *) replyData = HapticGenerator_SetParameter(
+                    context, *(int32_t *) cmd->data, cmd->vsize, cmd->data + sizeof(int32_t));
+        }
+            break;
+
+        case EFFECT_CMD_ENABLE:
+            if (replyData == nullptr || replySize == nullptr || *replySize != sizeof(int)) {
+                return -EINVAL;
+            }
+            if (context->state != HAPTICGENERATOR_STATE_INITIALIZED) {
+                return -ENOSYS;
+            }
+            context->state = HAPTICGENERATOR_STATE_ACTIVE;
+            ALOGV("EFFECT_CMD_ENABLE() OK");
+            *(int *) replyData = 0;
+            break;
+
+        case EFFECT_CMD_DISABLE:
+            if (replyData == nullptr || replySize == nullptr || *replySize != sizeof(int)) {
+                return -EINVAL;
+            }
+            if (context->state != HAPTICGENERATOR_STATE_ACTIVE) {
+                return -ENOSYS;
+            }
+            context->state = HAPTICGENERATOR_STATE_INITIALIZED;
+            ALOGV("EFFECT_CMD_DISABLE() OK");
+            *(int *) replyData = 0;
+            break;
+
+        case EFFECT_CMD_SET_VOLUME:
+        case EFFECT_CMD_SET_DEVICE:
+        case EFFECT_CMD_SET_AUDIO_MODE:
+            break;
+
+        default:
+            ALOGW("HapticGenerator_Command invalid command %u", cmdCode);
+            return -EINVAL;
+    }
+
+    return 0;
+}
+
+int32_t HapticGenerator_GetDescriptor(effect_handle_t self, effect_descriptor_t *descriptor) {
+    HapticGeneratorContext *context = (HapticGeneratorContext *) self;
+
+    if (context == nullptr ||
+        context->state == HAPTICGENERATOR_STATE_UNINITIALIZED) {
+        return -EINVAL;
+    }
+
+    memcpy(descriptor, &gHgDescriptor, sizeof(effect_descriptor_t));
+
+    return 0;
+}
+
+} // namespace android::audio_effect::haptic_generator
diff --git a/media/libeffects/hapticgenerator/EffectHapticGenerator.h b/media/libeffects/hapticgenerator/EffectHapticGenerator.h
new file mode 100644
index 0000000..a5688de
--- /dev/null
+++ b/media/libeffects/hapticgenerator/EffectHapticGenerator.h
@@ -0,0 +1,118 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_EFFECTHAPTICGENERATOR_H_
+#define ANDROID_EFFECTHAPTICGENERATOR_H_
+
+#include <functional>
+#include <vector>
+#include <map>
+
+#include <hardware/audio_effect.h>
+#include <system/audio_effect.h>
+#include <vibrator/ExternalVibrationUtils.h>
+
+#include "Processors.h"
+
+namespace android::audio_effect::haptic_generator {
+
+//-----------------------------------------------------------------------------
+// Definition
+//-----------------------------------------------------------------------------
+
+enum hapticgenerator_state_t {
+    HAPTICGENERATOR_STATE_UNINITIALIZED,
+    HAPTICGENERATOR_STATE_INITIALIZED,
+    HAPTICGENERATOR_STATE_ACTIVE,
+};
+
+// parameters for each haptic generator
+struct HapticGeneratorParam {
+    uint32_t hapticChannelSource[2]; // The audio channels used to generate haptic channels.
+                                     // The first channel will be used to generate HAPTIC_A,
+                                     // The second channel will be used to generate HAPTIC_B
+                                     // The value will be offset of audio channel
+    uint32_t audioChannelCount;
+    uint32_t hapticChannelCount;
+
+    // A map from track id to haptic intensity.
+    std::map<int, os::HapticScale> id2Intensity;
+    os::HapticScale maxHapticIntensity; // max intensity will be used to scale haptic data.
+};
+
+// A structure to keep all shared pointers for all processors in HapticGenerator.
+struct HapticGeneratorProcessorsRecord {
+    std::vector<std::shared_ptr<BiquadFilter>> filters;
+    std::vector<std::shared_ptr<Ramp>> ramps;
+    std::vector<std::shared_ptr<SlowEnvelope>> slowEnvs;
+};
+
+// A structure to keep all the context for HapticGenerator.
+struct HapticGeneratorContext {
+    const struct effect_interface_s *itfe;
+    effect_config_t config;
+    hapticgenerator_state_t state;
+    struct HapticGeneratorParam param;
+    size_t audioDataBytesPerFrame;
+
+    // A cache for all shared pointers of the HapticGenerator
+    struct HapticGeneratorProcessorsRecord processorsRecord;
+
+    // Using a vector of functions to record the processing chain for haptic-generating algorithm.
+    // The three parameters of the processing functions are pointer to output buffer, pointer to
+    // input buffer and frame count.
+    std::vector<std::function<void(float*, const float*, size_t)>> processingChain;
+
+    // inputBuffer is where to keep input buffer for the generating algorithm. It will be
+    // constructed according to HapticGeneratorParam.hapticChannelSource.
+    std::vector<float> inputBuffer;
+
+    // outputBuffer is a buffer having the same length as inputBuffer. It can be used as
+    // intermediate buffer in the generating algorithm.
+    std::vector<float> outputBuffer;
+};
+
+//-----------------------------------------------------------------------------
+// Effect API
+//-----------------------------------------------------------------------------
+
+int32_t HapticGeneratorLib_Create(const effect_uuid_t *uuid,
+                                  int32_t sessionId,
+                                  int32_t ioId,
+                                  effect_handle_t *handle);
+
+int32_t HapticGeneratorLib_Release(effect_handle_t handle);
+
+int32_t HapticGeneratorLib_GetDescriptor(const effect_uuid_t *uuid,
+                                         effect_descriptor_t *descriptor);
+
+int32_t HapticGenerator_Process(effect_handle_t self,
+                                audio_buffer_t *inBuffer,
+                                audio_buffer_t *outBuffer);
+
+int32_t HapticGenerator_Command(effect_handle_t self,
+                                uint32_t cmdCode,
+                                uint32_t cmdSize,
+                                void *cmdData,
+                                uint32_t *replySize,
+                                void *replyData);
+
+int32_t HapticGenerator_GetDescriptor(effect_handle_t self,
+                                      effect_descriptor_t *descriptor);
+
+} // namespace android::audio_effect::haptic_generator
+
+#endif // ANDROID_EFFECTHAPTICGENERATOR_H_
diff --git a/media/libeffects/hapticgenerator/MODULE_LICENSE_APACHE2 b/media/libeffects/hapticgenerator/MODULE_LICENSE_APACHE2
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/media/libeffects/hapticgenerator/MODULE_LICENSE_APACHE2
diff --git a/media/libeffects/hapticgenerator/Processors.cpp b/media/libeffects/hapticgenerator/Processors.cpp
new file mode 100644
index 0000000..179b5dc
--- /dev/null
+++ b/media/libeffects/hapticgenerator/Processors.cpp
@@ -0,0 +1,234 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "EffectHG_Processors"
+//#define LOG_NDEBUG 0
+#include <utils/Log.h>
+
+#include <assert.h>
+
+#include <cmath>
+
+#include "Processors.h"
+
+#if defined(__aarch64__) || defined(__ARM_NEON__)
+#ifndef USE_NEON
+#define USE_NEON (true)
+#endif
+#else
+#define USE_NEON (false)
+#endif
+#if USE_NEON
+#include <arm_neon.h>
+#endif
+
+namespace android::audio_effect::haptic_generator {
+
+float getRealPoleZ(float cornerFrequency, float sampleRate) {
+    // This will be a pole of a first order filter.
+    float realPoleS = -2 * M_PI * cornerFrequency;
+    return exp(realPoleS / sampleRate); // zero-pole matching
+}
+
+std::pair<float, float> getComplexPoleZ(float ringingFrequency, float q, float sampleRate) {
+    // This is the pole for 1/(s^2 + s/q + 1) in normalized frequency. The other pole is
+    // the complex conjugate of this.
+    float poleImagS = 2 * M_PI * ringingFrequency;
+    float poleRealS = -poleImagS / (2 * q);
+    float poleRadius = exp(poleRealS / sampleRate);
+    float poleImagZ = poleRadius * sin(poleImagS / sampleRate);
+    float poleRealZ = poleRadius * cos(poleImagS / sampleRate);
+    return {poleRealZ, poleImagZ};
+}
+
+// Implementation of Ramp
+
+Ramp::Ramp(size_t channelCount) : mChannelCount(channelCount) {}
+
+void Ramp::process(float *out, const float *in, size_t frameCount) {
+    size_t i = 0;
+#if USE_NEON
+    size_t sampleCount = frameCount * mChannelCount;
+    float32x2_t allZero = vdup_n_f32(0.0f);
+    while (i + 1 < sampleCount) {
+        vst1_f32(out, vmax_f32(vld1_f32(in), allZero));
+        in += 2;
+        out += 2;
+        i += 2;
+    }
+#endif // USE_NEON
+    for (; i < frameCount * mChannelCount; ++i) {
+        *out = *in >= 0.0f ? *in : 0.0f;
+        out++;
+        in++;
+    }
+}
+
+// Implementation of SlowEnvelope
+
+SlowEnvelope::SlowEnvelope(
+        float cornerFrequency,
+        float sampleRate,
+        float normalizationPower,
+        size_t channelCount)
+        : mLpf(createLPF(cornerFrequency, sampleRate, channelCount)),
+          mNormalizationPower(normalizationPower),
+          mChannelCount(channelCount),
+          mEnv(0.25 * (sampleRate / (2 * M_PI * cornerFrequency))) {}
+
+void SlowEnvelope::process(float* out, const float* in, size_t frameCount) {
+    size_t sampleCount = frameCount * mChannelCount;
+    if (sampleCount > mLpfInBuffer.size()) {
+        mLpfInBuffer.resize(sampleCount, mEnv);
+        mLpfOutBuffer.resize(sampleCount);
+    }
+    mLpf->process(mLpfOutBuffer.data(), mLpfInBuffer.data(), frameCount);
+    for (size_t i = 0; i < sampleCount; ++i) {
+        *out = *in * pow(mLpfOutBuffer[i], mNormalizationPower);
+        out++;
+        in++;
+    }
+}
+
+void SlowEnvelope::clear() {
+    mLpf->clear();
+}
+
+// Implementation of helper functions
+
+BiquadFilterCoefficients cascadeFirstOrderFilters(const BiquadFilterCoefficients &coefs1,
+                                                   const BiquadFilterCoefficients &coefs2) {
+    assert(coefs1[2] == 0.0f);
+    assert(coefs2[2] == 0.0f);
+    assert(coefs1[4] == 0.0f);
+    assert(coefs2[4] == 0.0f);
+    return {coefs1[0] * coefs2[0],
+            coefs1[0] * coefs2[1] + coefs1[1] * coefs2[0],
+            coefs1[1] * coefs2[1],
+            coefs1[3] + coefs2[3],
+            coefs1[3] * coefs2[3]};
+}
+
+BiquadFilterCoefficients lpfCoefs(const float cornerFrequency, const float sampleRate) {
+    BiquadFilterCoefficients coefficient;
+    float realPoleZ = getRealPoleZ(cornerFrequency, sampleRate);
+    // This is a zero at nyquist
+    coefficient[0] = 0.5f * (1 - realPoleZ);
+    coefficient[1] = coefficient[0];
+    coefficient[2] = 0.0f;
+    coefficient[3] = -realPoleZ; // This is traditional 1/(s+1) filter
+    coefficient[4] = 0.0f;
+    return coefficient;
+}
+
+std::shared_ptr<BiquadFilter> createLPF(const float cornerFrequency,
+                                        const float sampleRate,
+                                        const size_t channelCount) {
+    BiquadFilterCoefficients coefficient = lpfCoefs(cornerFrequency, sampleRate);
+    return std::make_shared<BiquadFilter>(channelCount, coefficient);
+}
+
+std::shared_ptr<BiquadFilter> createLPF2(const float cornerFrequency,
+                                         const float sampleRate,
+                                         const size_t channelCount) {
+    BiquadFilterCoefficients coefficient = lpfCoefs(cornerFrequency, sampleRate);
+    return std::make_shared<BiquadFilter>(
+            channelCount, cascadeFirstOrderFilters(coefficient, coefficient));
+}
+
+std::shared_ptr<BiquadFilter> createHPF2(const float cornerFrequency,
+                                         const float sampleRate,
+                                         const size_t channelCount) {
+    BiquadFilterCoefficients coefficient;
+    // Note: this is valid only when corner frequency is less than nyquist / 2.
+    float realPoleZ = getRealPoleZ(cornerFrequency, sampleRate);
+
+    // Note: this is a zero at DC
+    coefficient[0] = 0.5f * (1 + realPoleZ);
+    coefficient[1] = -coefficient[0];
+    coefficient[2] = 0.0f;
+    coefficient[3] = -realPoleZ;
+    coefficient[4] = 0.0f;
+    return std::make_shared<BiquadFilter>(
+            channelCount, cascadeFirstOrderFilters(coefficient, coefficient));
+}
+
+BiquadFilterCoefficients apfCoefs(const float cornerFrequency, const float sampleRate) {
+    BiquadFilterCoefficients coefficient;
+    float realPoleZ = getRealPoleZ(cornerFrequency, sampleRate);
+    float zeroZ = 1.0f / realPoleZ;
+    coefficient[0] = (1.0f - realPoleZ) / (1.0f - zeroZ);
+    coefficient[1] = -coefficient[0] * zeroZ;
+    coefficient[2] = 0.0f;
+    coefficient[3] = -realPoleZ;
+    coefficient[4] = 0.0f;
+    return coefficient;
+}
+
+std::shared_ptr<BiquadFilter> createAPF(const float cornerFrequency,
+                                        const float sampleRate,
+                                        const size_t channelCount) {
+    BiquadFilterCoefficients coefficient = apfCoefs(cornerFrequency, sampleRate);
+    return std::make_shared<BiquadFilter>(channelCount, coefficient);
+}
+
+std::shared_ptr<BiquadFilter> createAPF2(const float cornerFrequency1,
+                                         const float cornerFrequency2,
+                                         const float sampleRate,
+                                         const size_t channelCount) {
+    BiquadFilterCoefficients coefs1 = apfCoefs(cornerFrequency1, sampleRate);
+    BiquadFilterCoefficients coefs2 = apfCoefs(cornerFrequency2, sampleRate);
+    return std::make_shared<BiquadFilter>(
+            channelCount, cascadeFirstOrderFilters(coefs1, coefs2));
+}
+
+std::shared_ptr<BiquadFilter> createBPF(const float ringingFrequency,
+                                        const float q,
+                                        const float sampleRate,
+                                        const size_t channelCount) {
+    BiquadFilterCoefficients coefficient;
+    const auto [real, img] = getComplexPoleZ(ringingFrequency, q, sampleRate);
+    // Note: this is not a standard cookbook BPF, but a low pass filter with zero at DC
+    coefficient[0] = 1.0f;
+    coefficient[1] = -1.0f;
+    coefficient[2] = 0.0f;
+    coefficient[3] = -2 * real;
+    coefficient[4] = real * real + img * img;
+    return std::make_shared<BiquadFilter>(channelCount, coefficient);
+}
+
+std::shared_ptr<BiquadFilter> createBSF(const float ringingFrequency,
+                                        const float zq,
+                                        const float pq,
+                                        const float sampleRate,
+                                        const size_t channelCount) {
+    BiquadFilterCoefficients coefficient;
+    const auto [zeroReal, zeroImg] = getComplexPoleZ(ringingFrequency, zq, sampleRate);
+    float zeroCoeff1 = -2 * zeroReal;
+    float zeroCoeff2 = zeroReal* zeroReal + zeroImg * zeroImg;
+    const auto [poleReal, poleImg] = getComplexPoleZ(ringingFrequency, pq, sampleRate);
+    float poleCoeff1 = -2 * poleReal;
+    float poleCoeff2 = poleReal * poleReal + poleImg * poleImg;
+    const float norm = (1.0f + poleCoeff1 + poleCoeff2) / (1.0f + zeroCoeff1 + zeroCoeff2);
+    coefficient[0] = 1.0f * norm;
+    coefficient[1] = zeroCoeff1 * norm;
+    coefficient[2] = zeroCoeff2 * norm;
+    coefficient[3] = poleCoeff1;
+    coefficient[4] = poleCoeff2;
+    return std::make_shared<BiquadFilter>(channelCount, coefficient);
+}
+
+} // namespace android::audio_effect::haptic_generator
diff --git a/media/libeffects/hapticgenerator/Processors.h b/media/libeffects/hapticgenerator/Processors.h
new file mode 100644
index 0000000..e14458b
--- /dev/null
+++ b/media/libeffects/hapticgenerator/Processors.h
@@ -0,0 +1,104 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _EFFECT_HAPTIC_GENERATOR_PROCESSORS_H_
+#define _EFFECT_HAPTIC_GENERATOR_PROCESSORS_H_
+
+#include <sys/types.h>
+
+#include <memory>
+#include <vector>
+
+#include <audio_utils/BiquadFilter.h>
+
+using android::audio_utils::BiquadFilter;
+using BiquadFilterCoefficients = std::array<float, android::audio_utils::kBiquadNumCoefs>;
+
+namespace android::audio_effect::haptic_generator {
+
+// A class providing a process function that makes input data non-negative.
+class Ramp {
+public:
+    explicit Ramp(size_t channelCount);
+
+    void process(float *out, const float *in, size_t frameCount);
+
+private:
+    const size_t mChannelCount;
+};
+
+
+class SlowEnvelope {
+public:
+    SlowEnvelope(float cornerFrequency, float sampleRate,
+                 float normalizationPower, size_t channelCount);
+
+    void process(float *out, const float *in, size_t frameCount);
+
+    void clear();
+
+private:
+    const std::shared_ptr<BiquadFilter> mLpf;
+    std::vector<float> mLpfInBuffer;
+    std::vector<float> mLpfOutBuffer;
+    const float mNormalizationPower;
+    const float mChannelCount;
+    const float mEnv;
+};
+
+// Helper functions
+
+BiquadFilterCoefficients cascadeFirstOrderFilters(const BiquadFilterCoefficients &coefs1,
+                                                  const BiquadFilterCoefficients &coefs2);
+
+std::shared_ptr<BiquadFilter> createLPF(const float cornerFrequency,
+                                        const float sampleRate,
+                                        const size_t channelCount);
+
+// Create two cascaded LPF with same corner frequency.
+std::shared_ptr<BiquadFilter> createLPF2(const float cornerFrequency,
+                                         const float sampleRate,
+                                         const size_t channelCount);
+
+// Create two cascaded HPF with same corner frequency.
+std::shared_ptr<BiquadFilter> createHPF2(const float cornerFrequency,
+                                         const float sampleRate,
+                                         const size_t channelCount);
+
+std::shared_ptr<BiquadFilter> createAPF(const float cornerFrequency,
+                                        const float sampleRate,
+                                        const size_t channelCount);
+
+// Create two cascaded APF with two different corner frequency.
+std::shared_ptr<BiquadFilter> createAPF2(const float cornerFrequency1,
+                                         const float cornerFrequency2,
+                                         const float sampleRate,
+                                         const size_t channelCount);
+
+std::shared_ptr<BiquadFilter> createBPF(const float ringingFrequency,
+                                        const float q,
+                                        const float sampleRate,
+                                        const size_t channelCount);
+
+std::shared_ptr<BiquadFilter> createBSF(const float ringingFrequency,
+                                        const float zq,
+                                        const float pq,
+                                        const float sampleRate,
+                                        const size_t channelCount);
+
+} // namespace android::audio_effect::haptic_generator
+
+#endif // _EFFECT_HAPTIC_GENERATOR_PROCESSORS_H_
diff --git a/media/libeffects/visualizer/Android.bp b/media/libeffects/visualizer/Android.bp
new file mode 100644
index 0000000..f6c585e
--- /dev/null
+++ b/media/libeffects/visualizer/Android.bp
@@ -0,0 +1,32 @@
+// Visualizer library
+cc_library_shared {
+    name: "libvisualizer",
+
+    vendor: true,
+
+    srcs: [
+        "EffectVisualizer.cpp",
+    ],
+
+    cflags: [
+        "-O2",
+        "-fvisibility=hidden",
+
+        "-DBUILD_FLOAT",
+        "-DSUPPORT_MC",
+
+        "-Wall",
+        "-Werror",
+    ],
+
+    shared_libs: [
+        "liblog",
+    ],
+
+    relative_install_path: "soundfx",
+
+    header_libs: [
+        "libaudioeffects",
+        "libaudioutils_headers",
+    ],
+}
diff --git a/media/libeffects/visualizer/Android.mk b/media/libeffects/visualizer/Android.mk
deleted file mode 100644
index 35e2f3d..0000000
--- a/media/libeffects/visualizer/Android.mk
+++ /dev/null
@@ -1,28 +0,0 @@
-LOCAL_PATH:= $(call my-dir)
-
-# Visualizer library
-include $(CLEAR_VARS)
-
-LOCAL_VENDOR_MODULE := true
-LOCAL_SRC_FILES:= \
-	EffectVisualizer.cpp
-
-LOCAL_CFLAGS+= -O2 -fvisibility=hidden
-LOCAL_CFLAGS += -Wall -Werror
-LOCAL_CFLAGS += -DBUILD_FLOAT -DSUPPORT_MC
-
-LOCAL_SHARED_LIBRARIES := \
-	libcutils \
-	liblog \
-	libdl
-
-LOCAL_MODULE_RELATIVE_PATH := soundfx
-LOCAL_MODULE:= libvisualizer
-
-LOCAL_C_INCLUDES := \
-	$(call include-path-for, audio-effects) \
-	$(call include-path-for, audio-utils)
-
-
-LOCAL_HEADER_LIBRARIES += libhardware_headers
-include $(BUILD_SHARED_LIBRARY)
diff --git a/media/libmedia/IMediaPlayer.cpp b/media/libmedia/IMediaPlayer.cpp
index 20bc23d..c08d187 100644
--- a/media/libmedia/IMediaPlayer.cpp
+++ b/media/libmedia/IMediaPlayer.cpp
@@ -40,6 +40,7 @@
     SET_DATA_SOURCE_FD,
     SET_DATA_SOURCE_STREAM,
     SET_DATA_SOURCE_CALLBACK,
+    SET_DATA_SOURCE_RTP,
     SET_BUFFERING_SETTINGS,
     GET_BUFFERING_SETTINGS,
     PREPARE_ASYNC,
@@ -161,6 +162,15 @@
         return reply.readInt32();
     }
 
+    status_t setDataSource(const String8& rtpParams) {
+        Parcel data, reply;
+        data.writeInterfaceToken(IMediaPlayer::getInterfaceDescriptor());
+        data.writeString8(rtpParams);
+        remote()->transact(SET_DATA_SOURCE_RTP, data, &reply);
+
+        return reply.readInt32();
+    }
+
     // pass the buffered IGraphicBufferProducer to the media player service
     status_t setVideoSurfaceTexture(const sp<IGraphicBufferProducer>& bufferProducer)
     {
@@ -685,6 +695,12 @@
             }
             return NO_ERROR;
         }
+        case SET_DATA_SOURCE_RTP: {
+            CHECK_INTERFACE(IMediaPlayer, data, reply);
+            String8 rtpParams = data.readString8();
+            reply->writeInt32(setDataSource(rtpParams));
+            return NO_ERROR;
+        }
         case SET_VIDEO_SURFACETEXTURE: {
             CHECK_INTERFACE(IMediaPlayer, data, reply);
             sp<IGraphicBufferProducer> bufferProducer =
diff --git a/media/libmedia/include/media/IMediaPlayer.h b/media/libmedia/include/media/IMediaPlayer.h
index a4c0ec6..3548a1e 100644
--- a/media/libmedia/include/media/IMediaPlayer.h
+++ b/media/libmedia/include/media/IMediaPlayer.h
@@ -59,6 +59,7 @@
     virtual status_t        setDataSource(int fd, int64_t offset, int64_t length) = 0;
     virtual status_t        setDataSource(const sp<IStreamSource>& source) = 0;
     virtual status_t        setDataSource(const sp<IDataSource>& source) = 0;
+    virtual status_t        setDataSource(const String8& rtpParams) = 0;
     virtual status_t        setVideoSurfaceTexture(
                                     const sp<IGraphicBufferProducer>& bufferProducer) = 0;
     virtual status_t        getBufferingSettings(
diff --git a/media/libmedia/include/media/mediaplayer.h b/media/libmedia/include/media/mediaplayer.h
index 2335c5a..d0a8e38 100644
--- a/media/libmedia/include/media/mediaplayer.h
+++ b/media/libmedia/include/media/mediaplayer.h
@@ -60,6 +60,7 @@
     MEDIA_META_DATA         = 202,
     MEDIA_DRM_INFO          = 210,
     MEDIA_TIME_DISCONTINUITY = 211,
+    MEDIA_IMS_RX_NOTICE     = 300,
     MEDIA_AUDIO_ROUTING_CHANGED = 10000,
 };
 
@@ -217,6 +218,7 @@
 
             status_t        setDataSource(int fd, int64_t offset, int64_t length);
             status_t        setDataSource(const sp<IDataSource> &source);
+            status_t        setDataSource(const String8& rtpParams);
             status_t        setVideoSurfaceTexture(
                                     const sp<IGraphicBufferProducer>& bufferProducer);
             status_t        setListener(const sp<MediaPlayerListener>& listener);
diff --git a/media/libmedia/include/media/mediarecorder.h b/media/libmedia/include/media/mediarecorder.h
index 6e2d94d..fbcdb28 100644
--- a/media/libmedia/include/media/mediarecorder.h
+++ b/media/libmedia/include/media/mediarecorder.h
@@ -291,6 +291,8 @@
     bool                        mIsOutputFileSet;
     Mutex                       mLock;
     Mutex                       mNotifyLock;
+
+    output_format               mOutputFormat;
 };
 
 };  // namespace android
diff --git a/media/libmedia/mediaplayer.cpp b/media/libmedia/mediaplayer.cpp
index 1fadc94..1b89fc7 100644
--- a/media/libmedia/mediaplayer.cpp
+++ b/media/libmedia/mediaplayer.cpp
@@ -195,6 +195,22 @@
     return err;
 }
 
+status_t MediaPlayer::setDataSource(const String8& rtpParams)
+{
+    ALOGV("setDataSource(rtpParams)");
+    status_t err = UNKNOWN_ERROR;
+    const sp<IMediaPlayerService> service(getMediaPlayerService());
+    if (service != 0) {
+        sp<IMediaPlayer> player(service->create(this, mAudioSessionId));
+        if ((NO_ERROR != doSetRetransmitEndpoint(player)) ||
+            (NO_ERROR != player->setDataSource(rtpParams))) {
+            player.clear();
+        }
+        err = attachNewPlayer(player);
+    }
+    return err;
+}
+
 status_t MediaPlayer::invoke(const Parcel& request, Parcel *reply)
 {
     Mutex::Autolock _l(mLock);
@@ -943,6 +959,9 @@
     case MEDIA_META_DATA:
         ALOGV("Received timed metadata message");
         break;
+    case MEDIA_IMS_RX_NOTICE:
+        ALOGV("Received IMS Rx notice message");
+        break;
     default:
         ALOGV("unrecognized message: (%d, %d, %d)", msg, ext1, ext2);
         break;
diff --git a/media/libmedia/mediarecorder.cpp b/media/libmedia/mediarecorder.cpp
index 70655d5..d9d1f25 100644
--- a/media/libmedia/mediarecorder.cpp
+++ b/media/libmedia/mediarecorder.cpp
@@ -244,6 +244,7 @@
         mCurrentState = MEDIA_RECORDER_ERROR;
         return ret;
     }
+    mOutputFormat = (output_format)of;
     mCurrentState = MEDIA_RECORDER_DATASOURCE_CONFIGURED;
     return ret;
 }
@@ -479,6 +480,13 @@
                            (MEDIA_RECORDER_PREPARED |
                             MEDIA_RECORDER_RECORDING |
                             MEDIA_RECORDER_ERROR));
+
+    // For RTP video, parameter should be set dynamically.
+    if (isInvalidState) {
+        if (mCurrentState == MEDIA_RECORDER_RECORDING &&
+            mOutputFormat == OUTPUT_FORMAT_RTP_AVP)
+            isInvalidState = false;
+    }
     if (isInvalidState) {
         ALOGE("setParameters is called in an invalid state: %d", mCurrentState);
         return INVALID_OPERATION;
@@ -737,6 +745,7 @@
     mIsAudioEncoderSet = false;
     mIsVideoEncoderSet = false;
     mIsOutputFileSet   = false;
+    mOutputFormat      = OUTPUT_FORMAT_DEFAULT;
 }
 
 // Release should be OK in any state
diff --git a/media/libmediaplayerservice/Android.bp b/media/libmediaplayerservice/Android.bp
index 5301f5c..324f4ae 100644
--- a/media/libmediaplayerservice/Android.bp
+++ b/media/libmediaplayerservice/Android.bp
@@ -16,6 +16,7 @@
         "android.hardware.media.c2@1.0",
         "android.hardware.media.omx@1.0",
         "libbase",
+        "libandroid_net",
         "libaudioclient",
         "libbinder",
         "libcamera_client",
diff --git a/media/libmediaplayerservice/MediaPlayerService.cpp b/media/libmediaplayerservice/MediaPlayerService.cpp
index c0da0ce..555f459 100644
--- a/media/libmediaplayerservice/MediaPlayerService.cpp
+++ b/media/libmediaplayerservice/MediaPlayerService.cpp
@@ -1062,6 +1062,17 @@
     return mStatus = setDataSource_post(p, p->setDataSource(dataSource));
 }
 
+status_t MediaPlayerService::Client::setDataSource(
+        const String8& rtpParams) {
+    player_type playerType = NU_PLAYER;
+    sp<MediaPlayerBase> p = setDataSource_pre(playerType);
+    if (p == NULL) {
+        return NO_INIT;
+    }
+    // now set data source
+    return mStatus = setDataSource_post(p, p->setDataSource(rtpParams));
+}
+
 void MediaPlayerService::Client::disconnectNativeWindow_l() {
     if (mConnectedWindow != NULL) {
         status_t err = nativeWindowDisconnect(
diff --git a/media/libmediaplayerservice/MediaPlayerService.h b/media/libmediaplayerservice/MediaPlayerService.h
index 6431ca1..3d596a5 100644
--- a/media/libmediaplayerservice/MediaPlayerService.h
+++ b/media/libmediaplayerservice/MediaPlayerService.h
@@ -368,6 +368,7 @@
 
         virtual status_t        setDataSource(const sp<IStreamSource> &source);
         virtual status_t        setDataSource(const sp<IDataSource> &source);
+        virtual status_t        setDataSource(const String8& rtpParams);
 
 
         sp<MediaPlayerBase>     setDataSource_pre(player_type playerType);
diff --git a/media/libmediaplayerservice/StagefrightRecorder.cpp b/media/libmediaplayerservice/StagefrightRecorder.cpp
index 7897959..7e5fe56 100644
--- a/media/libmediaplayerservice/StagefrightRecorder.cpp
+++ b/media/libmediaplayerservice/StagefrightRecorder.cpp
@@ -44,6 +44,7 @@
 #include <media/stagefright/CameraSourceTimeLapse.h>
 #include <media/stagefright/MPEG2TSWriter.h>
 #include <media/stagefright/MPEG4Writer.h>
+#include <media/stagefright/MediaCodecConstants.h>
 #include <media/stagefright/MediaDefs.h>
 #include <media/stagefright/MetaData.h>
 #include <media/stagefright/MediaCodecSource.h>
@@ -117,6 +118,11 @@
       mAudioSource((audio_source_t)AUDIO_SOURCE_CNT), // initialize with invalid value
       mPrivacySensitive(PRIVACY_SENSITIVE_DEFAULT),
       mVideoSource(VIDEO_SOURCE_LIST_END),
+      mRTPCVOExtMap(-1),
+      mRTPCVODegrees(0),
+      mRTPSockDscp(0),
+      mRTPSockNetwork(0),
+      mLastSeqNo(0),
       mStarted(false),
       mSelectedDeviceId(AUDIO_PORT_HANDLE_NONE),
       mDeviceCallbackEnabled(false),
@@ -567,6 +573,30 @@
     // range that a specific encoder supports. The mismatch between the
     // the target and requested bit rate will NOT be treated as an error.
     mVideoBitRate = bitRate;
+
+    // A new bitrate(TMMBR) should be applied on runtime as well if OutputFormat is RTP_AVP
+    if (mOutputFormat == OUTPUT_FORMAT_RTP_AVP && mStarted && mPauseStartTimeUs == 0) {
+        // Regular I frames may overload the network so we reduce the bitrate to allow
+        // margins for the I frame overruns.
+        // Still send requested bitrate (TMMBR) in the reply (TMMBN).
+        const float coefficient = 0.8f;
+        mVideoBitRate = (bitRate * coefficient) / 1000 * 1000;
+        mVideoEncoderSource->setEncodingBitrate(mVideoBitRate);
+        ARTPWriter* rtpWriter  = static_cast<ARTPWriter*>(mWriter.get());
+        rtpWriter->setTMMBNInfo(mOpponentID, bitRate);
+    }
+
+    return OK;
+}
+
+status_t StagefrightRecorder::setParamVideoBitRateMode(int32_t bitRateMode) {
+    ALOGV("setParamVideoBitRateMode: %d", bitRateMode);
+    // TODO: clarify what bitrate mode of -1 is as these start from 0
+    if (bitRateMode < -1) {
+        ALOGE("Unsupported video bitrate mode: %d", bitRateMode);
+        return BAD_VALUE;
+    }
+    mVideoBitRateMode = bitRateMode;
     return OK;
 }
 
@@ -776,6 +806,105 @@
     return OK;
 }
 
+status_t StagefrightRecorder::setParamRtpLocalIp(const String8 &localIp) {
+    ALOGV("setParamVideoLocalIp: %s", localIp.string());
+
+    mLocalIp.setTo(localIp.string());
+    return OK;
+}
+
+status_t StagefrightRecorder::setParamRtpLocalPort(int32_t localPort) {
+    ALOGV("setParamVideoLocalPort: %d", localPort);
+
+    mLocalPort = localPort;
+    return OK;
+}
+
+status_t StagefrightRecorder::setParamRtpRemoteIp(const String8 &remoteIp) {
+    ALOGV("setParamVideoRemoteIp: %s", remoteIp.string());
+
+    mRemoteIp.setTo(remoteIp.string());
+    return OK;
+}
+
+status_t StagefrightRecorder::setParamRtpRemotePort(int32_t remotePort) {
+    ALOGV("setParamVideoRemotePort: %d", remotePort);
+
+    mRemotePort = remotePort;
+    return OK;
+}
+
+status_t StagefrightRecorder::setParamSelfID(int32_t selfID) {
+    ALOGV("setParamSelfID: %x", selfID);
+
+    mSelfID = selfID;
+    return OK;
+}
+
+status_t StagefrightRecorder::setParamVideoOpponentID(int32_t opponentID) {
+    mOpponentID = opponentID;
+    return OK;
+}
+
+status_t StagefrightRecorder::setParamPayloadType(int32_t payloadType) {
+    ALOGV("setParamPayloadType: %d", payloadType);
+
+    mPayloadType = payloadType;
+
+    if (mStarted && mOutputFormat == OUTPUT_FORMAT_RTP_AVP) {
+        mWriter->updatePayloadType(mPayloadType);
+    }
+
+    return OK;
+}
+
+status_t StagefrightRecorder::setRTPCVOExtMap(int32_t extmap) {
+    ALOGV("setRtpCvoExtMap: %d", extmap);
+
+    mRTPCVOExtMap = extmap;
+    return OK;
+}
+
+status_t StagefrightRecorder::setRTPCVODegrees(int32_t cvoDegrees) {
+    Mutex::Autolock autolock(mLock);
+    ALOGV("setRtpCvoDegrees: %d", cvoDegrees);
+
+    mRTPCVODegrees = cvoDegrees;
+
+    if (mStarted && mOutputFormat == OUTPUT_FORMAT_RTP_AVP) {
+        mWriter->updateCVODegrees(mRTPCVODegrees);
+    }
+
+    return OK;
+}
+
+status_t StagefrightRecorder::setParamRtpDscp(int32_t dscp) {
+    ALOGV("setParamRtpDscp: %d", dscp);
+
+    mRTPSockDscp = dscp;
+    return OK;
+}
+
+status_t StagefrightRecorder::setSocketNetwork(int64_t networkHandle) {
+    ALOGV("setSocketNetwork: %llu", (unsigned long long) networkHandle);
+
+    mRTPSockNetwork = networkHandle;
+    if (mStarted && mOutputFormat == OUTPUT_FORMAT_RTP_AVP) {
+        mWriter->updateSocketNetwork(mRTPSockNetwork);
+    }
+    return OK;
+}
+
+status_t StagefrightRecorder::requestIDRFrame() {
+    status_t ret = BAD_VALUE;
+    if (mVideoEncoderSource != NULL) {
+        ret = mVideoEncoderSource->requestIDRFrame();
+    } else {
+        ALOGV("requestIDRFrame: Encoder not ready");
+    }
+    return ret;
+}
+
 status_t StagefrightRecorder::setParameter(
         const String8 &key, const String8 &value) {
     ALOGV("setParameter: key (%s) => value (%s)", key.string(), value.string());
@@ -844,6 +973,11 @@
         if (safe_strtoi32(value.string(), &video_bitrate)) {
             return setParamVideoEncodingBitRate(video_bitrate);
         }
+    } else if (key == "video-param-bitrate-mode") {
+        int32_t video_bitrate_mode;
+        if (safe_strtoi32(value.string(), &video_bitrate_mode)) {
+            return setParamVideoBitRateMode(video_bitrate_mode);
+        }
     } else if (key == "video-param-rotation-angle-degrees") {
         int32_t degrees;
         if (safe_strtoi32(value.string(), &degrees)) {
@@ -884,6 +1018,61 @@
         if (safe_strtod(value.string(), &fps)) {
             return setParamCaptureFps(fps);
         }
+    } else if (key == "rtp-param-local-ip") {
+        return setParamRtpLocalIp(value);
+    } else if (key == "rtp-param-local-port") {
+        int32_t localPort;
+        if (safe_strtoi32(value.string(), &localPort)) {
+            return setParamRtpLocalPort(localPort);
+        }
+    } else if (key == "rtp-param-remote-ip") {
+        return setParamRtpRemoteIp(value);
+    } else if (key == "rtp-param-remote-port") {
+        int32_t remotePort;
+        if (safe_strtoi32(value.string(), &remotePort)) {
+            return setParamRtpRemotePort(remotePort);
+        }
+    } else if (key == "rtp-param-self-id") {
+        int32_t selfID;
+        int64_t temp;
+        if (safe_strtoi64(value.string(), &temp)) {
+            selfID = static_cast<int32_t>(temp);
+            return setParamSelfID(selfID);
+        }
+    } else if (key == "rtp-param-opponent-id") {
+        int32_t opnId;
+        int64_t temp;
+        if (safe_strtoi64(value.string(), &temp)) {
+            opnId = static_cast<int32_t>(temp);
+            return setParamVideoOpponentID(opnId);
+        }
+    } else if (key == "rtp-param-payload-type") {
+        int32_t payloadType;
+        if (safe_strtoi32(value.string(), &payloadType)) {
+            return setParamPayloadType(payloadType);
+        }
+    } else if (key == "rtp-param-ext-cvo-extmap") {
+        int32_t extmap;
+        if (safe_strtoi32(value.string(), &extmap)) {
+            return setRTPCVOExtMap(extmap);
+        }
+    } else if (key == "rtp-param-ext-cvo-degrees") {
+        int32_t degrees;
+        if (safe_strtoi32(value.string(), &degrees)) {
+            return setRTPCVODegrees(degrees);
+        }
+    } else if (key == "video-param-request-i-frame") {
+        return requestIDRFrame();
+    } else if (key == "rtp-param-set-socket-dscp") {
+        int32_t dscp;
+        if (safe_strtoi32(value.string(), &dscp)) {
+            return setParamRtpDscp(dscp);
+        }
+    } else if (key == "rtp-param-set-socket-network") {
+        int64_t networkHandle;
+        if (safe_strtoi64(value.string(), &networkHandle)) {
+            return setSocketNetwork(networkHandle);
+        }
     } else {
         ALOGE("setParameter: failed to find key %s", key.string());
     }
@@ -1050,6 +1239,17 @@
             sp<MetaData> meta = new MetaData;
             int64_t startTimeUs = systemTime() / 1000;
             meta->setInt64(kKeyTime, startTimeUs);
+            meta->setInt32(kKeySelfID, mSelfID);
+            meta->setInt32(kKeyPayloadType, mPayloadType);
+            meta->setInt64(kKeySocketNetwork, mRTPSockNetwork);
+            if (mRTPCVOExtMap > 0) {
+                meta->setInt32(kKeyRtpExtMap, mRTPCVOExtMap);
+                meta->setInt32(kKeyRtpCvoDegrees, mRTPCVODegrees);
+            }
+            if (mRTPSockDscp > 0) {
+                meta->setInt32(kKeyRtpDscp, mRTPSockDscp);
+            }
+
             status = mWriter->start(meta.get());
             break;
         }
@@ -1330,7 +1530,7 @@
         mVideoEncoderSource = source;
     }
 
-    mWriter = new ARTPWriter(mOutputFd);
+    mWriter = new ARTPWriter(mOutputFd, mLocalIp, mLocalPort, mRemoteIp, mRemotePort, mLastSeqNo);
     mWriter->addSource(source);
     mWriter->setListener(mListener);
 
@@ -1767,6 +1967,10 @@
         format->setInt32("stride", stride);
         format->setInt32("slice-height", sliceHeight);
         format->setInt32("color-format", colorFormat);
+        if (mOutputFormat == OUTPUT_FORMAT_RTP_AVP) {
+            // This indicates that a raw image provided to encoder needs to be rotated.
+            format->setInt32("rotation-degrees", mRotationDegrees);
+        }
     } else {
         format->setInt32("width", mVideoWidth);
         format->setInt32("height", mVideoHeight);
@@ -1785,6 +1989,7 @@
     }
 
     format->setInt32("bitrate", mVideoBitRate);
+    format->setInt32("bitrate-mode", mVideoBitRateMode);
     format->setInt32("frame-rate", mFrameRate);
     format->setInt32("i-frame-interval", mIFramesIntervalSec);
 
@@ -2130,6 +2335,7 @@
 
     if (mWriter != NULL) {
         err = mWriter->stop();
+        mLastSeqNo = mWriter->getSequenceNum();
         mWriter.clear();
     }
 
@@ -2206,6 +2412,8 @@
     mVideoHeight   = 144;
     mFrameRate     = -1;
     mVideoBitRate  = 192000;
+    // Following MediaCodec's default
+    mVideoBitRateMode = BITRATE_MODE_VBR;
     mSampleRate    = 8000;
     mAudioChannels = 1;
     mAudioBitRate  = 12200;
diff --git a/media/libmediaplayerservice/StagefrightRecorder.h b/media/libmediaplayerservice/StagefrightRecorder.h
index a725bee..0362edd 100644
--- a/media/libmediaplayerservice/StagefrightRecorder.h
+++ b/media/libmediaplayerservice/StagefrightRecorder.h
@@ -119,6 +119,7 @@
     int32_t mVideoWidth, mVideoHeight;
     int32_t mFrameRate;
     int32_t mVideoBitRate;
+    int32_t mVideoBitRateMode;
     int32_t mAudioBitRate;
     int32_t mAudioChannels;
     int32_t mSampleRate;
@@ -138,6 +139,18 @@
     int32_t mLongitudex10000;
     int32_t mStartTimeOffsetMs;
     int32_t mTotalBitRate;
+    String8 mLocalIp;
+    String8 mRemoteIp;
+    int32_t mLocalPort;
+    int32_t mRemotePort;
+    int32_t mSelfID;
+    int32_t mOpponentID;
+    int32_t mPayloadType;
+    int32_t mRTPCVOExtMap;
+    int32_t mRTPCVODegrees;
+    int32_t mRTPSockDscp;
+    int64_t mRTPSockNetwork;
+    uint32_t mLastSeqNo;
 
     int64_t mDurationRecordedUs;
     int64_t mStartedRecordingUs;
@@ -205,6 +218,7 @@
     status_t setParamCaptureFpsEnable(int32_t timeLapseEnable);
     status_t setParamCaptureFps(double fps);
     status_t setParamVideoEncodingBitRate(int32_t bitRate);
+    status_t setParamVideoBitRateMode(int32_t bitRateMode);
     status_t setParamVideoIFramesInterval(int32_t seconds);
     status_t setParamVideoEncoderProfile(int32_t profile);
     status_t setParamVideoEncoderLevel(int32_t level);
@@ -219,6 +233,18 @@
     status_t setParamMovieTimeScale(int32_t timeScale);
     status_t setParamGeoDataLongitude(int64_t longitudex10000);
     status_t setParamGeoDataLatitude(int64_t latitudex10000);
+    status_t setParamRtpLocalIp(const String8 &localIp);
+    status_t setParamRtpLocalPort(int32_t localPort);
+    status_t setParamRtpRemoteIp(const String8 &remoteIp);
+    status_t setParamRtpRemotePort(int32_t remotePort);
+    status_t setParamSelfID(int32_t selfID);
+    status_t setParamVideoOpponentID(int32_t opponentID);
+    status_t setParamPayloadType(int32_t payloadType);
+    status_t setRTPCVOExtMap(int32_t extmap);
+    status_t setRTPCVODegrees(int32_t cvoDegrees);
+    status_t setParamRtpDscp(int32_t dscp);
+    status_t setSocketNetwork(int64_t networkHandle);
+    status_t requestIDRFrame();
     void clipVideoBitRate();
     void clipVideoFrameRate();
     void clipVideoFrameWidth();
diff --git a/media/libmediaplayerservice/include/MediaPlayerInterface.h b/media/libmediaplayerservice/include/MediaPlayerInterface.h
index 436cb31..1b5cb4b 100644
--- a/media/libmediaplayerservice/include/MediaPlayerInterface.h
+++ b/media/libmediaplayerservice/include/MediaPlayerInterface.h
@@ -183,6 +183,10 @@
         return INVALID_OPERATION;
     }
 
+    virtual status_t    setDataSource(const String8& /* rtpParams */) {
+        return INVALID_OPERATION;
+    }
+
     // pass the buffered IGraphicBufferProducer to the media player service
     virtual status_t    setVideoSurfaceTexture(
                                 const sp<IGraphicBufferProducer>& bufferProducer) = 0;
diff --git a/media/libmediaplayerservice/nuplayer/AWakeLock.cpp b/media/libmediaplayerservice/nuplayer/AWakeLock.cpp
index 684ba2e..7bee002 100644
--- a/media/libmediaplayerservice/nuplayer/AWakeLock.cpp
+++ b/media/libmediaplayerservice/nuplayer/AWakeLock.cpp
@@ -52,18 +52,19 @@
             if (binder == NULL) {
                 ALOGW("could not get the power manager service");
             } else {
-                mPowerManager = interface_cast<IPowerManager>(binder);
+                mPowerManager = interface_cast<os::IPowerManager>(binder);
                 binder->linkToDeath(mDeathRecipient);
             }
         }
         if (mPowerManager != NULL) {
             sp<IBinder> binder = new BBinder();
             int64_t token = IPCThreadState::self()->clearCallingIdentity();
-            status_t status = mPowerManager->acquireWakeLock(
-                    POWERMANAGER_PARTIAL_WAKE_LOCK,
-                    binder, String16("AWakeLock"), String16("media"));
+            binder::Status status = mPowerManager->acquireWakeLock(
+                    binder, POWERMANAGER_PARTIAL_WAKE_LOCK,
+                    String16("AWakeLock"), String16("media"),
+                    {} /* workSource */, {} /* historyTag */);
             IPCThreadState::self()->restoreCallingIdentity(token);
-            if (status == NO_ERROR) {
+            if (status.isOk()) {
                 mWakeLockToken = binder;
                 mWakeLockCount++;
                 return true;
diff --git a/media/libmediaplayerservice/nuplayer/AWakeLock.h b/media/libmediaplayerservice/nuplayer/AWakeLock.h
index 323e7d7..8aa3b41 100644
--- a/media/libmediaplayerservice/nuplayer/AWakeLock.h
+++ b/media/libmediaplayerservice/nuplayer/AWakeLock.h
@@ -18,7 +18,7 @@
 #define A_WAKELOCK_H_
 
 #include <media/stagefright/foundation/ABase.h>
-#include <powermanager/IPowerManager.h>
+#include <android/os/IPowerManager.h>
 #include <utils/RefBase.h>
 
 namespace android {
@@ -37,7 +37,7 @@
     virtual ~AWakeLock();
 
 private:
-    sp<IPowerManager> mPowerManager;
+    sp<os::IPowerManager> mPowerManager;
     sp<IBinder>       mWakeLockToken;
     uint32_t          mWakeLockCount;
 
diff --git a/media/libmediaplayerservice/nuplayer/Android.bp b/media/libmediaplayerservice/nuplayer/Android.bp
index 32c97cf..f5e44c7 100644
--- a/media/libmediaplayerservice/nuplayer/Android.bp
+++ b/media/libmediaplayerservice/nuplayer/Android.bp
@@ -14,6 +14,7 @@
         "NuPlayerRenderer.cpp",
         "NuPlayerStreamListener.cpp",
         "RTSPSource.cpp",
+        "RTPSource.cpp",
         "StreamingSource.cpp",
     ],
 
@@ -30,6 +31,7 @@
         "frameworks/av/media/libstagefright/mpeg2ts",
         "frameworks/av/media/libstagefright/rtsp",
         "frameworks/av/media/libstagefright/timedtext",
+        "frameworks/native/include/android",
     ],
 
     cflags: [
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
index c1c4b55..4e7daa5 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
@@ -31,6 +31,7 @@
 #include "NuPlayerDriver.h"
 #include "NuPlayerRenderer.h"
 #include "NuPlayerSource.h"
+#include "RTPSource.h"
 #include "RTSPSource.h"
 #include "StreamingSource.h"
 #include "GenericSource.h"
@@ -368,6 +369,18 @@
     return err;
 }
 
+void NuPlayer::setDataSourceAsync(const String8& rtpParams) {
+    ALOGD("setDataSourceAsync for RTP = %s", rtpParams.string());
+    sp<AMessage> msg = new AMessage(kWhatSetDataSource, this);
+
+    sp<AMessage> notify = new AMessage(kWhatSourceNotify, this);
+    sp<Source> source = new RTPSource(notify, rtpParams);
+
+    msg->setObject("source", source);
+    msg->post();
+    mDataSourceType = DATA_SOURCE_TYPE_RTP;
+}
+
 void NuPlayer::prepareAsync() {
     ALOGV("prepareAsync");
 
@@ -1915,6 +1928,11 @@
 
     format->setInt32("priority", 0 /* realtime */);
 
+    if (mDataSourceType == DATA_SOURCE_TYPE_RTP) {
+        ALOGV("instantiateDecoder: set decoder error free on stream corrupt.");
+        format->setInt32("corrupt-free", true);
+    }
+
     if (!audio) {
         AString mime;
         CHECK(format->findString("mime", &mime));
@@ -2715,6 +2733,14 @@
             break;
         }
 
+        case Source::kWhatIMSRxNotice:
+        {
+            sp<AMessage> IMSRxNotice;
+            CHECK(msg->findMessage("message", &IMSRxNotice));
+            sendIMSRxNotice(IMSRxNotice);
+            break;
+        }
+
         default:
             TRESPASS();
     }
@@ -2817,11 +2843,53 @@
     }
 }
 
+void NuPlayer::sendIMSRxNotice(const sp<AMessage> &msg) {
+    int32_t payloadType;
+
+    CHECK(msg->findInt32("payload-type", &payloadType));
+
+    Parcel in;
+    in.writeInt32(payloadType);
+
+    switch (payloadType) {
+        case NuPlayer::RTPSource::RTCP_TSFB:   // RTCP TSFB
+        case NuPlayer::RTPSource::RTCP_PSFB:   // RTCP PSFB
+        case NuPlayer::RTPSource::RTP_AUTODOWN:
+        {
+            int32_t feedbackType, id;
+            CHECK(msg->findInt32("feedback-type", &feedbackType));
+            CHECK(msg->findInt32("sender", &id));
+            in.writeInt32(feedbackType);
+            in.writeInt32(id);
+            if (payloadType == NuPlayer::RTPSource::RTCP_TSFB) {
+                int32_t bitrate;
+                CHECK(msg->findInt32("bit-rate", &bitrate));
+                in.writeInt32(bitrate);
+            }
+            break;
+        }
+        case NuPlayer::RTPSource::RTP_CVO:
+        {
+            int32_t cvo;
+            CHECK(msg->findInt32("cvo", &cvo));
+            in.writeInt32(cvo);
+            break;
+        }
+        default:
+        break;
+    }
+
+    notifyListener(MEDIA_IMS_RX_NOTICE, 0, 0, &in);
+}
+
 const char *NuPlayer::getDataSourceType() {
     switch (mDataSourceType) {
         case DATA_SOURCE_TYPE_HTTP_LIVE:
             return "HTTPLive";
 
+        case DATA_SOURCE_TYPE_RTP:
+            return "RTP";
+
         case DATA_SOURCE_TYPE_RTSP:
             return "RTSP";
 
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.h b/media/libmediaplayerservice/nuplayer/NuPlayer.h
index ef4354c..0105248 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayer.h
+++ b/media/libmediaplayerservice/nuplayer/NuPlayer.h
@@ -51,6 +51,8 @@
 
     void setDataSourceAsync(const sp<DataSource> &source);
 
+    void setDataSourceAsync(const String8& rtpParams);
+
     status_t getBufferingSettings(BufferingSettings* buffering /* nonnull */);
     status_t setBufferingSettings(const BufferingSettings& buffering);
 
@@ -117,6 +119,7 @@
     struct GenericSource;
     struct HTTPLiveSource;
     struct Renderer;
+    struct RTPSource;
     struct RTSPSource;
     struct StreamingSource;
     struct Action;
@@ -257,6 +260,7 @@
     typedef enum {
         DATA_SOURCE_TYPE_NONE,
         DATA_SOURCE_TYPE_HTTP_LIVE,
+        DATA_SOURCE_TYPE_RTP,
         DATA_SOURCE_TYPE_RTSP,
         DATA_SOURCE_TYPE_GENERIC_URL,
         DATA_SOURCE_TYPE_GENERIC_FD,
@@ -334,6 +338,7 @@
     void sendSubtitleData(const sp<ABuffer> &buffer, int32_t baseIndex);
     void sendTimedMetaData(const sp<ABuffer> &buffer);
     void sendTimedTextData(const sp<ABuffer> &buffer);
+    void sendIMSRxNotice(const sp<AMessage> &msg);
 
     void writeTrackInfo(Parcel* reply, const sp<AMessage>& format) const;
 
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp
index f734439..8628edc 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp
@@ -1050,7 +1050,7 @@
         uint32_t flags = 0;
         CHECK(buffer->meta()->findInt64("timeUs", &timeUs));
 
-        int32_t eos, csd;
+        int32_t eos, csd, cvo;
         // we do not expect SYNCFRAME for decoder
         if (buffer->meta()->findInt32("eos", &eos) && eos) {
             flags |= MediaCodec::BUFFER_FLAG_EOS;
@@ -1058,6 +1058,24 @@
             flags |= MediaCodec::BUFFER_FLAG_CODECCONFIG;
         }
 
+        if (buffer->meta()->findInt32("cvo", (int32_t*)&cvo)) {
+            ALOGV("[%s] cvo(%d) found at %lld us", mComponentName.c_str(), cvo, (long long)timeUs);
+            switch (cvo) {
+                case 0:
+                    codecBuffer->meta()->setInt32("cvo", MediaCodec::CVO_DEGREE_0);
+                    break;
+                case 1:
+                    codecBuffer->meta()->setInt32("cvo", MediaCodec::CVO_DEGREE_90);
+                    break;
+                case 2:
+                    codecBuffer->meta()->setInt32("cvo", MediaCodec::CVO_DEGREE_180);
+                    break;
+                case 3:
+                    codecBuffer->meta()->setInt32("cvo", MediaCodec::CVO_DEGREE_270);
+                    break;
+            }
+        }
+
         // Modular DRM
         MediaBufferBase *mediaBuf = NULL;
         NuPlayerDrm::CryptoInfo *cryptInfo = NULL;
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
index dc144b2..2d82944 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
@@ -218,6 +218,26 @@
     return mAsyncResult;
 }
 
+status_t NuPlayerDriver::setDataSource(const String8& rtpParams) {
+    ALOGV("setDataSource(%p) rtp source", this);
+    Mutex::Autolock autoLock(mLock);
+
+    if (mState != STATE_IDLE) {
+        return INVALID_OPERATION;
+    }
+
+    mState = STATE_SET_DATASOURCE_PENDING;
+
+    mPlayer->setDataSourceAsync(rtpParams);
+
+    while (mState == STATE_SET_DATASOURCE_PENDING) {
+        mCondition.wait(mLock);
+    }
+
+    return mAsyncResult;
+}
+
+
 status_t NuPlayerDriver::setVideoSurfaceTexture(
         const sp<IGraphicBufferProducer> &bufferProducer) {
     ALOGV("setVideoSurfaceTexture(%p)", this);
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.h b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.h
index f4b1968..55a0fad 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.h
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.h
@@ -43,6 +43,8 @@
 
     virtual status_t setDataSource(const sp<DataSource>& dataSource);
 
+    virtual status_t setDataSource(const String8& rtpParams);
+
     virtual status_t setVideoSurfaceTexture(
             const sp<IGraphicBufferProducer> &bufferProducer);
 
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerSource.h b/media/libmediaplayerservice/nuplayer/NuPlayerSource.h
index f137c52..eb39870 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerSource.h
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerSource.h
@@ -58,6 +58,7 @@
         kWhatInstantiateSecureDecoders,
         // Modular DRM
         kWhatDrmInfo,
+        kWhatIMSRxNotice,
     };
 
     // The provides message is used to notify the player about various
diff --git a/media/libmediaplayerservice/nuplayer/RTPSource.cpp b/media/libmediaplayerservice/nuplayer/RTPSource.cpp
new file mode 100644
index 0000000..a6601cd
--- /dev/null
+++ b/media/libmediaplayerservice/nuplayer/RTPSource.cpp
@@ -0,0 +1,774 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "RTPSource"
+#include <utils/Log.h>
+
+#include "RTPSource.h"
+
+
+
+
+#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/MetaData.h>
+#include <string.h>
+
+namespace android {
+
+const int64_t kNearEOSTimeoutUs = 2000000ll; // 2 secs
+static int32_t kMaxAllowedStaleAccessUnits = 20;
+
+NuPlayer::RTPSource::RTPSource(
+        const sp<AMessage> &notify,
+        const String8& rtpParams)
+    : Source(notify),
+      mRTPParams(rtpParams),
+      mFlags(0),
+      mState(DISCONNECTED),
+      mFinalResult(OK),
+      mBuffering(false),
+      mInPreparationPhase(true),
+      mRTPConn(new ARTPConnection(ARTPConnection::kViLTEConnection)),
+      mEOSTimeoutAudio(0),
+      mEOSTimeoutVideo(0),
+      mLastCVOUpdated(-1) {
+      ALOGD("RTPSource initialized with rtpParams=%s", rtpParams.string());
+}
+
+NuPlayer::RTPSource::~RTPSource() {
+    if (mLooper != NULL) {
+        mLooper->unregisterHandler(id());
+        mLooper->unregisterHandler(mRTPConn->id());
+        mLooper->stop();
+    }
+}
+
+status_t NuPlayer::RTPSource::getBufferingSettings(
+            BufferingSettings* buffering /* nonnull */) {
+    Mutex::Autolock _l(mBufferingSettingsLock);
+    *buffering = mBufferingSettings;
+    return OK;
+}
+
+status_t NuPlayer::RTPSource::setBufferingSettings(const BufferingSettings& buffering) {
+    Mutex::Autolock _l(mBufferingSettingsLock);
+    mBufferingSettings = buffering;
+    return OK;
+}
+
+void NuPlayer::RTPSource::prepareAsync() {
+    if (mLooper == NULL) {
+        mLooper = new ALooper;
+        mLooper->setName("rtp");
+        mLooper->start();
+
+        mLooper->registerHandler(this);
+        mLooper->registerHandler(mRTPConn);
+    }
+
+    CHECK_EQ(mState, (int)DISCONNECTED);
+    mState = CONNECTING;
+
+    setParameters(mRTPParams);
+
+    TrackInfo *info = NULL;
+    unsigned i;
+    for (i = 0; i < mTracks.size(); i++) {
+        info = &mTracks.editItemAt(i);
+
+        if (info == NULL)
+            break;
+
+        AString sdp;
+        ASessionDescription::SDPStringFactory(sdp, info->mLocalIp,
+                info->mIsAudio, info->mLocalPort, info->mPayloadType, info->mAS, info->mCodecName,
+                NULL, info->mWidth, info->mHeight, info->mCVOExtMap);
+        ALOGD("RTPSource SDP =>\n%s", sdp.c_str());
+
+        sp<ASessionDescription> desc = new ASessionDescription;
+        bool isValidSdp = desc->setTo(sdp.c_str(), sdp.size());
+        ALOGV("RTPSource isValidSdp => %d", isValidSdp);
+
+        int sockRtp, sockRtcp;
+        ARTPConnection::MakeRTPSocketPair(&sockRtp, &sockRtcp, info->mLocalIp, info->mRemoteIp,
+                info->mLocalPort, info->mRemotePort, info->mSocketNetwork);
+
+        sp<AMessage> notify = new AMessage('accu', this);
+
+        ALOGV("RTPSource addStream. track-index=%d", i);
+        notify->setSize("trackIndex", i);
+        // index(i) should be started from 1. 0 is reserved for [root]
+        mRTPConn->addStream(sockRtp, sockRtcp, desc, i + 1, notify, false);
+        mRTPConn->setSelfID(info->mSelfID);
+        mRTPConn->setMinMaxBitrate(kMinVideoBitrate, info->mAS * 1000 /* kbps */);
+
+        info->mRTPSocket = sockRtp;
+        info->mRTCPSocket = sockRtcp;
+        info->mFirstSeqNumInSegment = 0;
+        info->mNewSegment = true;
+        info->mAllowedStaleAccessUnits = kMaxAllowedStaleAccessUnits;
+        info->mRTPAnchor = 0;
+        info->mNTPAnchorUs = -1;
+        info->mNormalPlayTimeRTP = 0;
+        info->mNormalPlayTimeUs = 0ll;
+
+        // index(i) should be started from 1. 0 is reserved for [root]
+        info->mPacketSource = new APacketSource(desc, i + 1);
+
+        int32_t timeScale;
+        sp<MetaData> format = getTrackFormat(i, &timeScale);
+        sp<AnotherPacketSource> source = new AnotherPacketSource(format);
+
+        if (info->mIsAudio) {
+            mAudioTrack = source;
+        } else {
+            mVideoTrack = source;
+        }
+
+        info->mSource = source;
+    }
+
+    if (mInPreparationPhase) {
+        mInPreparationPhase = false;
+        notifyPrepared();
+    }
+}
+
+void NuPlayer::RTPSource::start() {
+}
+
+void NuPlayer::RTPSource::pause() {
+    mState = PAUSED;
+}
+
+void NuPlayer::RTPSource::resume() {
+    mState = CONNECTING;
+}
+
+void NuPlayer::RTPSource::stop() {
+    if (mLooper == NULL) {
+        return;
+    }
+    sp<AMessage> msg = new AMessage(kWhatDisconnect, this);
+
+    sp<AMessage> dummy;
+    msg->postAndAwaitResponse(&dummy);
+}
+
+status_t NuPlayer::RTPSource::feedMoreTSData() {
+    Mutex::Autolock _l(mBufferingLock);
+    return mFinalResult;
+}
+
+sp<MetaData> NuPlayer::RTPSource::getFormatMeta(bool audio) {
+    sp<AnotherPacketSource> source = getSource(audio);
+
+    if (source == NULL) {
+        return NULL;
+    }
+
+    return source->getFormat();
+}
+
+bool NuPlayer::RTPSource::haveSufficientDataOnAllTracks() {
+    // We're going to buffer at least 2 secs worth data on all tracks before
+    // starting playback (both at startup and after a seek).
+
+    static const int64_t kMinDurationUs = 2000000ll;
+
+    int64_t mediaDurationUs = 0;
+    getDuration(&mediaDurationUs);
+    if ((mAudioTrack != NULL && mAudioTrack->isFinished(mediaDurationUs))
+            || (mVideoTrack != NULL && mVideoTrack->isFinished(mediaDurationUs))) {
+        return true;
+    }
+
+    status_t err;
+    int64_t durationUs;
+    if (mAudioTrack != NULL
+            && (durationUs = mAudioTrack->getBufferedDurationUs(&err))
+                    < kMinDurationUs
+            && err == OK) {
+        ALOGV("audio track doesn't have enough data yet. (%.2f secs buffered)",
+              durationUs / 1E6);
+        return false;
+    }
+
+    if (mVideoTrack != NULL
+            && (durationUs = mVideoTrack->getBufferedDurationUs(&err))
+                    < kMinDurationUs
+            && err == OK) {
+        ALOGV("video track doesn't have enough data yet. (%.2f secs buffered)",
+              durationUs / 1E6);
+        return false;
+    }
+
+    return true;
+}
+
+status_t NuPlayer::RTPSource::dequeueAccessUnit(
+        bool audio, sp<ABuffer> *accessUnit) {
+
+    sp<AnotherPacketSource> source = getSource(audio);
+
+    if (mState == PAUSED) {
+        ALOGV("-EWOULDBLOCK");
+        return -EWOULDBLOCK;
+    }
+
+    status_t finalResult;
+    if (!source->hasBufferAvailable(&finalResult)) {
+        if (finalResult == OK) {
+            int64_t mediaDurationUs = 0;
+            getDuration(&mediaDurationUs);
+            sp<AnotherPacketSource> otherSource = getSource(!audio);
+            status_t otherFinalResult;
+
+            // If other source already signaled EOS, this source should also signal EOS
+            if (otherSource != NULL &&
+                    !otherSource->hasBufferAvailable(&otherFinalResult) &&
+                    otherFinalResult == ERROR_END_OF_STREAM) {
+                source->signalEOS(ERROR_END_OF_STREAM);
+                return ERROR_END_OF_STREAM;
+            }
+
+            // If this source has detected near end, give it some time to retrieve more
+            // data before signaling EOS
+            if (source->isFinished(mediaDurationUs)) {
+                int64_t eosTimeout = audio ? mEOSTimeoutAudio : mEOSTimeoutVideo;
+                if (eosTimeout == 0) {
+                    setEOSTimeout(audio, ALooper::GetNowUs());
+                } else if ((ALooper::GetNowUs() - eosTimeout) > kNearEOSTimeoutUs) {
+                    setEOSTimeout(audio, 0);
+                    source->signalEOS(ERROR_END_OF_STREAM);
+                    return ERROR_END_OF_STREAM;
+                }
+                return -EWOULDBLOCK;
+            }
+
+            if (!(otherSource != NULL && otherSource->isFinished(mediaDurationUs))) {
+                // We should not enter buffering mode
+                // if any of the sources already have detected EOS.
+                // TODO: needs to be checked whether below line is needed or not.
+                // startBufferingIfNecessary();
+            }
+
+            return -EWOULDBLOCK;
+        }
+        return finalResult;
+    }
+
+    setEOSTimeout(audio, 0);
+
+    finalResult = source->dequeueAccessUnit(accessUnit);
+    if (finalResult != OK) {
+        return finalResult;
+    }
+
+    int32_t cvo;
+    if ((*accessUnit) != NULL && (*accessUnit)->meta()->findInt32("cvo", &cvo)) {
+        if (cvo != mLastCVOUpdated) {
+            sp<AMessage> msg = new AMessage();
+            msg->setInt32("payload-type", NuPlayer::RTPSource::RTP_CVO);
+            msg->setInt32("cvo", cvo);
+
+            sp<AMessage> notify = dupNotify();
+            notify->setInt32("what", kWhatIMSRxNotice);
+            notify->setMessage("message", msg);
+            notify->post();
+
+            ALOGV("notify cvo updated (%d)->(%d) to upper layer", mLastCVOUpdated, cvo);
+            mLastCVOUpdated = cvo;
+        }
+    }
+
+    return finalResult;
+}
+
+sp<AnotherPacketSource> NuPlayer::RTPSource::getSource(bool audio) {
+    return audio ? mAudioTrack : mVideoTrack;
+}
+
+void NuPlayer::RTPSource::setEOSTimeout(bool audio, int64_t timeout) {
+    if (audio) {
+        mEOSTimeoutAudio = timeout;
+    } else {
+        mEOSTimeoutVideo = timeout;
+    }
+}
+
+status_t NuPlayer::RTPSource::getDuration(int64_t *durationUs) {
+    *durationUs = 0ll;
+
+    int64_t audioDurationUs;
+    if (mAudioTrack != NULL
+            && mAudioTrack->getFormat()->findInt64(
+                kKeyDuration, &audioDurationUs)
+            && audioDurationUs > *durationUs) {
+        *durationUs = audioDurationUs;
+    }
+
+    int64_t videoDurationUs;
+    if (mVideoTrack != NULL
+            && mVideoTrack->getFormat()->findInt64(
+                kKeyDuration, &videoDurationUs)
+            && videoDurationUs > *durationUs) {
+        *durationUs = videoDurationUs;
+    }
+
+    return OK;
+}
+
+status_t NuPlayer::RTPSource::seekTo(int64_t seekTimeUs, MediaPlayerSeekMode mode) {
+    ALOGV("RTPSource::seekTo=%d, mode=%d", (int)seekTimeUs, mode);
+    return OK;
+}
+
+void NuPlayer::RTPSource::schedulePollBuffering() {
+    sp<AMessage> msg = new AMessage(kWhatPollBuffering, this);
+    msg->post(kBufferingPollIntervalUs); // 1 second intervals
+}
+
+void NuPlayer::RTPSource::onPollBuffering() {
+    schedulePollBuffering();
+}
+
+void NuPlayer::RTPSource::onMessageReceived(const sp<AMessage> &msg) {
+    ALOGV("onMessageReceived =%d", msg->what());
+
+    switch (msg->what()) {
+        case kWhatAccessUnitComplete:
+        {
+            if (mState == CONNECTING) {
+                mState = CONNECTED;
+            }
+
+            int32_t timeUpdate;
+            //"time-update" raised from ARTPConnection::parseSR()
+            if (msg->findInt32("time-update", &timeUpdate) && timeUpdate) {
+                size_t trackIndex;
+                CHECK(msg->findSize("trackIndex", &trackIndex));
+
+                uint32_t rtpTime;
+                uint64_t ntpTime;
+                CHECK(msg->findInt32("rtp-time", (int32_t *)&rtpTime));
+                CHECK(msg->findInt64("ntp-time", (int64_t *)&ntpTime));
+
+                onTimeUpdate(trackIndex, rtpTime, ntpTime);
+                break;
+            }
+
+            int32_t firstRTCP;
+            if (msg->findInt32("first-rtcp", &firstRTCP)) {
+                // There won't be an access unit here, it's just a notification
+                // that the data communication worked since we got the first
+                // rtcp packet.
+                ALOGV("first-rtcp");
+                break;
+            }
+
+            int32_t IMSRxNotice;
+            if (msg->findInt32("rtcp-event", &IMSRxNotice)) {
+                int32_t payloadType, feedbackType;
+                CHECK(msg->findInt32("payload-type", &payloadType));
+                CHECK(msg->findInt32("feedback-type", &feedbackType));
+
+                sp<AMessage> notify = dupNotify();
+                notify->setInt32("what", kWhatIMSRxNotice);
+                notify->setMessage("message", msg);
+                notify->post();
+
+                ALOGV("IMSRxNotice \t\t payload : %d feedback : %d",
+                      payloadType, feedbackType);
+                break;
+            }
+
+            size_t trackIndex;
+            CHECK(msg->findSize("trackIndex", &trackIndex));
+
+            sp<ABuffer> accessUnit;
+            if (msg->findBuffer("access-unit", &accessUnit) == false) {
+                break;
+            }
+
+            int32_t damaged;
+            if (accessUnit->meta()->findInt32("damaged", &damaged)
+                    && damaged) {
+                ALOGD("dropping damaged access unit.");
+                break;
+            }
+
+            // Implicitly assert on valid trackIndex here, which we ensure by
+            // never removing tracks.
+            TrackInfo *info = &mTracks.editItemAt(trackIndex);
+
+            sp<AnotherPacketSource> source = info->mSource;
+            if (source != NULL) {
+                uint32_t rtpTime;
+                CHECK(accessUnit->meta()->findInt32("rtp-time", (int32_t *)&rtpTime));
+
+                /* AnotherPacketSource make an assertion if there is no ntp provided
+                   RTPSource should provide ntpUs all the times.
+                if (!info->mNPTMappingValid) {
+                    // This is a live stream, we didn't receive any normal
+                    // playtime mapping. We won't map to npt time.
+                    source->queueAccessUnit(accessUnit);
+                    break;
+                }
+                */
+
+                int64_t nptUs =
+                    ((double)rtpTime - (double)info->mRTPTime)
+                        / info->mTimeScale
+                        * 1000000ll
+                        + info->mNormalPlaytimeUs;
+
+                accessUnit->meta()->setInt64("timeUs", nptUs);
+
+                source->queueAccessUnit(accessUnit);
+            }
+
+            break;
+        }
+        case kWhatDisconnect:
+        {
+            sp<AReplyToken> replyID;
+            CHECK(msg->senderAwaitsResponse(&replyID));
+
+            for (size_t i = 0; i < mTracks.size(); ++i) {
+                TrackInfo *info = &mTracks.editItemAt(i);
+
+                if (info->mIsAudio) {
+                    mAudioTrack->signalEOS(ERROR_END_OF_STREAM);
+                    mAudioTrack = NULL;
+                    ALOGV("mAudioTrack disconnected");
+                } else {
+                    mVideoTrack->signalEOS(ERROR_END_OF_STREAM);
+                    mVideoTrack = NULL;
+                    ALOGV("mVideoTrack disconnected");
+                }
+
+                mRTPConn->removeStream(info->mRTPSocket, info->mRTCPSocket);
+                close(info->mRTPSocket);
+                close(info->mRTCPSocket);
+            }
+
+            mTracks.clear();
+            mFirstAccessUnit = true;
+            mAllTracksHaveTime = false;
+            mNTPAnchorUs = -1;
+            mMediaAnchorUs = -1;
+            mLastMediaTimeUs = -1;
+            mNumAccessUnitsReceived = 0;
+            mReceivedFirstRTCPPacket = false;
+            mReceivedFirstRTPPacket = false;
+            mPausing = false;
+            mPauseGeneration = 0;
+
+            (new AMessage)->postReply(replyID);
+
+            break;
+        }
+        case kWhatPollBuffering:
+            break;
+        default:
+            TRESPASS();
+    }
+}
+
+void NuPlayer::RTPSource::onTimeUpdate(int32_t trackIndex, uint32_t rtpTime, uint64_t ntpTime) {
+    ALOGV("onTimeUpdate track %d, rtpTime = 0x%08x, ntpTime = %#016llx",
+         trackIndex, rtpTime, (long long)ntpTime);
+
+    // convert ntpTime in Q32 seconds to microseconds. Note: this will not lose precision
+    // because ntpTimeUs is at most 52 bits (double holds 53 bits)
+    int64_t ntpTimeUs = (int64_t)(ntpTime * 1E6 / (1ll << 32));
+
+    TrackInfo *track = &mTracks.editItemAt(trackIndex);
+
+    track->mRTPAnchor = rtpTime;
+    track->mNTPAnchorUs = ntpTimeUs;
+
+    if (mNTPAnchorUs < 0) {
+        mNTPAnchorUs = ntpTimeUs;
+        mMediaAnchorUs = mLastMediaTimeUs;
+    }
+
+    if (!mAllTracksHaveTime) {
+        bool allTracksHaveTime = (mTracks.size() > 0);
+        for (size_t i = 0; i < mTracks.size(); ++i) {
+            TrackInfo *track = &mTracks.editItemAt(i);
+            if (track->mNTPAnchorUs < 0) {
+                allTracksHaveTime = false;
+                break;
+            }
+        }
+        if (allTracksHaveTime) {
+            mAllTracksHaveTime = true;
+            ALOGI("Time now established for all tracks.");
+        }
+    }
+    if (mAllTracksHaveTime && dataReceivedOnAllChannels()) {
+        // Time is now established, lets start timestamping immediately
+        for (size_t i = 0; i < mTracks.size(); ++i) {
+            TrackInfo *trackInfo = &mTracks.editItemAt(i);
+            while (!trackInfo->mPackets.empty()) {
+                sp<ABuffer> accessUnit = *trackInfo->mPackets.begin();
+                trackInfo->mPackets.erase(trackInfo->mPackets.begin());
+
+                if (addMediaTimestamp(i, trackInfo, accessUnit)) {
+                    postQueueAccessUnit(i, accessUnit);
+                }
+            }
+        }
+    }
+}
+
+bool NuPlayer::RTPSource::addMediaTimestamp(
+        int32_t trackIndex, const TrackInfo *track,
+        const sp<ABuffer> &accessUnit) {
+
+    uint32_t rtpTime;
+    CHECK(accessUnit->meta()->findInt32(
+                "rtp-time", (int32_t *)&rtpTime));
+
+    int64_t relRtpTimeUs =
+        (((int64_t)rtpTime - (int64_t)track->mRTPAnchor) * 1000000ll)
+        / track->mTimeScale;
+
+    int64_t ntpTimeUs = track->mNTPAnchorUs + relRtpTimeUs;
+
+    int64_t mediaTimeUs = mMediaAnchorUs + ntpTimeUs - mNTPAnchorUs;
+
+    if (mediaTimeUs > mLastMediaTimeUs) {
+        mLastMediaTimeUs = mediaTimeUs;
+    }
+
+    if (mediaTimeUs < 0) {
+        ALOGV("dropping early accessUnit.");
+        return false;
+    }
+
+    ALOGV("track %d rtpTime=%u mediaTimeUs = %lld us (%.2f secs)",
+            trackIndex, rtpTime, (long long)mediaTimeUs, mediaTimeUs / 1E6);
+
+    accessUnit->meta()->setInt64("timeUs", mediaTimeUs);
+
+    return true;
+}
+
+bool NuPlayer::RTPSource::dataReceivedOnAllChannels() {
+    TrackInfo *track;
+    for (size_t i = 0; i < mTracks.size(); ++i) {
+        track = &mTracks.editItemAt(i);
+        if (track->mPackets.empty()) {
+            return false;
+        }
+    }
+    return true;
+}
+
+void NuPlayer::RTPSource::postQueueAccessUnit(
+        size_t trackIndex, const sp<ABuffer> &accessUnit) {
+    sp<AMessage> msg = new AMessage(kWhatAccessUnit, this);
+    msg->setInt32("what", kWhatAccessUnit);
+    msg->setSize("trackIndex", trackIndex);
+    msg->setBuffer("accessUnit", accessUnit);
+    msg->post();
+}
+
+void NuPlayer::RTPSource::postQueueEOS(size_t trackIndex, status_t finalResult) {
+    sp<AMessage> msg = new AMessage(kWhatEOS, this);
+    msg->setInt32("what", kWhatEOS);
+    msg->setSize("trackIndex", trackIndex);
+    msg->setInt32("finalResult", finalResult);
+    msg->post();
+}
+
+sp<MetaData> NuPlayer::RTPSource::getTrackFormat(size_t index, int32_t *timeScale) {
+    CHECK_GE(index, 0u);
+    CHECK_LT(index, mTracks.size());
+
+    const TrackInfo &info = mTracks.itemAt(index);
+
+    *timeScale = info.mTimeScale;
+
+    return info.mPacketSource->getFormat();
+}
+
+void NuPlayer::RTPSource::onConnected() {
+    ALOGV("onConnected");
+    mState = CONNECTED;
+}
+
+void NuPlayer::RTPSource::onDisconnected(const sp<AMessage> &msg) {
+    if (mState == DISCONNECTED) {
+        return;
+    }
+
+    status_t err;
+    CHECK(msg->findInt32("result", &err));
+    CHECK_NE(err, (status_t)OK);
+
+//    mLooper->unregisterHandler(mHandler->id());
+//    mHandler.clear();
+
+    if (mState == CONNECTING) {
+        // We're still in the preparation phase, signal that it
+        // failed.
+        notifyPrepared(err);
+    }
+
+    mState = DISCONNECTED;
+//    setError(err);
+
+}
+
+status_t NuPlayer::RTPSource::setParameter(const String8 &key, const String8 &value) {
+    ALOGV("setParameter: key (%s) => value (%s)", key.string(), value.string());
+
+    bool isAudioKey = key.contains("audio");
+    TrackInfo *info = NULL;
+    for (unsigned i = 0; i < mTracks.size(); ++i) {
+        info = &mTracks.editItemAt(i);
+        if (info != NULL && info->mIsAudio == isAudioKey) {
+            ALOGV("setParameter: %s track (%d) found", isAudioKey ? "audio" : "video" , i);
+            break;
+        }
+    }
+
+    if (info == NULL) {
+        TrackInfo newTrackInfo;
+        newTrackInfo.mIsAudio = isAudioKey;
+        mTracks.push(newTrackInfo);
+        info = &mTracks.editTop();
+    }
+
+    if (key == "rtp-param-mime-type") {
+        info->mMimeType = value;
+
+        const char *mime = value.string();
+        const char *delimiter = strchr(mime, '/');
+        info->mCodecName = delimiter ? (delimiter + 1) : "<none>";
+
+        ALOGV("rtp-param-mime-type: mMimeType (%s) => mCodecName (%s)",
+                info->mMimeType.string(), info->mCodecName.string());
+    } else if (key == "video-param-decoder-profile") {
+        info->mCodecProfile = atoi(value);
+    } else if (key == "video-param-decoder-level") {
+        info->mCodecLevel = atoi(value);
+    } else if (key == "video-param-width") {
+        info->mWidth = atoi(value);
+    } else if (key == "video-param-height") {
+        info->mHeight = atoi(value);
+    } else if (key == "rtp-param-local-ip") {
+        info->mLocalIp = value;
+    } else if (key == "rtp-param-local-port") {
+        info->mLocalPort = atoi(value);
+    } else if (key == "rtp-param-remote-ip") {
+        info->mRemoteIp = value;
+    } else if (key == "rtp-param-remote-port") {
+        info->mRemotePort = atoi(value);
+    } else if (key == "rtp-param-payload-type") {
+        info->mPayloadType = atoi(value);
+    } else if (key == "rtp-param-as") {
+        //AS means guaranteed bit rate that negotiated from sdp.
+        info->mAS = atoi(value);
+    } else if (key == "rtp-param-rtp-timeout") {
+    } else if (key == "rtp-param-rtcp-timeout") {
+    } else if (key == "rtp-param-time-scale") {
+    } else if (key == "rtp-param-self-id") {
+        info->mSelfID = atoi(value);
+    } else if (key == "rtp-param-ext-cvo-extmap") {
+        info->mCVOExtMap = atoi(value);
+    } else if (key == "rtp-param-set-socket-network") {
+        int64_t networkHandle = atoll(value);
+        setSocketNetwork(networkHandle);
+    }
+
+    return OK;
+}
+
+status_t NuPlayer::RTPSource::setParameters(const String8 &params) {
+    ALOGV("setParameters: %s", params.string());
+    const char *cparams = params.string();
+    const char *key_start = cparams;
+    for (;;) {
+        const char *equal_pos = strchr(key_start, '=');
+        if (equal_pos == NULL) {
+            ALOGE("Parameters %s miss a value", cparams);
+            return BAD_VALUE;
+        }
+        String8 key(key_start, equal_pos - key_start);
+        TrimString(&key);
+        if (key.length() == 0) {
+            ALOGE("Parameters %s contains an empty key", cparams);
+            return BAD_VALUE;
+        }
+        const char *value_start = equal_pos + 1;
+        const char *semicolon_pos = strchr(value_start, ';');
+        String8 value;
+        if (semicolon_pos == NULL) {
+            value.setTo(value_start);
+        } else {
+            value.setTo(value_start, semicolon_pos - value_start);
+        }
+        if (setParameter(key, value) != OK) {
+            return BAD_VALUE;
+        }
+        if (semicolon_pos == NULL) {
+            break;  // Reaches the end
+        }
+        key_start = semicolon_pos + 1;
+    }
+    return OK;
+}
+
+void NuPlayer::RTPSource::setSocketNetwork(int64_t networkHandle) {
+    ALOGV("setSocketNetwork: %llu", (unsigned long long)networkHandle);
+
+    TrackInfo *info = NULL;
+    for (size_t i = 0; i < mTracks.size(); ++i) {
+        info = &mTracks.editItemAt(i);
+
+        if (info == NULL)
+            break;
+
+        info->mSocketNetwork = networkHandle;
+    }
+}
+
+// Trim both leading and trailing whitespace from the given string.
+//static
+void NuPlayer::RTPSource::TrimString(String8 *s) {
+    size_t num_bytes = s->bytes();
+    const char *data = s->string();
+
+    size_t leading_space = 0;
+    while (leading_space < num_bytes && isspace(data[leading_space])) {
+        ++leading_space;
+    }
+
+    size_t i = num_bytes;
+    while (i > leading_space && isspace(data[i - 1])) {
+        --i;
+    }
+
+    s->setTo(String8(&data[leading_space], i - leading_space));
+}
+
+}  // namespace android
diff --git a/media/libmediaplayerservice/nuplayer/RTPSource.h b/media/libmediaplayerservice/nuplayer/RTPSource.h
new file mode 100644
index 0000000..5085a7e
--- /dev/null
+++ b/media/libmediaplayerservice/nuplayer/RTPSource.h
@@ -0,0 +1,218 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef RTP_SOURCE_H_
+
+#define RTP_SOURCE_H_
+
+#include <media/stagefright/foundation/ABase.h>
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/MediaSource.h>
+#include <media/stagefright/Utils.h>
+#include <media/BufferingSettings.h>
+
+#include <utils/KeyedVector.h>
+#include <utils/Vector.h>
+#include <utils/RefBase.h>
+
+#include "AnotherPacketSource.h"
+#include "APacketSource.h"
+#include "ARTPConnection.h"
+#include "ASessionDescription.h"
+#include "NuPlayerSource.h"
+
+
+
+
+
+
+namespace android {
+
+struct ALooper;
+struct AnotherPacketSource;
+
+struct NuPlayer::RTPSource : public NuPlayer::Source {
+    RTPSource(
+            const sp<AMessage> &notify,
+            const String8& rtpParams);
+
+    enum {
+        RTCP_TSFB = 205,
+        RTCP_PSFB = 206,
+        RTP_CVO = 300,
+        RTP_AUTODOWN = 400,
+    };
+
+    virtual status_t getBufferingSettings(
+            BufferingSettings* buffering /* nonnull */) override;
+    virtual status_t setBufferingSettings(const BufferingSettings& buffering) override;
+
+    virtual void prepareAsync();
+    virtual void start();
+    virtual void stop();
+    virtual void pause();
+    virtual void resume();
+
+    virtual status_t feedMoreTSData();
+
+    virtual status_t dequeueAccessUnit(bool audio, sp<ABuffer> *accessUnit);
+
+    virtual status_t getDuration(int64_t *durationUs);
+    virtual status_t seekTo(
+            int64_t seekTimeUs,
+            MediaPlayerSeekMode mode = MediaPlayerSeekMode::SEEK_PREVIOUS_SYNC) override;
+
+    void onMessageReceived(const sp<AMessage> &msg);
+
+protected:
+    virtual ~RTPSource();
+
+    virtual sp<MetaData> getFormatMeta(bool audio);
+
+private:
+    enum {
+        kWhatAccessUnit = 'accU',
+        kWhatAccessUnitComplete = 'accu',
+        kWhatDisconnect = 'disc',
+        kWhatEOS = 'eos!',
+        kWhatPollBuffering = 'poll',
+        kWhatSetBufferingSettings = 'sBuS',
+    };
+
+    const int64_t kBufferingPollIntervalUs = 1000000ll;
+    const int32_t kMinVideoBitrate = 192000; /* bps */
+
+    enum State {
+        DISCONNECTED,
+        CONNECTING,
+        CONNECTED,
+        PAUSED,
+    };
+
+    struct TrackInfo {
+
+        /* SDP of track */
+        bool mIsAudio;
+        int32_t mPayloadType;
+        String8 mMimeType;
+        String8 mCodecName;
+        int32_t mCodecProfile;
+        int32_t mCodecLevel;
+        int32_t mWidth;
+        int32_t mHeight;
+        String8 mLocalIp;
+        String8 mRemoteIp;
+        int32_t mLocalPort;
+        int32_t mRemotePort;
+        int64_t mSocketNetwork;
+        int32_t mTimeScale;
+        int32_t mAS;
+
+        /* Unique ID indicates itself */
+        uint32_t mSelfID;
+        /* extmap:<value> for CVO will be set to here */
+        int32_t mCVOExtMap;
+
+        /* a copy of TrackInfo in RTSPSource */
+        sp<AnotherPacketSource> mSource;
+        uint32_t mRTPTime;
+        int64_t mNormalPlaytimeUs;
+        bool mNPTMappingValid;
+
+        /* a copy of TrackInfo in MyHandler.h */
+        int mRTPSocket;
+        int mRTCPSocket;
+        uint32_t mFirstSeqNumInSegment;
+        bool mNewSegment;
+        int32_t mAllowedStaleAccessUnits;
+        uint32_t mRTPAnchor;
+        int64_t mNTPAnchorUs;
+        bool mEOSReceived;
+        uint32_t mNormalPlayTimeRTP;
+        int64_t mNormalPlayTimeUs;
+        sp<APacketSource> mPacketSource;
+        List<sp<ABuffer>> mPackets;
+    };
+
+    const String8 mRTPParams;
+    uint32_t mFlags;
+    State mState;
+    status_t mFinalResult;
+
+    // below 3 parameters need to be checked whether it needed or not.
+    Mutex mBufferingLock;
+    bool mBuffering;
+    bool mInPreparationPhase;
+    Mutex mBufferingSettingsLock;
+    BufferingSettings mBufferingSettings;
+
+    sp<ALooper> mLooper;
+
+    sp<ARTPConnection> mRTPConn;
+
+    Vector<TrackInfo> mTracks;
+    sp<AnotherPacketSource> mAudioTrack;
+    sp<AnotherPacketSource> mVideoTrack;
+
+    int64_t mEOSTimeoutAudio;
+    int64_t mEOSTimeoutVideo;
+
+    /* MyHandler.h */
+    bool mFirstAccessUnit;
+    bool mAllTracksHaveTime;
+    int64_t mNTPAnchorUs;
+    int64_t mMediaAnchorUs;
+    int64_t mLastMediaTimeUs;
+    int64_t mNumAccessUnitsReceived;
+    int32_t mLastCVOUpdated;
+    bool mReceivedFirstRTCPPacket;
+    bool mReceivedFirstRTPPacket;
+    bool mPausing;
+    int32_t mPauseGeneration;
+
+    sp<AnotherPacketSource> getSource(bool audio);
+
+    /* MyHandler.h */
+    void onTimeUpdate(int32_t trackIndex, uint32_t rtpTime, uint64_t ntpTime);
+    bool addMediaTimestamp(int32_t trackIndex, const TrackInfo *track,
+            const sp<ABuffer> &accessUnit);
+    bool dataReceivedOnAllChannels();
+    void postQueueAccessUnit(size_t trackIndex, const sp<ABuffer> &accessUnit);
+    void postQueueEOS(size_t trackIndex, status_t finalResult);
+    sp<MetaData> getTrackFormat(size_t index, int32_t *timeScale);
+    void onConnected();
+    void onDisconnected(const sp<AMessage> &msg);
+
+    void schedulePollBuffering();
+    void onPollBuffering();
+
+    bool haveSufficientDataOnAllTracks();
+
+    void setEOSTimeout(bool audio, int64_t timeout);
+
+    status_t setParameters(const String8 &params);
+    status_t setParameter(const String8 &key, const String8 &value);
+    void setSocketNetwork(int64_t networkHandle);
+    static void TrimString(String8 *s);
+
+    DISALLOW_EVIL_CONSTRUCTORS(RTPSource);
+};
+
+}  // namespace android
+
+#endif  // RTP_SOURCE_H_
diff --git a/media/libmediatranscoding/.clang-format b/media/libmediatranscoding/.clang-format
index 3198d00..f23b842 100644
--- a/media/libmediatranscoding/.clang-format
+++ b/media/libmediatranscoding/.clang-format
@@ -26,4 +26,26 @@
 DerivePointerAlignment: false
 IndentWidth: 4
 PointerAlignment: Left
-TabWidth: 4
\ No newline at end of file
+TabWidth: 4
+
+# Deviations from the above file:
+# "Don't indent the section label"
+AccessModifierOffset: -4
+# "Each line of text in your code should be at most 100 columns long."
+ColumnLimit: 100
+# "Constructor initializer lists can be all on one line or with subsequent
+# lines indented eight spaces.". clang-format does not support having the colon
+# on the same line as the constructor function name, so this is the best
+# approximation of that rule, which makes all entries in the list (except the
+# first one) have an eight space indentation.
+ConstructorInitializerIndentWidth: 6
+# There is nothing in go/droidcppstyle about case labels, but there seems to be
+# more code that does not indent the case labels in frameworks/base.
+IndentCaseLabels: false
+# There have been some bugs in which subsequent formatting operations introduce
+# weird comment jumps.
+ReflowComments: false
+# Android supports C++17 now, but it seems only Cpp11 will work now.
+# "Cpp11 is a deprecated alias for Latest" according to
+# https://clang.llvm.org/docs/ClangFormatStyleOptions.html
+Standard: Cpp11
diff --git a/media/libmediatranscoding/Android.bp b/media/libmediatranscoding/Android.bp
index f948bd8..29ed65a 100644
--- a/media/libmediatranscoding/Android.bp
+++ b/media/libmediatranscoding/Android.bp
@@ -21,22 +21,35 @@
     local_include_dir: "aidl",
     srcs: [
         "aidl/android/media/IMediaTranscodingService.aidl",
-        "aidl/android/media/ITranscodingServiceClient.aidl",
+        "aidl/android/media/ITranscodingClient.aidl",
+        "aidl/android/media/ITranscodingClientCallback.aidl",
         "aidl/android/media/TranscodingErrorCode.aidl",
         "aidl/android/media/TranscodingJobPriority.aidl",
+        "aidl/android/media/TranscodingJobStats.aidl",
         "aidl/android/media/TranscodingType.aidl",
         "aidl/android/media/TranscodingVideoCodecType.aidl",
+        "aidl/android/media/TranscodingVideoTrackFormat.aidl",
         "aidl/android/media/TranscodingJobParcel.aidl",
         "aidl/android/media/TranscodingRequestParcel.aidl",
         "aidl/android/media/TranscodingResultParcel.aidl",
+        "aidl/android/media/TranscodingTestConfig.aidl",
     ],
+    backend:
+    {
+        java: {
+            enabled: true,
+        },
+    },
 }
 
 cc_library_shared {
     name: "libmediatranscoding",
 
     srcs: [
-        "TranscodingClientManager.cpp"
+        "TranscodingClientManager.cpp",
+        "TranscodingJobScheduler.cpp",
+        "TranscodingUidPolicy.cpp",
+        "TranscoderWrapper.cpp",
     ],
 
     shared_libs: [
@@ -44,6 +57,9 @@
         "libcutils",
         "liblog",
         "libutils",
+        "libmediatranscoder",
+        "libbinder",
+        "libmediandk",
     ],
 
     export_include_dirs: ["include"],
@@ -53,9 +69,13 @@
     ],
 
     cflags: [
-        "-Werror",
-        "-Wno-error=deprecated-declarations",
         "-Wall",
+        "-Werror",
+        "-Wformat",
+        "-Wno-error=deprecated-declarations",
+        "-Wthread-safety",
+        "-Wunused",
+        "-Wunreachable-code",
     ],
 
     sanitize: {
diff --git a/media/libmediatranscoding/OWNERS b/media/libmediatranscoding/OWNERS
index 02287cb..b08d573 100644
--- a/media/libmediatranscoding/OWNERS
+++ b/media/libmediatranscoding/OWNERS
@@ -1,3 +1,5 @@
-akersten@google.com
+chz@google.com
+gokrishnan@google.com
 hkuang@google.com
 lnilsson@google.com
+pawin@google.com
diff --git a/media/libmediatranscoding/TranscoderWrapper.cpp b/media/libmediatranscoding/TranscoderWrapper.cpp
new file mode 100644
index 0000000..bd03671
--- /dev/null
+++ b/media/libmediatranscoding/TranscoderWrapper.cpp
@@ -0,0 +1,469 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "TranscoderWrapper"
+
+#include <aidl/android/media/TranscodingErrorCode.h>
+#include <aidl/android/media/TranscodingRequestParcel.h>
+#include <media/MediaTranscoder.h>
+#include <media/NdkCommon.h>
+#include <media/TranscoderWrapper.h>
+#include <utils/Log.h>
+
+#include <thread>
+
+namespace android {
+using Status = ::ndk::ScopedAStatus;
+using ::aidl::android::media::TranscodingErrorCode;
+using ::aidl::android::media::TranscodingVideoCodecType;
+using ::aidl::android::media::TranscodingVideoTrackFormat;
+
+static TranscodingErrorCode toTranscodingError(media_status_t status) {
+    switch (status) {
+    case AMEDIA_OK:
+        return TranscodingErrorCode::kNoError;
+    case AMEDIACODEC_ERROR_INSUFFICIENT_RESOURCE:  // FALLTHRU
+    case AMEDIACODEC_ERROR_RECLAIMED:
+        return TranscodingErrorCode::kInsufficientResources;
+    case AMEDIA_ERROR_MALFORMED:
+        return TranscodingErrorCode::kMalformed;
+    case AMEDIA_ERROR_UNSUPPORTED:
+        return TranscodingErrorCode::kUnsupported;
+    case AMEDIA_ERROR_INVALID_OBJECT:  // FALLTHRU
+    case AMEDIA_ERROR_INVALID_PARAMETER:
+        return TranscodingErrorCode::kInvalidParameter;
+    case AMEDIA_ERROR_INVALID_OPERATION:
+        return TranscodingErrorCode::kInvalidOperation;
+    case AMEDIA_ERROR_IO:
+        return TranscodingErrorCode::kErrorIO;
+    case AMEDIA_ERROR_UNKNOWN:  // FALLTHRU
+    default:
+        return TranscodingErrorCode::kUnknown;
+    }
+}
+
+static AMediaFormat* getVideoFormat(
+        const char* originalMime,
+        const std::optional<TranscodingVideoTrackFormat>& requestedFormat) {
+    if (requestedFormat == std::nullopt) {
+        return nullptr;
+    }
+
+    AMediaFormat* format = AMediaFormat_new();
+    bool changed = false;
+    if (requestedFormat->codecType == TranscodingVideoCodecType::kHevc &&
+        strcmp(originalMime, AMEDIA_MIMETYPE_VIDEO_HEVC)) {
+        AMediaFormat_setString(format, AMEDIAFORMAT_KEY_MIME, AMEDIA_MIMETYPE_VIDEO_HEVC);
+        changed = true;
+    } else if (requestedFormat->codecType == TranscodingVideoCodecType::kAvc &&
+               strcmp(originalMime, AMEDIA_MIMETYPE_VIDEO_AVC)) {
+        AMediaFormat_setString(format, AMEDIAFORMAT_KEY_MIME, AMEDIA_MIMETYPE_VIDEO_AVC);
+        changed = true;
+    }
+    if (requestedFormat->bitrateBps > 0) {
+        AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY_BIT_RATE, requestedFormat->bitrateBps);
+        changed = true;
+    }
+    // TODO: translate other fields from requestedFormat to the format for MediaTranscoder.
+    // Also need to determine more settings to expose in TranscodingVideoTrackFormat.
+    if (!changed) {
+        AMediaFormat_delete(format);
+        // Use null format for passthru.
+        format = nullptr;
+    }
+    return format;
+}
+
+//static
+const char* TranscoderWrapper::toString(Event::Type type) {
+    switch (type) {
+    case Event::Start:
+        return "Start";
+    case Event::Pause:
+        return "Pause";
+    case Event::Resume:
+        return "Resume";
+    case Event::Stop:
+        return "Stop";
+    case Event::Finish:
+        return "Finish";
+    case Event::Error:
+        return "Error";
+    case Event::Progress:
+        return "Progress";
+    default:
+        break;
+    }
+    return "(unknown)";
+}
+
+class TranscoderWrapper::CallbackImpl : public MediaTranscoder::CallbackInterface {
+public:
+    CallbackImpl(const std::shared_ptr<TranscoderWrapper>& owner, ClientIdType clientId,
+                 JobIdType jobId)
+          : mOwner(owner), mClientId(clientId), mJobId(jobId) {}
+
+    virtual void onFinished(const MediaTranscoder* transcoder __unused) override {
+        auto owner = mOwner.lock();
+        if (owner != nullptr) {
+            owner->onFinish(mClientId, mJobId);
+        }
+    }
+
+    virtual void onError(const MediaTranscoder* transcoder __unused,
+                         media_status_t error) override {
+        auto owner = mOwner.lock();
+        if (owner != nullptr) {
+            owner->onError(mClientId, mJobId, toTranscodingError(error));
+        }
+    }
+
+    virtual void onProgressUpdate(const MediaTranscoder* transcoder __unused,
+                                  int32_t progress) override {
+        auto owner = mOwner.lock();
+        if (owner != nullptr) {
+            owner->onProgress(mClientId, mJobId, progress);
+        }
+    }
+
+    virtual void onCodecResourceLost(const MediaTranscoder* transcoder __unused,
+                                     const std::shared_ptr<const Parcel>& pausedState
+                                             __unused) override {
+        ALOGV("%s: job {%lld, %d}", __FUNCTION__, (long long)mClientId, mJobId);
+    }
+
+private:
+    std::weak_ptr<TranscoderWrapper> mOwner;
+    ClientIdType mClientId;
+    JobIdType mJobId;
+};
+
+TranscoderWrapper::TranscoderWrapper() : mCurrentClientId(0), mCurrentJobId(-1) {
+    std::thread(&TranscoderWrapper::threadLoop, this).detach();
+}
+
+void TranscoderWrapper::setCallback(const std::shared_ptr<TranscoderCallbackInterface>& cb) {
+    mCallback = cb;
+}
+
+void TranscoderWrapper::start(ClientIdType clientId, JobIdType jobId,
+                              const TranscodingRequestParcel& request,
+                              const std::shared_ptr<ITranscodingClientCallback>& clientCb) {
+    queueEvent(Event::Start, clientId, jobId, [=] {
+        TranscodingErrorCode err = handleStart(clientId, jobId, request, clientCb);
+
+        auto callback = mCallback.lock();
+        if (err != TranscodingErrorCode::kNoError) {
+            cleanup();
+
+            if (callback != nullptr) {
+                callback->onError(clientId, jobId, err);
+            }
+        } else {
+            if (callback != nullptr) {
+                callback->onStarted(clientId, jobId);
+            }
+        }
+    });
+}
+
+void TranscoderWrapper::pause(ClientIdType clientId, JobIdType jobId) {
+    queueEvent(Event::Pause, clientId, jobId, [=] {
+        TranscodingErrorCode err = handlePause(clientId, jobId);
+
+        cleanup();
+
+        auto callback = mCallback.lock();
+        if (callback != nullptr) {
+            if (err != TranscodingErrorCode::kNoError) {
+                callback->onError(clientId, jobId, err);
+            } else {
+                callback->onPaused(clientId, jobId);
+            }
+        }
+    });
+}
+
+void TranscoderWrapper::resume(ClientIdType clientId, JobIdType jobId,
+                               const TranscodingRequestParcel& request,
+                               const std::shared_ptr<ITranscodingClientCallback>& clientCb) {
+    queueEvent(Event::Resume, clientId, jobId, [=] {
+        TranscodingErrorCode err = handleResume(clientId, jobId, request, clientCb);
+
+        auto callback = mCallback.lock();
+        if (err != TranscodingErrorCode::kNoError) {
+            cleanup();
+
+            if (callback != nullptr) {
+                callback->onError(clientId, jobId, err);
+            }
+        } else {
+            if (callback != nullptr) {
+                callback->onResumed(clientId, jobId);
+            }
+        }
+    });
+}
+
+void TranscoderWrapper::stop(ClientIdType clientId, JobIdType jobId) {
+    queueEvent(Event::Stop, clientId, jobId, [=] {
+        if (mTranscoder != nullptr && clientId == mCurrentClientId && jobId == mCurrentJobId) {
+            // Cancelling the currently running job.
+            media_status_t err = mTranscoder->cancel();
+            if (err != AMEDIA_OK) {
+                ALOGE("failed to stop transcoder: %d", err);
+            } else {
+                ALOGI("transcoder stopped");
+            }
+            cleanup();
+        } else {
+            // For jobs that's not currently running, release any pausedState for the job.
+            mPausedStateMap.erase(JobKeyType(clientId, jobId));
+        }
+        // No callback needed for stop.
+    });
+}
+
+void TranscoderWrapper::onFinish(ClientIdType clientId, JobIdType jobId) {
+    queueEvent(Event::Finish, clientId, jobId, [=] {
+        if (mTranscoder != nullptr && clientId == mCurrentClientId && jobId == mCurrentJobId) {
+            cleanup();
+        }
+
+        auto callback = mCallback.lock();
+        if (callback != nullptr) {
+            callback->onFinish(clientId, jobId);
+        }
+    });
+}
+
+void TranscoderWrapper::onError(ClientIdType clientId, JobIdType jobId,
+                                TranscodingErrorCode error) {
+    queueEvent(Event::Error, clientId, jobId, [=] {
+        if (mTranscoder != nullptr && clientId == mCurrentClientId && jobId == mCurrentJobId) {
+            cleanup();
+        }
+
+        auto callback = mCallback.lock();
+        if (callback != nullptr) {
+            callback->onError(clientId, jobId, error);
+        }
+    });
+}
+
+void TranscoderWrapper::onProgress(ClientIdType clientId, JobIdType jobId, int32_t progress) {
+    queueEvent(Event::Progress, clientId, jobId, [=] {
+        auto callback = mCallback.lock();
+        if (callback != nullptr) {
+            callback->onProgressUpdate(clientId, jobId, progress);
+        }
+    });
+}
+
+TranscodingErrorCode TranscoderWrapper::setupTranscoder(
+        ClientIdType clientId, JobIdType jobId, const TranscodingRequestParcel& request,
+        const std::shared_ptr<ITranscodingClientCallback>& clientCb,
+        const std::shared_ptr<const Parcel>& pausedState) {
+    if (clientCb == nullptr) {
+        ALOGE("client callback is null");
+        return TranscodingErrorCode::kInvalidParameter;
+    }
+
+    if (mTranscoder != nullptr) {
+        ALOGE("transcoder already running");
+        return TranscodingErrorCode::kInvalidOperation;
+    }
+
+    Status status;
+    ::ndk::ScopedFileDescriptor srcFd, dstFd;
+    status = clientCb->openFileDescriptor(request.sourceFilePath, "r", &srcFd);
+    if (!status.isOk() || srcFd.get() < 0) {
+        ALOGE("failed to open source");
+        return TranscodingErrorCode::kErrorIO;
+    }
+
+    // Open dest file with "rw", as the transcoder could potentially reuse part of it
+    // for resume case. We might want the further differentiate and open with "w" only
+    // for start.
+    status = clientCb->openFileDescriptor(request.destinationFilePath, "rw", &dstFd);
+    if (!status.isOk() || dstFd.get() < 0) {
+        ALOGE("failed to open destination");
+        return TranscodingErrorCode::kErrorIO;
+    }
+
+    mCurrentClientId = clientId;
+    mCurrentJobId = jobId;
+    mTranscoderCb = std::make_shared<CallbackImpl>(shared_from_this(), clientId, jobId);
+    mTranscoder = MediaTranscoder::create(mTranscoderCb, pausedState);
+    if (mTranscoder == nullptr) {
+        ALOGE("failed to create transcoder");
+        return TranscodingErrorCode::kUnknown;
+    }
+
+    media_status_t err = mTranscoder->configureSource(srcFd.get());
+    if (err != AMEDIA_OK) {
+        ALOGE("failed to configure source: %d", err);
+        return toTranscodingError(err);
+    }
+
+    std::vector<std::shared_ptr<AMediaFormat>> trackFormats = mTranscoder->getTrackFormats();
+    if (trackFormats.size() == 0) {
+        ALOGE("failed to get track formats!");
+        return TranscodingErrorCode::kMalformed;
+    }
+
+    for (int i = 0; i < trackFormats.size(); ++i) {
+        AMediaFormat* format = nullptr;
+        const char* mime = nullptr;
+        AMediaFormat_getString(trackFormats[i].get(), AMEDIAFORMAT_KEY_MIME, &mime);
+
+        if (!strncmp(mime, "video/", 6)) {
+            format = getVideoFormat(mime, request.requestedVideoTrackFormat);
+        }
+
+        err = mTranscoder->configureTrackFormat(i, format);
+        if (format != nullptr) {
+            AMediaFormat_delete(format);
+        }
+        if (err != AMEDIA_OK) {
+            ALOGE("failed to configure track format for track %d: %d", i, err);
+            return toTranscodingError(err);
+        }
+    }
+
+    err = mTranscoder->configureDestination(dstFd.get());
+    if (err != AMEDIA_OK) {
+        ALOGE("failed to configure dest: %d", err);
+        return toTranscodingError(err);
+    }
+
+    return TranscodingErrorCode::kNoError;
+}
+
+TranscodingErrorCode TranscoderWrapper::handleStart(
+        ClientIdType clientId, JobIdType jobId, const TranscodingRequestParcel& request,
+        const std::shared_ptr<ITranscodingClientCallback>& clientCb) {
+    ALOGI("setting up transcoder for start");
+    TranscodingErrorCode err = setupTranscoder(clientId, jobId, request, clientCb);
+    if (err != TranscodingErrorCode::kNoError) {
+        ALOGI("%s: failed to setup transcoder", __FUNCTION__);
+        return err;
+    }
+
+    media_status_t status = mTranscoder->start();
+    if (status != AMEDIA_OK) {
+        ALOGE("%s: failed to start transcoder: %d", __FUNCTION__, err);
+        return toTranscodingError(status);
+    }
+
+    ALOGI("%s: transcoder started", __FUNCTION__);
+    return TranscodingErrorCode::kNoError;
+}
+
+TranscodingErrorCode TranscoderWrapper::handlePause(ClientIdType clientId, JobIdType jobId) {
+    if (mTranscoder == nullptr) {
+        ALOGE("%s: transcoder is not running", __FUNCTION__);
+        return TranscodingErrorCode::kInvalidOperation;
+    }
+
+    if (clientId != mCurrentClientId || jobId != mCurrentJobId) {
+        ALOGW("%s: stopping job {%lld, %d} that's not current job {%lld, %d}", __FUNCTION__,
+              (long long)clientId, jobId, (long long)mCurrentClientId, mCurrentJobId);
+    }
+
+    std::shared_ptr<const Parcel> pauseStates;
+    media_status_t err = mTranscoder->pause(&pauseStates);
+    if (err != AMEDIA_OK) {
+        ALOGE("%s: failed to pause transcoder: %d", __FUNCTION__, err);
+        return toTranscodingError(err);
+    }
+    mPausedStateMap[JobKeyType(clientId, jobId)] = pauseStates;
+
+    ALOGI("%s: transcoder paused", __FUNCTION__);
+    return TranscodingErrorCode::kNoError;
+}
+
+TranscodingErrorCode TranscoderWrapper::handleResume(
+        ClientIdType clientId, JobIdType jobId, const TranscodingRequestParcel& request,
+        const std::shared_ptr<ITranscodingClientCallback>& clientCb) {
+    std::shared_ptr<const Parcel> pausedState;
+    auto it = mPausedStateMap.find(JobKeyType(clientId, jobId));
+    if (it != mPausedStateMap.end()) {
+        pausedState = it->second;
+        mPausedStateMap.erase(it);
+    } else {
+        ALOGE("%s: can't find paused state", __FUNCTION__);
+        return TranscodingErrorCode::kInvalidOperation;
+    }
+
+    ALOGI("setting up transcoder for resume");
+    TranscodingErrorCode err = setupTranscoder(clientId, jobId, request, clientCb, pausedState);
+    if (err != TranscodingErrorCode::kNoError) {
+        ALOGE("%s: failed to setup transcoder", __FUNCTION__);
+        return err;
+    }
+
+    media_status_t status = mTranscoder->resume();
+    if (status != AMEDIA_OK) {
+        ALOGE("%s: failed to resume transcoder: %d", __FUNCTION__, err);
+        return toTranscodingError(status);
+    }
+
+    ALOGI("%s: transcoder resumed", __FUNCTION__);
+    return TranscodingErrorCode::kNoError;
+}
+
+void TranscoderWrapper::cleanup() {
+    mCurrentClientId = 0;
+    mCurrentJobId = -1;
+    mTranscoderCb = nullptr;
+    mTranscoder = nullptr;
+}
+
+void TranscoderWrapper::queueEvent(Event::Type type, ClientIdType clientId, JobIdType jobId,
+                                   const std::function<void()> runnable) {
+    ALOGV("%s: job {%lld, %d}: %s", __FUNCTION__, (long long)clientId, jobId, toString(type));
+
+    std::scoped_lock lock{mLock};
+
+    mQueue.push_back({type, clientId, jobId, runnable});
+    mCondition.notify_one();
+}
+
+void TranscoderWrapper::threadLoop() {
+    std::unique_lock<std::mutex> lock{mLock};
+    // TranscoderWrapper currently lives in the transcoding service, as long as
+    // MediaTranscodingService itself.
+    while (true) {
+        // Wait for the next event.
+        while (mQueue.empty()) {
+            mCondition.wait(lock);
+        }
+
+        Event event = *mQueue.begin();
+        mQueue.pop_front();
+
+        ALOGD("%s: job {%lld, %d}: %s", __FUNCTION__, (long long)event.clientId, event.jobId,
+              toString(event.type));
+
+        lock.unlock();
+        event.runnable();
+        lock.lock();
+    }
+}
+
+}  // namespace android
diff --git a/media/libmediatranscoding/TranscodingClientManager.cpp b/media/libmediatranscoding/TranscodingClientManager.cpp
index 7252437..ce3ac13 100644
--- a/media/libmediatranscoding/TranscodingClientManager.cpp
+++ b/media/libmediatranscoding/TranscodingClientManager.cpp
@@ -17,31 +17,203 @@
 // #define LOG_NDEBUG 0
 #define LOG_TAG "TranscodingClientManager"
 
+#include <aidl/android/media/BnTranscodingClient.h>
+#include <aidl/android/media/IMediaTranscodingService.h>
+#include <android/binder_ibinder.h>
 #include <inttypes.h>
 #include <media/TranscodingClientManager.h>
+#include <media/TranscodingRequest.h>
 #include <utils/Log.h>
-
 namespace android {
 
-using Status = ::ndk::ScopedAStatus;
+static_assert(sizeof(ClientIdType) == sizeof(void*), "ClientIdType should be pointer-sized");
 
-// static
-TranscodingClientManager& TranscodingClientManager::getInstance() {
-    static TranscodingClientManager gInstance{};
-    return gInstance;
+using ::aidl::android::media::BnTranscodingClient;
+using ::aidl::android::media::IMediaTranscodingService;  // For service error codes
+using ::aidl::android::media::TranscodingJobParcel;
+using ::aidl::android::media::TranscodingRequestParcel;
+using Status = ::ndk::ScopedAStatus;
+using ::ndk::SpAIBinder;
+
+//static
+std::atomic<ClientIdType> TranscodingClientManager::sCookieCounter = 0;
+//static
+std::mutex TranscodingClientManager::sCookie2ClientLock;
+//static
+std::map<ClientIdType, std::shared_ptr<TranscodingClientManager::ClientImpl>>
+        TranscodingClientManager::sCookie2Client;
+///////////////////////////////////////////////////////////////////////////////
+
+/**
+ * ClientImpl implements a single client and contains all its information.
+ */
+struct TranscodingClientManager::ClientImpl : public BnTranscodingClient {
+    /* The remote client callback that this ClientInfo is associated with.
+     * Once the ClientInfo is created, we hold an SpAIBinder so that the binder
+     * object doesn't get created again, otherwise the binder object pointer
+     * may not be unique.
+     */
+    SpAIBinder mClientBinder;
+    std::shared_ptr<ITranscodingClientCallback> mClientCallback;
+    /* A unique id assigned to the client by the service. This number is used
+     * by the service for indexing. Here we use the binder object's pointer
+     * (casted to int64t_t) as the client id.
+     */
+    ClientIdType mClientId;
+    pid_t mClientPid;
+    uid_t mClientUid;
+    std::string mClientName;
+    std::string mClientOpPackageName;
+
+    // Next jobId to assign.
+    std::atomic<int32_t> mNextJobId;
+    // Whether this client has been unregistered already.
+    std::atomic<bool> mAbandoned;
+    // Weak pointer to the client manager for this client.
+    std::weak_ptr<TranscodingClientManager> mOwner;
+
+    ClientImpl(const std::shared_ptr<ITranscodingClientCallback>& callback, pid_t pid, uid_t uid,
+               const std::string& clientName, const std::string& opPackageName,
+               const std::weak_ptr<TranscodingClientManager>& owner);
+
+    Status submitRequest(const TranscodingRequestParcel& /*in_request*/,
+                         TranscodingJobParcel* /*out_job*/, bool* /*_aidl_return*/) override;
+
+    Status cancelJob(int32_t /*in_jobId*/, bool* /*_aidl_return*/) override;
+
+    Status getJobWithId(int32_t /*in_jobId*/, TranscodingJobParcel* /*out_job*/,
+                        bool* /*_aidl_return*/) override;
+
+    Status unregister() override;
+};
+
+TranscodingClientManager::ClientImpl::ClientImpl(
+        const std::shared_ptr<ITranscodingClientCallback>& callback, pid_t pid, uid_t uid,
+        const std::string& clientName, const std::string& opPackageName,
+        const std::weak_ptr<TranscodingClientManager>& owner)
+      : mClientBinder((callback != nullptr) ? callback->asBinder() : nullptr),
+        mClientCallback(callback),
+        mClientId(sCookieCounter.fetch_add(1, std::memory_order_relaxed)),
+        mClientPid(pid),
+        mClientUid(uid),
+        mClientName(clientName),
+        mClientOpPackageName(opPackageName),
+        mNextJobId(0),
+        mAbandoned(false),
+        mOwner(owner) {}
+
+Status TranscodingClientManager::ClientImpl::submitRequest(
+        const TranscodingRequestParcel& in_request, TranscodingJobParcel* out_job,
+        bool* _aidl_return) {
+    *_aidl_return = false;
+
+    std::shared_ptr<TranscodingClientManager> owner;
+    if (mAbandoned || (owner = mOwner.lock()) == nullptr) {
+        return Status::fromServiceSpecificError(IMediaTranscodingService::ERROR_DISCONNECTED);
+    }
+
+    if (in_request.sourceFilePath.empty() || in_request.destinationFilePath.empty()) {
+        // This is the only error we check for now.
+        return Status::ok();
+    }
+
+    int32_t jobId = mNextJobId.fetch_add(1);
+
+    *_aidl_return =
+            owner->mJobScheduler->submit(mClientId, jobId, mClientUid, in_request, mClientCallback);
+
+    if (*_aidl_return) {
+        out_job->jobId = jobId;
+
+        // TODO(chz): is some of this coming from JobScheduler?
+        *(TranscodingRequest*)&out_job->request = in_request;
+        out_job->awaitNumberOfJobs = 0;
+    }
+
+    return Status::ok();
 }
 
+Status TranscodingClientManager::ClientImpl::cancelJob(int32_t in_jobId, bool* _aidl_return) {
+    *_aidl_return = false;
+
+    std::shared_ptr<TranscodingClientManager> owner;
+    if (mAbandoned || (owner = mOwner.lock()) == nullptr) {
+        return Status::fromServiceSpecificError(IMediaTranscodingService::ERROR_DISCONNECTED);
+    }
+
+    if (in_jobId < 0) {
+        return Status::ok();
+    }
+
+    *_aidl_return = owner->mJobScheduler->cancel(mClientId, in_jobId);
+    return Status::ok();
+}
+
+Status TranscodingClientManager::ClientImpl::getJobWithId(int32_t in_jobId,
+                                                          TranscodingJobParcel* out_job,
+                                                          bool* _aidl_return) {
+    *_aidl_return = false;
+
+    std::shared_ptr<TranscodingClientManager> owner;
+    if (mAbandoned || (owner = mOwner.lock()) == nullptr) {
+        return Status::fromServiceSpecificError(IMediaTranscodingService::ERROR_DISCONNECTED);
+    }
+
+    if (in_jobId < 0) {
+        return Status::ok();
+    }
+
+    *_aidl_return = owner->mJobScheduler->getJob(mClientId, in_jobId, &out_job->request);
+
+    if (*_aidl_return) {
+        out_job->jobId = in_jobId;
+        out_job->awaitNumberOfJobs = 0;
+    }
+    return Status::ok();
+}
+
+Status TranscodingClientManager::ClientImpl::unregister() {
+    bool abandoned = mAbandoned.exchange(true);
+
+    std::shared_ptr<TranscodingClientManager> owner;
+    if (abandoned || (owner = mOwner.lock()) == nullptr) {
+        return Status::fromServiceSpecificError(IMediaTranscodingService::ERROR_DISCONNECTED);
+    }
+
+    // Use jobId == -1 to cancel all realtime jobs for this client with the scheduler.
+    owner->mJobScheduler->cancel(mClientId, -1);
+    owner->removeClient(mClientId);
+
+    return Status::ok();
+}
+
+///////////////////////////////////////////////////////////////////////////////
+
 // static
 void TranscodingClientManager::BinderDiedCallback(void* cookie) {
-    int32_t clientId = static_cast<int32_t>(reinterpret_cast<intptr_t>(cookie));
-    ALOGD("Client %" PRId32 " is dead", clientId);
-    // Don't check for pid validity since we know it's already dead.
-    TranscodingClientManager& manager = TranscodingClientManager::getInstance();
-    manager.removeClient(clientId);
+    ClientIdType clientId = reinterpret_cast<ClientIdType>(cookie);
+
+    ALOGD("Client %lld is dead", (long long)clientId);
+
+    std::shared_ptr<ClientImpl> client;
+
+    {
+        std::scoped_lock lock{sCookie2ClientLock};
+
+        auto it = sCookie2Client.find(clientId);
+        if (it != sCookie2Client.end()) {
+            client = it->second;
+        }
+    }
+
+    if (client != nullptr) {
+        client->unregister();
+    }
 }
 
-TranscodingClientManager::TranscodingClientManager()
-    : mDeathRecipient(AIBinder_DeathRecipient_new(BinderDiedCallback)) {
+TranscodingClientManager::TranscodingClientManager(
+        const std::shared_ptr<SchedulerClientInterface>& scheduler)
+      : mDeathRecipient(AIBinder_DeathRecipient_new(BinderDiedCallback)), mJobScheduler(scheduler) {
     ALOGD("TranscodingClientManager started");
 }
 
@@ -49,97 +221,108 @@
     ALOGD("TranscodingClientManager exited");
 }
 
-bool TranscodingClientManager::isClientIdRegistered(int32_t clientId) const {
-    std::scoped_lock lock{mLock};
-    return mClientIdToClientInfoMap.find(clientId) != mClientIdToClientInfoMap.end();
-}
-
 void TranscodingClientManager::dumpAllClients(int fd, const Vector<String16>& args __unused) {
     String8 result;
 
     const size_t SIZE = 256;
     char buffer[SIZE];
+    std::scoped_lock lock{mLock};
 
-    snprintf(buffer, SIZE, "    Total num of Clients: %zu\n", mClientIdToClientInfoMap.size());
+    snprintf(buffer, SIZE, "    Total num of Clients: %zu\n", mClientIdToClientMap.size());
     result.append(buffer);
 
-    if (mClientIdToClientInfoMap.size() > 0) {
+    if (mClientIdToClientMap.size() > 0) {
         snprintf(buffer, SIZE, "========== Dumping all clients =========\n");
         result.append(buffer);
     }
 
-    for (const auto& iter : mClientIdToClientInfoMap) {
-        const std::shared_ptr<ITranscodingServiceClient> client = iter.second->mClient;
-        std::string clientName;
-        Status status = client->getName(&clientName);
-        if (!status.isOk()) {
-            ALOGE("Failed to get client: %d information", iter.first);
-            continue;
-        }
-        snprintf(buffer, SIZE, "    -- Clients: %d  name: %s\n", iter.first, clientName.c_str());
+    for (const auto& iter : mClientIdToClientMap) {
+        snprintf(buffer, SIZE, "    -- Client id: %lld  name: %s\n", (long long)iter.first,
+                 iter.second->mClientName.c_str());
         result.append(buffer);
     }
 
     write(fd, result.string(), result.size());
 }
 
-status_t TranscodingClientManager::addClient(std::unique_ptr<ClientInfo> client) {
+status_t TranscodingClientManager::addClient(
+        const std::shared_ptr<ITranscodingClientCallback>& callback, pid_t pid, uid_t uid,
+        const std::string& clientName, const std::string& opPackageName,
+        std::shared_ptr<ITranscodingClient>* outClient) {
     // Validate the client.
-    if (client == nullptr || client->mClientId < 0 || client->mClientPid < 0 ||
-        client->mClientUid < 0 || client->mClientOpPackageName.empty() ||
-        client->mClientOpPackageName == "") {
+    if (callback == nullptr || pid < 0 || clientName.empty() || opPackageName.empty()) {
         ALOGE("Invalid client");
-        return BAD_VALUE;
+        return IMediaTranscodingService::ERROR_ILLEGAL_ARGUMENT;
     }
 
+    SpAIBinder binder = callback->asBinder();
+
     std::scoped_lock lock{mLock};
 
-    // Check if the client already exists.
-    if (mClientIdToClientInfoMap.count(client->mClientId) != 0) {
-        ALOGW("Client already exists.");
-        return ALREADY_EXISTS;
+    // Checks if the client already registers.
+    if (mRegisteredCallbacks.count((uintptr_t)binder.get()) > 0) {
+        return IMediaTranscodingService::ERROR_ALREADY_EXISTS;
     }
 
-    ALOGD("Adding client id %d pid: %d uid: %d %s", client->mClientId, client->mClientPid,
-          client->mClientUid, client->mClientOpPackageName.c_str());
+    // Creates the client and uses its process id as client id.
+    std::shared_ptr<ClientImpl> client = ::ndk::SharedRefBase::make<ClientImpl>(
+            callback, pid, uid, clientName, opPackageName, shared_from_this());
 
-    AIBinder_linkToDeath(client->mClient->asBinder().get(), mDeathRecipient.get(),
+    ALOGD("Adding client id %lld, pid %d, uid %d, name %s, package %s",
+          (long long)client->mClientId, client->mClientPid, client->mClientUid,
+          client->mClientName.c_str(), client->mClientOpPackageName.c_str());
+
+    {
+        std::scoped_lock lock{sCookie2ClientLock};
+        sCookie2Client.emplace(std::make_pair(client->mClientId, client));
+    }
+
+    AIBinder_linkToDeath(binder.get(), mDeathRecipient.get(),
                          reinterpret_cast<void*>(client->mClientId));
 
     // Adds the new client to the map.
-    mClientIdToClientInfoMap[client->mClientId] = std::move(client);
+    mRegisteredCallbacks.insert((uintptr_t)binder.get());
+    mClientIdToClientMap[client->mClientId] = client;
+
+    *outClient = client;
 
     return OK;
 }
 
-status_t TranscodingClientManager::removeClient(int32_t clientId) {
-    ALOGD("Removing client id %d", clientId);
+status_t TranscodingClientManager::removeClient(ClientIdType clientId) {
+    ALOGD("Removing client id %lld", (long long)clientId);
     std::scoped_lock lock{mLock};
 
     // Checks if the client is valid.
-    auto it = mClientIdToClientInfoMap.find(clientId);
-    if (it == mClientIdToClientInfoMap.end()) {
-        ALOGE("Client id %d does not exist", clientId);
-        return INVALID_OPERATION;
+    auto it = mClientIdToClientMap.find(clientId);
+    if (it == mClientIdToClientMap.end()) {
+        ALOGE("Client id %lld does not exist", (long long)clientId);
+        return IMediaTranscodingService::ERROR_INVALID_OPERATION;
     }
 
-    std::shared_ptr<ITranscodingServiceClient> client = it->second->mClient;
+    SpAIBinder binder = it->second->mClientBinder;
 
     // Check if the client still live. If alive, unlink the death.
-    if (client) {
-        AIBinder_unlinkToDeath(client->asBinder().get(), mDeathRecipient.get(),
-                               reinterpret_cast<void*>(clientId));
+    if (binder.get() != nullptr) {
+        AIBinder_unlinkToDeath(binder.get(), mDeathRecipient.get(),
+                               reinterpret_cast<void*>(it->second->mClientId));
+    }
+
+    {
+        std::scoped_lock lock{sCookie2ClientLock};
+        sCookie2Client.erase(it->second->mClientId);
     }
 
     // Erase the entry.
-    mClientIdToClientInfoMap.erase(it);
+    mClientIdToClientMap.erase(it);
+    mRegisteredCallbacks.erase((uintptr_t)binder.get());
 
     return OK;
 }
 
 size_t TranscodingClientManager::getNumOfClients() const {
     std::scoped_lock lock{mLock};
-    return mClientIdToClientInfoMap.size();
+    return mClientIdToClientMap.size();
 }
 
 }  // namespace android
diff --git a/media/libmediatranscoding/TranscodingJobScheduler.cpp b/media/libmediatranscoding/TranscodingJobScheduler.cpp
new file mode 100644
index 0000000..3e4f319
--- /dev/null
+++ b/media/libmediatranscoding/TranscodingJobScheduler.cpp
@@ -0,0 +1,477 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "TranscodingJobScheduler"
+
+#define VALIDATE_STATE 1
+
+#include <inttypes.h>
+#include <media/TranscodingJobScheduler.h>
+#include <utils/Log.h>
+
+#include <utility>
+
+namespace android {
+
+static_assert((JobIdType)-1 < 0, "JobIdType should be signed");
+
+constexpr static uid_t OFFLINE_UID = -1;
+
+//static
+String8 TranscodingJobScheduler::jobToString(const JobKeyType& jobKey) {
+    return String8::format("{client:%lld, job:%d}", (long long)jobKey.first, jobKey.second);
+}
+
+TranscodingJobScheduler::TranscodingJobScheduler(
+        const std::shared_ptr<TranscoderInterface>& transcoder,
+        const std::shared_ptr<UidPolicyInterface>& uidPolicy)
+      : mTranscoder(transcoder), mUidPolicy(uidPolicy), mCurrentJob(nullptr), mResourceLost(false) {
+    // Only push empty offline queue initially. Realtime queues are added when requests come in.
+    mUidSortedList.push_back(OFFLINE_UID);
+    mOfflineUidIterator = mUidSortedList.begin();
+    mJobQueues.emplace(OFFLINE_UID, JobQueueType());
+}
+
+TranscodingJobScheduler::~TranscodingJobScheduler() {}
+
+TranscodingJobScheduler::Job* TranscodingJobScheduler::getTopJob_l() {
+    if (mJobMap.empty()) {
+        return nullptr;
+    }
+    uid_t topUid = *mUidSortedList.begin();
+    JobKeyType topJobKey = *mJobQueues[topUid].begin();
+    return &mJobMap[topJobKey];
+}
+
+void TranscodingJobScheduler::updateCurrentJob_l() {
+    Job* topJob = getTopJob_l();
+    Job* curJob = mCurrentJob;
+    ALOGV("updateCurrentJob: topJob is %s, curJob is %s",
+          topJob == nullptr ? "null" : jobToString(topJob->key).c_str(),
+          curJob == nullptr ? "null" : jobToString(curJob->key).c_str());
+
+    // If we found a topJob that should be run, and it's not already running,
+    // take some actions to ensure it's running.
+    if (topJob != nullptr && (topJob != curJob || topJob->state != Job::RUNNING)) {
+        // If another job is currently running, pause it first.
+        if (curJob != nullptr && curJob->state == Job::RUNNING) {
+            mTranscoder->pause(curJob->key.first, curJob->key.second);
+            curJob->state = Job::PAUSED;
+        }
+        // If we are not experiencing resource loss, we can start or resume
+        // the topJob now.
+        if (!mResourceLost) {
+            if (topJob->state == Job::NOT_STARTED) {
+                mTranscoder->start(topJob->key.first, topJob->key.second, topJob->request,
+                                   topJob->callback.lock());
+            } else if (topJob->state == Job::PAUSED) {
+                mTranscoder->resume(topJob->key.first, topJob->key.second, topJob->request,
+                                    topJob->callback.lock());
+            }
+            topJob->state = Job::RUNNING;
+        }
+    }
+    mCurrentJob = topJob;
+}
+
+void TranscodingJobScheduler::removeJob_l(const JobKeyType& jobKey) {
+    ALOGV("%s: job %s", __FUNCTION__, jobToString(jobKey).c_str());
+
+    if (mJobMap.count(jobKey) == 0) {
+        ALOGE("job %s doesn't exist", jobToString(jobKey).c_str());
+        return;
+    }
+
+    // Remove job from uid's queue.
+    const uid_t uid = mJobMap[jobKey].uid;
+    JobQueueType& jobQueue = mJobQueues[uid];
+    auto it = std::find(jobQueue.begin(), jobQueue.end(), jobKey);
+    if (it == jobQueue.end()) {
+        ALOGE("couldn't find job %s in queue for uid %d", jobToString(jobKey).c_str(), uid);
+        return;
+    }
+    jobQueue.erase(it);
+
+    // If this is the last job in a real-time queue, remove this uid's queue.
+    if (uid != OFFLINE_UID && jobQueue.empty()) {
+        mUidSortedList.remove(uid);
+        mJobQueues.erase(uid);
+        mUidPolicy->unregisterMonitorUid(uid);
+
+        std::unordered_set<uid_t> topUids = mUidPolicy->getTopUids();
+        moveUidsToTop_l(topUids, false /*preserveTopUid*/);
+    }
+
+    // Clear current job.
+    if (mCurrentJob == &mJobMap[jobKey]) {
+        mCurrentJob = nullptr;
+    }
+
+    // Remove job from job map.
+    mJobMap.erase(jobKey);
+}
+
+/**
+ * Moves the set of uids to the front of mUidSortedList (which is used to pick
+ * the next job to run).
+ *
+ * This is called when 1) we received a onTopUidsChanged() callbcak from UidPolicy,
+ * or 2) we removed the job queue for a uid because it becomes empty.
+ *
+ * In case of 1), if there are multiple uids in the set, and the current front
+ * uid in mUidSortedList is still in the set, we try to keep that uid at front
+ * so that current job run is not interrupted. (This is not a concern for case 2)
+ * because the queue for a uid was just removed entirely.)
+ */
+void TranscodingJobScheduler::moveUidsToTop_l(const std::unordered_set<uid_t>& uids,
+                                              bool preserveTopUid) {
+    // If uid set is empty, nothing to do. Do not change the queue status.
+    if (uids.empty()) {
+        return;
+    }
+
+    // Save the current top uid.
+    uid_t curTopUid = *mUidSortedList.begin();
+    bool pushCurTopToFront = false;
+    int32_t numUidsMoved = 0;
+
+    // Go through the sorted uid list once, and move the ones in top set to front.
+    for (auto it = mUidSortedList.begin(); it != mUidSortedList.end();) {
+        uid_t uid = *it;
+
+        if (uid != OFFLINE_UID && uids.count(uid) > 0) {
+            it = mUidSortedList.erase(it);
+
+            // If this is the top we're preserving, don't push it here, push
+            // it after the for-loop.
+            if (uid == curTopUid && preserveTopUid) {
+                pushCurTopToFront = true;
+            } else {
+                mUidSortedList.push_front(uid);
+            }
+
+            // If we found all uids in the set, break out.
+            if (++numUidsMoved == uids.size()) {
+                break;
+            }
+        } else {
+            ++it;
+        }
+    }
+
+    if (pushCurTopToFront) {
+        mUidSortedList.push_front(curTopUid);
+    }
+}
+
+bool TranscodingJobScheduler::submit(ClientIdType clientId, JobIdType jobId, uid_t uid,
+                                     const TranscodingRequestParcel& request,
+                                     const std::weak_ptr<ITranscodingClientCallback>& callback) {
+    JobKeyType jobKey = std::make_pair(clientId, jobId);
+
+    ALOGV("%s: job %s, uid %d, prioirty %d", __FUNCTION__, jobToString(jobKey).c_str(), uid,
+          (int32_t)request.priority);
+
+    std::scoped_lock lock{mLock};
+
+    if (mJobMap.count(jobKey) > 0) {
+        ALOGE("job %s already exists", jobToString(jobKey).c_str());
+        return false;
+    }
+
+    // TODO(chz): only support offline vs real-time for now. All kUnspecified jobs
+    // go to offline queue.
+    if (request.priority == TranscodingJobPriority::kUnspecified) {
+        uid = OFFLINE_UID;
+    }
+
+    // Add job to job map.
+    mJobMap[jobKey].key = jobKey;
+    mJobMap[jobKey].uid = uid;
+    mJobMap[jobKey].state = Job::NOT_STARTED;
+    mJobMap[jobKey].request = request;
+    mJobMap[jobKey].callback = callback;
+
+    // If it's an offline job, the queue was already added in constructor.
+    // If it's a real-time jobs, check if a queue is already present for the uid,
+    // and add a new queue if needed.
+    if (uid != OFFLINE_UID) {
+        if (mJobQueues.count(uid) == 0) {
+            mUidPolicy->registerMonitorUid(uid);
+            if (mUidPolicy->isUidOnTop(uid)) {
+                mUidSortedList.push_front(uid);
+            } else {
+                // Shouldn't be submitting real-time requests from non-top app,
+                // put it in front of the offline queue.
+                mUidSortedList.insert(mOfflineUidIterator, uid);
+            }
+        } else if (uid != *mUidSortedList.begin()) {
+            if (mUidPolicy->isUidOnTop(uid)) {
+                mUidSortedList.remove(uid);
+                mUidSortedList.push_front(uid);
+            }
+        }
+    }
+    // Append this job to the uid's queue.
+    mJobQueues[uid].push_back(jobKey);
+
+    updateCurrentJob_l();
+
+    validateState_l();
+    return true;
+}
+
+bool TranscodingJobScheduler::cancel(ClientIdType clientId, JobIdType jobId) {
+    JobKeyType jobKey = std::make_pair(clientId, jobId);
+
+    ALOGV("%s: job %s", __FUNCTION__, jobToString(jobKey).c_str());
+
+    std::list<JobKeyType> jobsToRemove;
+
+    std::scoped_lock lock{mLock};
+
+    if (jobId < 0) {
+        for (auto it = mJobMap.begin(); it != mJobMap.end(); ++it) {
+            if (it->first.first == clientId && it->second.uid != OFFLINE_UID) {
+                jobsToRemove.push_back(it->first);
+            }
+        }
+    } else {
+        if (mJobMap.count(jobKey) == 0) {
+            ALOGE("job %s doesn't exist", jobToString(jobKey).c_str());
+            return false;
+        }
+        jobsToRemove.push_back(jobKey);
+    }
+
+    for (auto it = jobsToRemove.begin(); it != jobsToRemove.end(); ++it) {
+        // If the job has ever been started, stop it now.
+        // Note that stop() is needed even if the job is currently paused. This instructs
+        // the transcoder to discard any states for the job, otherwise the states may
+        // never be discarded.
+        if (mJobMap[*it].state != Job::NOT_STARTED) {
+            mTranscoder->stop(it->first, it->second);
+        }
+
+        // Remove the job.
+        removeJob_l(*it);
+    }
+
+    // Start next job.
+    updateCurrentJob_l();
+
+    validateState_l();
+    return true;
+}
+
+bool TranscodingJobScheduler::getJob(ClientIdType clientId, JobIdType jobId,
+                                     TranscodingRequestParcel* request) {
+    JobKeyType jobKey = std::make_pair(clientId, jobId);
+
+    std::scoped_lock lock{mLock};
+
+    if (mJobMap.count(jobKey) == 0) {
+        ALOGE("job %s doesn't exist", jobToString(jobKey).c_str());
+        return false;
+    }
+
+    *(TranscodingRequest*)request = mJobMap[jobKey].request;
+    return true;
+}
+
+void TranscodingJobScheduler::notifyClient(ClientIdType clientId, JobIdType jobId,
+                                           const char* reason,
+                                           std::function<void(const JobKeyType&)> func) {
+    JobKeyType jobKey = std::make_pair(clientId, jobId);
+
+    std::scoped_lock lock{mLock};
+
+    if (mJobMap.count(jobKey) == 0) {
+        ALOGW("%s: ignoring %s for job %s that doesn't exist", __FUNCTION__, reason,
+              jobToString(jobKey).c_str());
+        return;
+    }
+
+    // Only ignore if job was never started. In particular, propagate the status
+    // to client if the job is paused. Transcoder could have posted finish when
+    // we're pausing it, and the finish arrived after we changed current job.
+    if (mJobMap[jobKey].state == Job::NOT_STARTED) {
+        ALOGW("%s: ignoring %s for job %s that was never started", __FUNCTION__, reason,
+              jobToString(jobKey).c_str());
+        return;
+    }
+
+    ALOGV("%s: job %s %s", __FUNCTION__, jobToString(jobKey).c_str(), reason);
+    func(jobKey);
+}
+
+void TranscodingJobScheduler::onStarted(ClientIdType clientId, JobIdType jobId) {
+    notifyClient(clientId, jobId, "started", [=](const JobKeyType& jobKey) {
+        auto callback = mJobMap[jobKey].callback.lock();
+        if (callback != nullptr) {
+            callback->onTranscodingStarted(jobId);
+        }
+    });
+}
+
+void TranscodingJobScheduler::onPaused(ClientIdType clientId, JobIdType jobId) {
+    notifyClient(clientId, jobId, "paused", [=](const JobKeyType& jobKey) {
+        auto callback = mJobMap[jobKey].callback.lock();
+        if (callback != nullptr) {
+            callback->onTranscodingPaused(jobId);
+        }
+    });
+}
+
+void TranscodingJobScheduler::onResumed(ClientIdType clientId, JobIdType jobId) {
+    notifyClient(clientId, jobId, "resumed", [=](const JobKeyType& jobKey) {
+        auto callback = mJobMap[jobKey].callback.lock();
+        if (callback != nullptr) {
+            callback->onTranscodingResumed(jobId);
+        }
+    });
+}
+
+void TranscodingJobScheduler::onFinish(ClientIdType clientId, JobIdType jobId) {
+    notifyClient(clientId, jobId, "finish", [=](const JobKeyType& jobKey) {
+        {
+            auto clientCallback = mJobMap[jobKey].callback.lock();
+            if (clientCallback != nullptr) {
+                clientCallback->onTranscodingFinished(
+                        jobId, TranscodingResultParcel({jobId, -1 /*actualBitrateBps*/,
+                                                        std::nullopt /*jobStats*/}));
+            }
+        }
+
+        // Remove the job.
+        removeJob_l(jobKey);
+
+        // Start next job.
+        updateCurrentJob_l();
+
+        validateState_l();
+    });
+}
+
+void TranscodingJobScheduler::onError(ClientIdType clientId, JobIdType jobId,
+                                      TranscodingErrorCode err) {
+    notifyClient(clientId, jobId, "error", [=](const JobKeyType& jobKey) {
+        {
+            auto clientCallback = mJobMap[jobKey].callback.lock();
+            if (clientCallback != nullptr) {
+                clientCallback->onTranscodingFailed(jobId, err);
+            }
+        }
+
+        // Remove the job.
+        removeJob_l(jobKey);
+
+        // Start next job.
+        updateCurrentJob_l();
+
+        validateState_l();
+    });
+}
+
+void TranscodingJobScheduler::onProgressUpdate(ClientIdType clientId, JobIdType jobId,
+                                               int32_t progress) {
+    notifyClient(clientId, jobId, "progress", [=](const JobKeyType& jobKey) {
+        auto callback = mJobMap[jobKey].callback.lock();
+        if (callback != nullptr) {
+            callback->onProgressUpdate(jobId, progress);
+        }
+    });
+}
+
+void TranscodingJobScheduler::onResourceLost() {
+    ALOGV("%s", __FUNCTION__);
+
+    std::scoped_lock lock{mLock};
+
+    // If we receive a resource loss event, the TranscoderLibrary already paused
+    // the transcoding, so we don't need to call onPaused to notify it to pause.
+    // Only need to update the job state here.
+    if (mCurrentJob != nullptr && mCurrentJob->state == Job::RUNNING) {
+        mCurrentJob->state = Job::PAUSED;
+    }
+    mResourceLost = true;
+
+    validateState_l();
+}
+
+void TranscodingJobScheduler::onTopUidsChanged(const std::unordered_set<uid_t>& uids) {
+    if (uids.empty()) {
+        ALOGW("%s: ignoring empty uids", __FUNCTION__);
+        return;
+    }
+
+    std::string uidStr;
+    for (auto it = uids.begin(); it != uids.end(); it++) {
+        if (!uidStr.empty()) {
+            uidStr += ", ";
+        }
+        uidStr += std::to_string(*it);
+    }
+
+    ALOGD("%s: topUids: size %zu, uids: %s", __FUNCTION__, uids.size(), uidStr.c_str());
+
+    std::scoped_lock lock{mLock};
+
+    moveUidsToTop_l(uids, true /*preserveTopUid*/);
+
+    updateCurrentJob_l();
+
+    validateState_l();
+}
+
+void TranscodingJobScheduler::onResourceAvailable() {
+    ALOGV("%s", __FUNCTION__);
+
+    std::scoped_lock lock{mLock};
+
+    mResourceLost = false;
+    updateCurrentJob_l();
+
+    validateState_l();
+}
+
+void TranscodingJobScheduler::validateState_l() {
+#ifdef VALIDATE_STATE
+    LOG_ALWAYS_FATAL_IF(mJobQueues.count(OFFLINE_UID) != 1,
+                        "mJobQueues offline queue number is not 1");
+    LOG_ALWAYS_FATAL_IF(*mOfflineUidIterator != OFFLINE_UID,
+                        "mOfflineUidIterator not pointing to offline uid");
+    LOG_ALWAYS_FATAL_IF(mUidSortedList.size() != mJobQueues.size(),
+                        "mUidList and mJobQueues size mismatch");
+
+    int32_t totalJobs = 0;
+    for (auto uidIt = mUidSortedList.begin(); uidIt != mUidSortedList.end(); uidIt++) {
+        LOG_ALWAYS_FATAL_IF(mJobQueues.count(*uidIt) != 1, "mJobQueues count for uid %d is not 1",
+                            *uidIt);
+        for (auto jobIt = mJobQueues[*uidIt].begin(); jobIt != mJobQueues[*uidIt].end(); jobIt++) {
+            LOG_ALWAYS_FATAL_IF(mJobMap.count(*jobIt) != 1, "mJobs count for job %s is not 1",
+                                jobToString(*jobIt).c_str());
+        }
+
+        totalJobs += mJobQueues[*uidIt].size();
+    }
+    LOG_ALWAYS_FATAL_IF(mJobMap.size() != totalJobs,
+                        "mJobs size doesn't match total jobs counted from uid queues");
+#endif  // VALIDATE_STATE
+}
+
+}  // namespace android
diff --git a/media/libmediatranscoding/TranscodingUidPolicy.cpp b/media/libmediatranscoding/TranscodingUidPolicy.cpp
new file mode 100644
index 0000000..b72a2b9
--- /dev/null
+++ b/media/libmediatranscoding/TranscodingUidPolicy.cpp
@@ -0,0 +1,248 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "TranscodingUidPolicy"
+
+#include <binder/ActivityManager.h>
+#include <cutils/misc.h>  // FIRST_APPLICATION_UID
+#include <inttypes.h>
+#include <media/TranscodingUidPolicy.h>
+#include <utils/Log.h>
+
+#include <utility>
+
+namespace android {
+
+constexpr static uid_t OFFLINE_UID = -1;
+constexpr static const char* kTranscodingTag = "transcoding";
+
+struct TranscodingUidPolicy::UidObserver : public BnUidObserver,
+                                           public virtual IBinder::DeathRecipient {
+    explicit UidObserver(TranscodingUidPolicy* owner) : mOwner(owner) {}
+
+    // IUidObserver
+    void onUidGone(uid_t uid, bool disabled) override;
+    void onUidActive(uid_t uid) override;
+    void onUidIdle(uid_t uid, bool disabled) override;
+    void onUidStateChanged(uid_t uid, int32_t procState, int64_t procStateSeq,
+                           int32_t capability) override;
+
+    // IBinder::DeathRecipient implementation
+    void binderDied(const wp<IBinder>& who) override;
+
+    TranscodingUidPolicy* mOwner;
+};
+
+void TranscodingUidPolicy::UidObserver::onUidGone(uid_t uid __unused, bool disabled __unused) {}
+
+void TranscodingUidPolicy::UidObserver::onUidActive(uid_t uid __unused) {}
+
+void TranscodingUidPolicy::UidObserver::onUidIdle(uid_t uid __unused, bool disabled __unused) {}
+
+void TranscodingUidPolicy::UidObserver::onUidStateChanged(uid_t uid, int32_t procState,
+                                                          int64_t procStateSeq __unused,
+                                                          int32_t capability __unused) {
+    mOwner->onUidStateChanged(uid, procState);
+}
+
+void TranscodingUidPolicy::UidObserver::binderDied(const wp<IBinder>& /*who*/) {
+    ALOGW("TranscodingUidPolicy: ActivityManager has died");
+    // TODO(chz): this is a rare event (since if the AMS is dead, the system is
+    // probably dead as well). But we should try to reconnect.
+    mOwner->setUidObserverRegistered(false);
+}
+
+////////////////////////////////////////////////////////////////////////////
+
+TranscodingUidPolicy::TranscodingUidPolicy()
+      : mAm(std::make_shared<ActivityManager>()),
+        mUidObserver(new UidObserver(this)),
+        mRegistered(false),
+        mTopUidState(ActivityManager::PROCESS_STATE_UNKNOWN) {
+    registerSelf();
+}
+
+TranscodingUidPolicy::~TranscodingUidPolicy() {
+    unregisterSelf();
+}
+
+void TranscodingUidPolicy::registerSelf() {
+    status_t res = mAm->linkToDeath(mUidObserver.get());
+    mAm->registerUidObserver(
+            mUidObserver.get(),
+            ActivityManager::UID_OBSERVER_GONE | ActivityManager::UID_OBSERVER_IDLE |
+                    ActivityManager::UID_OBSERVER_ACTIVE | ActivityManager::UID_OBSERVER_PROCSTATE,
+            ActivityManager::PROCESS_STATE_UNKNOWN, String16(kTranscodingTag));
+
+    if (res == OK) {
+        Mutex::Autolock _l(mUidLock);
+
+        mRegistered = true;
+        ALOGI("TranscodingUidPolicy: Registered with ActivityManager");
+    } else {
+        mAm->unregisterUidObserver(mUidObserver.get());
+    }
+}
+
+void TranscodingUidPolicy::unregisterSelf() {
+    mAm->unregisterUidObserver(mUidObserver.get());
+    mAm->unlinkToDeath(mUidObserver.get());
+
+    Mutex::Autolock _l(mUidLock);
+
+    mRegistered = false;
+
+    ALOGI("TranscodingUidPolicy: Unregistered with ActivityManager");
+}
+
+void TranscodingUidPolicy::setUidObserverRegistered(bool registered) {
+    Mutex::Autolock _l(mUidLock);
+
+    mRegistered = registered;
+}
+
+void TranscodingUidPolicy::setCallback(const std::shared_ptr<UidPolicyCallbackInterface>& cb) {
+    mUidPolicyCallback = cb;
+}
+
+void TranscodingUidPolicy::registerMonitorUid(uid_t uid) {
+    Mutex::Autolock _l(mUidLock);
+    if (uid == OFFLINE_UID) {
+        ALOGW("Ignoring the offline uid");
+        return;
+    }
+    if (mUidStateMap.find(uid) != mUidStateMap.end()) {
+        ALOGE("%s: Trying to register uid: %d which is already monitored!", __FUNCTION__, uid);
+        return;
+    }
+
+    int32_t state = ActivityManager::PROCESS_STATE_UNKNOWN;
+    if (mRegistered && mAm->isUidActive(uid, String16(kTranscodingTag))) {
+        state = mAm->getUidProcessState(uid, String16(kTranscodingTag));
+    }
+
+    ALOGV("%s: inserting new uid: %u, procState %d", __FUNCTION__, uid, state);
+
+    mUidStateMap.emplace(std::pair<uid_t, int32_t>(uid, state));
+    mStateUidMap[state].insert(uid);
+
+    updateTopUid_l();
+}
+
+void TranscodingUidPolicy::unregisterMonitorUid(uid_t uid) {
+    Mutex::Autolock _l(mUidLock);
+
+    auto it = mUidStateMap.find(uid);
+    if (it == mUidStateMap.end()) {
+        ALOGE("%s: Trying to unregister uid: %d which is not monitored!", __FUNCTION__, uid);
+        return;
+    }
+
+    auto stateIt = mStateUidMap.find(it->second);
+    if (stateIt != mStateUidMap.end()) {
+        stateIt->second.erase(uid);
+        if (stateIt->second.empty()) {
+            mStateUidMap.erase(stateIt);
+        }
+    }
+    mUidStateMap.erase(it);
+
+    updateTopUid_l();
+}
+
+bool TranscodingUidPolicy::isUidOnTop(uid_t uid) {
+    Mutex::Autolock _l(mUidLock);
+
+    return mTopUidState != ActivityManager::PROCESS_STATE_UNKNOWN &&
+           mTopUidState == getProcState_l(uid);
+}
+
+std::unordered_set<uid_t> TranscodingUidPolicy::getTopUids() const {
+    Mutex::Autolock _l(mUidLock);
+
+    if (mTopUidState == ActivityManager::PROCESS_STATE_UNKNOWN) {
+        return std::unordered_set<uid_t>();
+    }
+
+    return mStateUidMap.at(mTopUidState);
+}
+
+void TranscodingUidPolicy::onUidStateChanged(uid_t uid, int32_t procState) {
+    ALOGV("onUidStateChanged: %u, procState %d", uid, procState);
+
+    bool topUidSetChanged = false;
+    std::unordered_set<uid_t> topUids;
+    {
+        Mutex::Autolock _l(mUidLock);
+        auto it = mUidStateMap.find(uid);
+        if (it != mUidStateMap.end() && it->second != procState) {
+            // Top set changed if 1) the uid is in the current top uid set, or 2) the
+            // new procState is at least the same priority as the current top uid state.
+            bool isUidCurrentTop = mTopUidState != ActivityManager::PROCESS_STATE_UNKNOWN &&
+                                   mStateUidMap[mTopUidState].count(uid) > 0;
+            bool isNewStateHigherThanTop = procState != ActivityManager::PROCESS_STATE_UNKNOWN &&
+                                           (procState <= mTopUidState ||
+                                            mTopUidState == ActivityManager::PROCESS_STATE_UNKNOWN);
+            topUidSetChanged = (isUidCurrentTop || isNewStateHigherThanTop);
+
+            // Move uid to the new procState.
+            mStateUidMap[it->second].erase(uid);
+            mStateUidMap[procState].insert(uid);
+            it->second = procState;
+
+            if (topUidSetChanged) {
+                updateTopUid_l();
+
+                // Make a copy of the uid set for callback.
+                topUids = mStateUidMap[mTopUidState];
+            }
+        }
+    }
+
+    ALOGV("topUidSetChanged: %d", topUidSetChanged);
+
+    if (topUidSetChanged) {
+        auto callback = mUidPolicyCallback.lock();
+        if (callback != nullptr) {
+            callback->onTopUidsChanged(topUids);
+        }
+    }
+}
+
+void TranscodingUidPolicy::updateTopUid_l() {
+    mTopUidState = ActivityManager::PROCESS_STATE_UNKNOWN;
+
+    // Find the lowest uid state (ignoring PROCESS_STATE_UNKNOWN) with some monitored uids.
+    for (auto stateIt = mStateUidMap.begin(); stateIt != mStateUidMap.end(); stateIt++) {
+        if (stateIt->first != ActivityManager::PROCESS_STATE_UNKNOWN && !stateIt->second.empty()) {
+            mTopUidState = stateIt->first;
+            break;
+        }
+    }
+
+    ALOGV("%s: top uid state is %d", __FUNCTION__, mTopUidState);
+}
+
+int32_t TranscodingUidPolicy::getProcState_l(uid_t uid) {
+    auto it = mUidStateMap.find(uid);
+    if (it != mUidStateMap.end()) {
+        return it->second;
+    }
+    return ActivityManager::PROCESS_STATE_UNKNOWN;
+}
+
+}  // namespace android
diff --git a/media/libmediatranscoding/aidl/android/media/IMediaTranscodingService.aidl b/media/libmediatranscoding/aidl/android/media/IMediaTranscodingService.aidl
index 07b6c1a..40ca2c2 100644
--- a/media/libmediatranscoding/aidl/android/media/IMediaTranscodingService.aidl
+++ b/media/libmediatranscoding/aidl/android/media/IMediaTranscodingService.aidl
@@ -16,9 +16,10 @@
 
 package android.media;
 
+import android.media.ITranscodingClient;
+import android.media.ITranscodingClientCallback;
 import android.media.TranscodingJobParcel;
 import android.media.TranscodingRequestParcel;
-import android.media.ITranscodingServiceClient;
 
 /**
  * Binder interface for MediaTranscodingService.
@@ -48,64 +49,29 @@
     /**
      * Register the client with the MediaTranscodingService.
      *
-     * Client must call this function to register itself with the service in order to perform
-     * transcoding. This function will return a unique positive Id assigned by the service.
-     * Client should save this Id and use it for all the transaction with the service.
+     * Client must call this function to register itself with the service in
+     * order to perform transcoding tasks. This function will return an
+     * ITranscodingClient interface object. The client should save and use it
+     * for all future transactions with the service.
      *
-     * @param client interface for the MediaTranscodingService to call the client.
+     * @param callback client interface for the MediaTranscodingService to call
+     *        the client.
+     * @param clientName name of the client.
      * @param opPackageName op package name of the client.
      * @param clientUid user id of the client.
      * @param clientPid process id of the client.
-     * @return a unique positive Id assigned to the client by the service, -1  means failed to
-     * register.
+     * @return an ITranscodingClient interface object, with nullptr indicating
+     *         failure to register.
      */
-    int registerClient(in ITranscodingServiceClient client,
-                       in String opPackageName,
-                       in int clientUid,
-                       in int clientPid);
-
-    /**
-    * Unregister the client with the MediaTranscodingService.
-    *
-    * Client will not be able to perform any more transcoding after unregister.
-    *
-    * @param clientId assigned Id of the client.
-    * @return true if succeeds, false otherwise.
-    */
-    boolean unregisterClient(in int clientId);
+    ITranscodingClient registerClient(
+            in ITranscodingClientCallback callback,
+            in String clientName,
+            in String opPackageName,
+            in int clientUid,
+            in int clientPid);
 
     /**
     * Returns the number of clients. This is used for debugging.
     */
     int getNumOfClients();
-
-    /**
-     * Submits a transcoding request to MediaTranscodingService.
-     *
-     * @param clientId assigned Id of the client.
-     * @param request a TranscodingRequest contains transcoding configuration.
-     * @param job(output variable) a TranscodingJob generated by the MediaTranscodingService.
-     * @return a unique positive jobId generated by the MediaTranscodingService, -1 means failure.
-     */
-    int submitRequest(in int clientId,
-                      in TranscodingRequestParcel request,
-                      out TranscodingJobParcel job);
-
-    /**
-     * Cancels a transcoding job.
-     *
-     * @param clientId assigned id of the client.
-     * @param jobId a TranscodingJob generated by the MediaTranscodingService.
-     * @return true if succeeds, false otherwise.
-     */
-    boolean cancelJob(in int clientId, in int jobId);
-
-    /**
-     * Queries the job detail associated with a jobId.
-     *
-     * @param jobId a TranscodingJob generated by the MediaTranscodingService.
-     * @param job(output variable) the TranscodingJob associated with the jobId.
-     * @return true if succeeds, false otherwise.
-     */
-    boolean getJobWithId(in int jobId, out TranscodingJobParcel job);
 }
diff --git a/media/libmediatranscoding/aidl/android/media/ITranscodingClient.aidl b/media/libmediatranscoding/aidl/android/media/ITranscodingClient.aidl
new file mode 100644
index 0000000..37b5147
--- /dev/null
+++ b/media/libmediatranscoding/aidl/android/media/ITranscodingClient.aidl
@@ -0,0 +1,63 @@
+/**
+ * Copyright (c) 2020, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.TranscodingJobParcel;
+import android.media.TranscodingRequestParcel;
+
+/**
+ * ITranscodingClient
+ *
+ * Interface for a client to communicate with MediaTranscodingService.
+ *
+ * {@hide}
+ */
+interface ITranscodingClient {
+    /**
+     * Submits a transcoding request to MediaTranscodingService.
+     *
+     * @param request a TranscodingRequest contains transcoding configuration.
+     * @param job(output variable) a TranscodingJob generated by the MediaTranscodingService.
+     * @return true if success, false otherwise.
+     */
+    boolean submitRequest(in TranscodingRequestParcel request,
+                          out TranscodingJobParcel job);
+
+    /**
+     * Cancels a transcoding job.
+     *
+     * @param jobId a TranscodingJob generated by the MediaTranscodingService.
+     * @return true if succeeds, false otherwise.
+     */
+    boolean cancelJob(in int jobId);
+
+    /**
+     * Queries the job detail associated with a jobId.
+     *
+     * @param jobId a TranscodingJob generated by the MediaTranscodingService.
+     * @param job(output variable) the TranscodingJob associated with the jobId.
+     * @return true if succeeds, false otherwise.
+     */
+    boolean getJobWithId(in int jobId, out TranscodingJobParcel job);
+
+    /**
+    * Unregister the client with the MediaTranscodingService.
+    *
+    * Client will not be able to perform any more transcoding after unregister.
+    */
+    void unregister();
+}
diff --git a/media/libmediatranscoding/aidl/android/media/ITranscodingServiceClient.aidl b/media/libmediatranscoding/aidl/android/media/ITranscodingClientCallback.aidl
similarity index 63%
rename from media/libmediatranscoding/aidl/android/media/ITranscodingServiceClient.aidl
rename to media/libmediatranscoding/aidl/android/media/ITranscodingClientCallback.aidl
index e23c833..73edb95 100644
--- a/media/libmediatranscoding/aidl/android/media/ITranscodingServiceClient.aidl
+++ b/media/libmediatranscoding/aidl/android/media/ITranscodingClientCallback.aidl
@@ -19,19 +19,49 @@
 import android.media.TranscodingErrorCode;
 import android.media.TranscodingJobParcel;
 import android.media.TranscodingResultParcel;
+import android.os.ParcelFileDescriptor;
 
 /**
- * ITranscodingServiceClient interface for the MediaTranscodingervice to communicate with the
- * client.
+ * ITranscodingClientCallback
+ *
+ * Interface for the MediaTranscodingService to communicate with the client.
  *
  * {@hide}
  */
-//TODO(hkuang): Implement the interface.
-interface ITranscodingServiceClient {
+interface ITranscodingClientCallback {
     /**
-     * Retrieves the name of the client.
-     */
-    @utf8InCpp String getName();
+    * Called to open a raw file descriptor to access data under a URI
+    *
+    * @param fileUri The path of the filename.
+    * @param mode The file mode to use. Must be one of ("r, "w", "rw")
+    * @return ParcelFileDescriptor if open the file successfully, null otherwise.
+    */
+    ParcelFileDescriptor openFileDescriptor(in @utf8InCpp String fileUri,
+                                            in @utf8InCpp String mode);
+
+    /**
+    * Called when the transcoding associated with the jobId finished.
+    * This will only be called if client request to get all the status of the job.
+    *
+    * @param jobId jobId assigned by the MediaTranscodingService upon receiving request.
+    */
+    oneway void onTranscodingStarted(in int jobId);
+
+    /**
+    * Called when the transcoding associated with the jobId is paused.
+    * This will only be called if client request to get all the status of the job.
+    *
+    * @param jobId jobId assigned by the MediaTranscodingService upon receiving request.
+    */
+    oneway void onTranscodingPaused(in int jobId);
+
+    /**
+    * Called when the transcoding associated with the jobId is resumed.
+    * This will only be called if client request to get all the status of the job.
+    *
+    * @param jobId jobId assigned by the MediaTranscodingService upon receiving request.
+    */
+    oneway void onTranscodingResumed(in int jobId);
 
     /**
     * Called when the transcoding associated with the jobId finished.
diff --git a/media/libmediatranscoding/aidl/android/media/TranscodingErrorCode.aidl b/media/libmediatranscoding/aidl/android/media/TranscodingErrorCode.aidl
index 7f47fdc..b044d41 100644
--- a/media/libmediatranscoding/aidl/android/media/TranscodingErrorCode.aidl
+++ b/media/libmediatranscoding/aidl/android/media/TranscodingErrorCode.aidl
@@ -23,11 +23,12 @@
  */
 @Backing(type = "int")
 enum TranscodingErrorCode {
-    kUnknown = 0,
-    kUnsupported = 1,
-    kDecoderError = 2,
-    kEncoderError = 3,
-    kExtractorError = 4,
-    kMuxerError = 5,
-    kInvalidBitstream = 6
+    kNoError = 0,
+    kUnknown = 1,
+    kMalformed = 2,
+    kUnsupported = 3,
+    kInvalidParameter = 4,
+    kInvalidOperation = 5,
+    kErrorIO = 6,
+    kInsufficientResources = 7,
 }
\ No newline at end of file
diff --git a/media/libmediatranscoding/aidl/android/media/TranscodingJobParcel.aidl b/media/libmediatranscoding/aidl/android/media/TranscodingJobParcel.aidl
index d912c38..baf4381 100644
--- a/media/libmediatranscoding/aidl/android/media/TranscodingJobParcel.aidl
+++ b/media/libmediatranscoding/aidl/android/media/TranscodingJobParcel.aidl
@@ -17,6 +17,7 @@
 package android.media;
 
 import android.media.TranscodingRequestParcel;
+import android.media.TranscodingVideoTrackFormat;
 
 /**
  * TranscodingJob is generated by the MediaTranscodingService upon receiving a TranscodingRequest.
@@ -38,6 +39,12 @@
     TranscodingRequestParcel request;
 
     /**
+     * Output video track's format. This will only be avaiable for video transcoding and it will
+     * be avaiable when the job is finished.
+     */
+    @nullable TranscodingVideoTrackFormat videoTrackFormat;
+
+    /**
     * Current number of jobs ahead of this job. The service schedules the job based on the priority
     * passed from the client. Client could specify whether to receive updates when the
     * awaitNumberOfJobs changes through setting requestProgressUpdate in the TranscodingRequest.
diff --git a/media/libmediatranscoding/aidl/android/media/TranscodingJobStats.aidl b/media/libmediatranscoding/aidl/android/media/TranscodingJobStats.aidl
new file mode 100644
index 0000000..1b41b87
--- /dev/null
+++ b/media/libmediatranscoding/aidl/android/media/TranscodingJobStats.aidl
@@ -0,0 +1,45 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+/**
+ * TranscodingJobStats encapsulated the stats of the a TranscodingJob.
+ *
+ * {@hide}
+ */
+parcelable TranscodingJobStats {
+    /**
+     * System time of when the job is created.
+     */
+    long jobCreatedTimeUs;
+
+    /**
+     * System time of when the job is finished.
+     */
+    long jobFinishedTimeUs;
+
+    /**
+     * Total time spend on transcoding, exclude the time in pause.
+     */
+    long totalProcessingTimeUs;
+
+    /**
+     * Total time spend on handling the job, include the time in pause.
+     * The totaltimeUs is actually the same as jobFinishedTimeUs - jobCreatedTimeUs.
+     */
+    long totalTimeUs;
+}
diff --git a/media/libmediatranscoding/aidl/android/media/TranscodingRequestParcel.aidl b/media/libmediatranscoding/aidl/android/media/TranscodingRequestParcel.aidl
index 7b7986d..83ea707 100644
--- a/media/libmediatranscoding/aidl/android/media/TranscodingRequestParcel.aidl
+++ b/media/libmediatranscoding/aidl/android/media/TranscodingRequestParcel.aidl
@@ -17,7 +17,9 @@
 package android.media;
 
 import android.media.TranscodingJobPriority;
+import android.media.TranscodingTestConfig;
 import android.media.TranscodingType;
+import android.media.TranscodingVideoTrackFormat;
 
 /**
  * TranscodingRequest contains the desired configuration for the transcoding.
@@ -27,9 +29,14 @@
 //TODO(hkuang): Implement the parcelable.
 parcelable TranscodingRequestParcel {
     /**
-     * Name of file to be transcoded.
+     * The absolute file path of the source file.
      */
-    @utf8InCpp String fileName;
+    @utf8InCpp String sourceFilePath;
+
+    /**
+     * The absolute file path of the destination file.
+     */
+    @utf8InCpp String destinationFilePath;
 
     /**
      * Type of the transcoding.
@@ -37,14 +44,12 @@
     TranscodingType transcodingType;
 
     /**
-     * Input source file descriptor.
+     * Requested video track format for the transcoding.
+     * Note that the transcoding service will try to fulfill the requested format as much as
+     * possbile, while subject to hardware and software limitation. The final video track format
+     * will be available in the TranscodingJobParcel when the job is finished.
      */
-    ParcelFileDescriptor inFd;
-
-    /**
-     * Output transcoded file descriptor.
-     */
-    ParcelFileDescriptor outFd;
+    @nullable TranscodingVideoTrackFormat requestedVideoTrackFormat;
 
     /**
      * Priority of this transcoding. Service will schedule the transcoding based on the priority.
@@ -53,6 +58,30 @@
 
     /**
      * Whether to receive update on progress and change of awaitNumJobs.
+     * Default to false.
      */
-    boolean requestUpdate;
+    boolean requestProgressUpdate = false;
+
+    /**
+     * Whether to receive update on job's start/stop/pause/resume.
+     * Default to false.
+     */
+    boolean requestJobEventUpdate = false;
+
+    /**
+     * Whether this request is for testing.
+     */
+    boolean isForTesting = false;
+
+    /**
+     * Test configuration. This will be available only when isForTesting is set to true.
+     */
+    @nullable TranscodingTestConfig testConfig;
+
+     /**
+      * Whether to get the stats of the transcoding.
+      * If this is enabled, the TranscodingJobStats will be returned in TranscodingResultParcel
+      * upon transcoding finishes.
+      */
+    boolean enableStats = false;
 }
diff --git a/media/libmediatranscoding/aidl/android/media/TranscodingResultParcel.aidl b/media/libmediatranscoding/aidl/android/media/TranscodingResultParcel.aidl
index 65c49e7..a20c8b1 100644
--- a/media/libmediatranscoding/aidl/android/media/TranscodingResultParcel.aidl
+++ b/media/libmediatranscoding/aidl/android/media/TranscodingResultParcel.aidl
@@ -16,6 +16,8 @@
 
 package android.media;
 
+import android.media.TranscodingJobStats;
+
 /**
  * Result of the transcoding.
  *
@@ -34,5 +36,9 @@
      */
     int actualBitrateBps;
 
-    // TODO(hkuang): Add more fields.
+    /**
+     * Stats of the transcoding job. This will only be available when client requests to get the
+     * stats in TranscodingRequestParcel.
+     */
+    @nullable TranscodingJobStats jobStats;
 }
\ No newline at end of file
diff --git a/media/libmediatranscoding/aidl/android/media/TranscodingTestConfig.aidl b/media/libmediatranscoding/aidl/android/media/TranscodingTestConfig.aidl
new file mode 100644
index 0000000..a564799
--- /dev/null
+++ b/media/libmediatranscoding/aidl/android/media/TranscodingTestConfig.aidl
@@ -0,0 +1,43 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+ package android.media;
+
+ /**
+  * TranscodingTestConfig contains the test configureation used in testing.
+  *
+  * {@hide}
+  */
+parcelable TranscodingTestConfig {
+    /**
+     * Whether to use SimulatedTranscoder for testing. Note that SimulatedTranscoder does not send
+     * transcoding jobs to real MediaTranscoder.
+     */
+    boolean useSimulatedTranscoder = false;
+
+    /**
+     * Passthrough mode used for testing. The transcoding service will assume the destination
+     * path already contains the transcoding of the source file and return it to client directly.
+     */
+    boolean passThroughMode = false;
+
+    /**
+     * Time of processing the job in milliseconds. Service will return the job result at least after
+     * processingTotalTimeMs from the time it starts to process the job. Note that if service uses
+     * real MediaTranscoder to do transcoding, the time spent on transcoding may be more than that.
+     */
+    int processingTotalTimeMs = 0;
+}
diff --git a/media/libmediatranscoding/aidl/android/media/TranscodingVideoTrackFormat.aidl b/media/libmediatranscoding/aidl/android/media/TranscodingVideoTrackFormat.aidl
new file mode 100644
index 0000000..a567a95
--- /dev/null
+++ b/media/libmediatranscoding/aidl/android/media/TranscodingVideoTrackFormat.aidl
@@ -0,0 +1,70 @@
+/**
+ * Copyright (c) 2020, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.TranscodingVideoCodecType;
+
+/**
+ * TranscodingVideoTrackFormat contains the video track format of a video.
+ *
+ * TODO(hkuang): Switch to PersistableBundle when b/156428735 is fixed or after we remove
+ * aidl_interface
+ *
+ * Note that TranscodingVideoTrackFormat is used in TranscodingRequestParcel for the  client to
+ * specify the desired transcoded video format, and is also used in TranscodingJobParcel for the
+ * service to notify client of the final video format for transcoding.
+ * When used as input in TranscodingRequestParcel, the client only needs to specify the config that
+ * they want to change, e.g. codec or resolution, and all the missing configs will be extracted
+ * from the source video and applied to the destination video.
+ * When used as output in TranscodingJobParcel, all the configs will be populated to indicate the
+ * final encoder configs used for transcoding.
+ *
+ * {@hide}
+ */
+parcelable TranscodingVideoTrackFormat {
+    /**
+     * Video Codec type.
+     */
+    TranscodingVideoCodecType codecType; // TranscodingVideoCodecType::kUnspecified;
+
+    /**
+     * Width of the video in pixels. -1 means unavailable.
+     */
+    int width = -1;
+
+    /**
+     * Height of the video in pixels. -1 means unavailable.
+     */
+    int height = -1;
+
+    /**
+     * Bitrate in bits per second. -1 means unavailable.
+     */
+    int bitrateBps = -1;
+
+    /**
+     * Codec profile. This must be the same constant as used in MediaCodecInfo.CodecProfileLevel.
+     * -1 means unavailable.
+     */
+    int profile = -1;
+
+    /**
+     * Codec level. This must be the same constant as used in MediaCodecInfo.CodecProfileLevel.
+     * -1 means unavailable.
+     */
+    int level = -1;
+}
diff --git a/media/libmediatranscoding/build_and_run_all_unit_tests.sh b/media/libmediatranscoding/build_and_run_all_unit_tests.sh
new file mode 100755
index 0000000..388e2ea
--- /dev/null
+++ b/media/libmediatranscoding/build_and_run_all_unit_tests.sh
@@ -0,0 +1,31 @@
+#!/bin/bash
+#
+# Script to run all transcoding related tests from subfolders.
+# Run script from this folder.
+#
+
+if [ -z "$ANDROID_BUILD_TOP" ]; then
+    echo "Android build environment not set"
+    exit -1
+fi
+
+# ensure we have mm
+. $ANDROID_BUILD_TOP/build/envsetup.sh
+
+mm
+
+echo "waiting for device"
+
+adb root && adb wait-for-device remount && adb sync
+SYNC_FINISHED=true
+
+# Run the transcoding service tests.
+pushd tests
+. build_and_run_all_unit_tests.sh
+popd
+
+# Run the transcoder tests.
+pushd transcoder/tests/
+. build_and_run_all_unit_tests.sh
+popd
+
diff --git a/media/libmediatranscoding/include/media/AdjustableMaxPriorityQueue.h b/media/libmediatranscoding/include/media/AdjustableMaxPriorityQueue.h
index 0e8dcfd..9ca2ee9 100644
--- a/media/libmediatranscoding/include/media/AdjustableMaxPriorityQueue.h
+++ b/media/libmediatranscoding/include/media/AdjustableMaxPriorityQueue.h
@@ -38,7 +38,7 @@
  */
 template <class T, class Comparator = std::less<T>>
 class AdjustableMaxPriorityQueue {
-   public:
+public:
     typedef typename std::vector<T>::iterator iterator;
     typedef typename std::vector<T>::const_iterator const_iterator;
 
@@ -104,7 +104,7 @@
     /* Return the backbone storage of this PriorityQueue. Mainly used for debugging. */
     const std::vector<T>& getStorage() const { return mHeap; };
 
-   private:
+private:
     std::vector<T> mHeap;
 
     /* Implementation shared by both public push() methods. */
diff --git a/media/libmediatranscoding/include/media/SchedulerClientInterface.h b/media/libmediatranscoding/include/media/SchedulerClientInterface.h
new file mode 100644
index 0000000..e00cfb2
--- /dev/null
+++ b/media/libmediatranscoding/include/media/SchedulerClientInterface.h
@@ -0,0 +1,68 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_MEDIA_SCHEDULER_CLIENT_INTERFACE_H
+#define ANDROID_MEDIA_SCHEDULER_CLIENT_INTERFACE_H
+
+#include <aidl/android/media/ITranscodingClientCallback.h>
+#include <aidl/android/media/TranscodingRequestParcel.h>
+#include <media/TranscodingDefs.h>
+
+namespace android {
+
+using ::aidl::android::media::ITranscodingClientCallback;
+using ::aidl::android::media::TranscodingRequestParcel;
+
+// Interface for a client to call the scheduler to schedule or retrieve
+// the status of a job.
+class SchedulerClientInterface {
+public:
+    /**
+     * Submits one request to the scheduler.
+     *
+     * Returns true on success and false on failure. This call will fail is a job identified
+     * by <clientId, jobId> already exists.
+     */
+    virtual bool submit(ClientIdType clientId, JobIdType jobId, uid_t uid,
+                        const TranscodingRequestParcel& request,
+                        const std::weak_ptr<ITranscodingClientCallback>& clientCallback) = 0;
+
+    /**
+     * Cancels a job identified by <clientId, jobId>.
+     *
+     * If jobId is negative (<0), all jobs with a specified priority (that's not
+     * TranscodingJobPriority::kUnspecified) will be cancelled. Otherwise, only the single job
+     * <clientId, jobId> will be cancelled.
+     *
+     * Returns false if a single job is being cancelled but it doesn't exist. Returns
+     * true otherwise.
+     */
+    virtual bool cancel(ClientIdType clientId, JobIdType jobId) = 0;
+
+    /**
+     * Retrieves information about a job.
+     *
+     * Returns true and the job if it exists, and false otherwise.
+     */
+    virtual bool getJob(ClientIdType clientId, JobIdType jobId,
+                        TranscodingRequestParcel* request) = 0;
+
+protected:
+    virtual ~SchedulerClientInterface() = default;
+};
+
+}  // namespace android
+#endif  // ANDROID_MEDIA_SCHEDULER_CLIENT_INTERFACE_H
diff --git a/media/libmediatranscoding/include/media/TranscoderInterface.h b/media/libmediatranscoding/include/media/TranscoderInterface.h
new file mode 100644
index 0000000..1a3f505
--- /dev/null
+++ b/media/libmediatranscoding/include/media/TranscoderInterface.h
@@ -0,0 +1,72 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_MEDIA_TRANSCODER_INTERFACE_H
+#define ANDROID_MEDIA_TRANSCODER_INTERFACE_H
+
+#include <aidl/android/media/ITranscodingClientCallback.h>
+#include <aidl/android/media/TranscodingErrorCode.h>
+#include <aidl/android/media/TranscodingRequestParcel.h>
+#include <media/TranscodingDefs.h>
+
+namespace android {
+
+using ::aidl::android::media::ITranscodingClientCallback;
+using ::aidl::android::media::TranscodingErrorCode;
+using ::aidl::android::media::TranscodingRequestParcel;
+class TranscoderCallbackInterface;
+
+// Interface for the scheduler to call the transcoder to take actions.
+class TranscoderInterface {
+public:
+    virtual void setCallback(const std::shared_ptr<TranscoderCallbackInterface>& cb) = 0;
+    virtual void start(ClientIdType clientId, JobIdType jobId,
+                       const TranscodingRequestParcel& request,
+                       const std::shared_ptr<ITranscodingClientCallback>& clientCallback) = 0;
+    virtual void pause(ClientIdType clientId, JobIdType jobId) = 0;
+    virtual void resume(ClientIdType clientId, JobIdType jobId,
+                        const TranscodingRequestParcel& request,
+                        const std::shared_ptr<ITranscodingClientCallback>& clientCallback) = 0;
+    virtual void stop(ClientIdType clientId, JobIdType jobId) = 0;
+
+protected:
+    virtual ~TranscoderInterface() = default;
+};
+
+// Interface for the transcoder to notify the scheduler of the status of
+// the currently running job, or temporary loss of transcoding resources.
+class TranscoderCallbackInterface {
+public:
+    // TODO(chz): determine what parameters are needed here.
+    virtual void onStarted(ClientIdType clientId, JobIdType jobId) = 0;
+    virtual void onPaused(ClientIdType clientId, JobIdType jobId) = 0;
+    virtual void onResumed(ClientIdType clientId, JobIdType jobId) = 0;
+    virtual void onFinish(ClientIdType clientId, JobIdType jobId) = 0;
+    virtual void onError(ClientIdType clientId, JobIdType jobId, TranscodingErrorCode err) = 0;
+    virtual void onProgressUpdate(ClientIdType clientId, JobIdType jobId, int32_t progress) = 0;
+
+    // Called when transcoding becomes temporarily inaccessible due to loss of resource.
+    // If there is any job currently running, it will be paused. When resource contention
+    // is solved, the scheduler should call TranscoderInterface's to either start a new job,
+    // or resume a paused job.
+    virtual void onResourceLost() = 0;
+
+protected:
+    virtual ~TranscoderCallbackInterface() = default;
+};
+
+}  // namespace android
+#endif  // ANDROID_MEDIA_TRANSCODER_INTERFACE_H
diff --git a/media/libmediatranscoding/include/media/TranscoderWrapper.h b/media/libmediatranscoding/include/media/TranscoderWrapper.h
new file mode 100644
index 0000000..a4c92c5
--- /dev/null
+++ b/media/libmediatranscoding/include/media/TranscoderWrapper.h
@@ -0,0 +1,95 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_TRANSCODER_WRAPPER_H
+#define ANDROID_TRANSCODER_WRAPPER_H
+
+#include <android-base/thread_annotations.h>
+#include <media/TranscoderInterface.h>
+
+#include <list>
+#include <map>
+#include <mutex>
+
+namespace android {
+
+class MediaTranscoder;
+class Parcelable;
+
+/*
+ * Wrapper class around MediaTranscoder.
+ * Implements TranscoderInterface for TranscodingJobScheduler to use.
+ */
+class TranscoderWrapper : public TranscoderInterface,
+                          public std::enable_shared_from_this<TranscoderWrapper> {
+public:
+    TranscoderWrapper();
+
+    virtual void setCallback(const std::shared_ptr<TranscoderCallbackInterface>& cb) override;
+    virtual void start(ClientIdType clientId, JobIdType jobId,
+                       const TranscodingRequestParcel& request,
+                       const std::shared_ptr<ITranscodingClientCallback>& clientCallback) override;
+    virtual void pause(ClientIdType clientId, JobIdType jobId) override;
+    virtual void resume(ClientIdType clientId, JobIdType jobId,
+                        const TranscodingRequestParcel& request,
+                        const std::shared_ptr<ITranscodingClientCallback>& clientCallback) override;
+    virtual void stop(ClientIdType clientId, JobIdType jobId) override;
+
+private:
+    class CallbackImpl;
+    struct Event {
+        enum Type { NoEvent, Start, Pause, Resume, Stop, Finish, Error, Progress } type;
+        ClientIdType clientId;
+        JobIdType jobId;
+        std::function<void()> runnable;
+    };
+    using JobKeyType = std::pair<ClientIdType, JobIdType>;
+
+    std::shared_ptr<CallbackImpl> mTranscoderCb;
+    std::shared_ptr<MediaTranscoder> mTranscoder;
+    std::weak_ptr<TranscoderCallbackInterface> mCallback;
+    std::mutex mLock;
+    std::condition_variable mCondition;
+    std::list<Event> mQueue;  // GUARDED_BY(mLock);
+    std::map<JobKeyType, std::shared_ptr<const Parcel>> mPausedStateMap;
+    ClientIdType mCurrentClientId;
+    JobIdType mCurrentJobId;
+
+    static const char* toString(Event::Type type);
+    void onFinish(ClientIdType clientId, JobIdType jobId);
+    void onError(ClientIdType clientId, JobIdType jobId, TranscodingErrorCode error);
+    void onProgress(ClientIdType clientId, JobIdType jobId, int32_t progress);
+
+    TranscodingErrorCode handleStart(ClientIdType clientId, JobIdType jobId,
+                                     const TranscodingRequestParcel& request,
+                                     const std::shared_ptr<ITranscodingClientCallback>& callback);
+    TranscodingErrorCode handlePause(ClientIdType clientId, JobIdType jobId);
+    TranscodingErrorCode handleResume(ClientIdType clientId, JobIdType jobId,
+                                      const TranscodingRequestParcel& request,
+                                      const std::shared_ptr<ITranscodingClientCallback>& callback);
+    TranscodingErrorCode setupTranscoder(
+            ClientIdType clientId, JobIdType jobId, const TranscodingRequestParcel& request,
+            const std::shared_ptr<ITranscodingClientCallback>& callback,
+            const std::shared_ptr<const Parcel>& pausedState = nullptr);
+
+    void cleanup();
+    void queueEvent(Event::Type type, ClientIdType clientId, JobIdType jobId,
+                    const std::function<void()> runnable);
+    void threadLoop();
+};
+
+}  // namespace android
+#endif  // ANDROID_TRANSCODER_WRAPPER_H
diff --git a/media/libmediatranscoding/include/media/TranscodingClientManager.h b/media/libmediatranscoding/include/media/TranscodingClientManager.h
index eec120a..a62ad8c 100644
--- a/media/libmediatranscoding/include/media/TranscodingClientManager.h
+++ b/media/libmediatranscoding/include/media/TranscodingClientManager.h
@@ -17,73 +17,76 @@
 #ifndef ANDROID_MEDIA_TRANSCODING_CLIENT_MANAGER_H
 #define ANDROID_MEDIA_TRANSCODING_CLIENT_MANAGER_H
 
-#include <aidl/android/media/BnTranscodingServiceClient.h>
-#include <android/binder_ibinder.h>
+#include <aidl/android/media/ITranscodingClient.h>
+#include <aidl/android/media/ITranscodingClientCallback.h>
 #include <sys/types.h>
 #include <utils/Condition.h>
-#include <utils/RefBase.h>
 #include <utils/String8.h>
 #include <utils/Vector.h>
 
+#include <map>
 #include <mutex>
 #include <unordered_map>
+#include <unordered_set>
+
+#include "SchedulerClientInterface.h"
 
 namespace android {
 
-using ::aidl::android::media::ITranscodingServiceClient;
-
-class MediaTranscodingService;
+using ::aidl::android::media::ITranscodingClient;
+using ::aidl::android::media::ITranscodingClientCallback;
 
 /*
  * TranscodingClientManager manages all the transcoding clients across different processes.
  *
- * TranscodingClientManager is a global singleton that could only acquired by
- * MediaTranscodingService. It manages all the clients's registration/unregistration and clients'
- * information. It also bookkeeps all the clients' information. It also monitors to the death of the
+ * TranscodingClientManager manages all the clients's registration/unregistration and clients'
+ * information. It also bookkeeps all the clients' information. It also monitors the death of the
  * clients. Upon client's death, it will remove the client from it.
  *
  * TODO(hkuang): Hook up with ResourceManager for resource management.
  * TODO(hkuang): Hook up with MediaMetrics to log all the transactions.
  */
-class TranscodingClientManager {
-   public:
+class TranscodingClientManager : public std::enable_shared_from_this<TranscodingClientManager> {
+public:
     virtual ~TranscodingClientManager();
 
     /**
-     * ClientInfo contains a single client's information.
-     */
-    struct ClientInfo {
-        /* The remote client that this ClientInfo is associated with. */
-        std::shared_ptr<ITranscodingServiceClient> mClient;
-        /* A unique positive Id assigned to the client by the service. */
-        int32_t mClientId;
-        /* Process id of the client */
-        int32_t mClientPid;
-        /* User id of the client. */
-        int32_t mClientUid;
-        /* Package name of the client. */
-        std::string mClientOpPackageName;
-
-        ClientInfo(const std::shared_ptr<ITranscodingServiceClient>& client, int64_t clientId,
-                   int32_t pid, int32_t uid, const std::string& opPackageName)
-            : mClient(client),
-              mClientId(clientId),
-              mClientPid(pid),
-              mClientUid(uid),
-              mClientOpPackageName(opPackageName) {}
-    };
-
-    /**
      * Adds a new client to the manager.
      *
-     * The client must have valid clientId, pid, uid and opPackageName, otherwise, this will return
-     * a non-zero errorcode. If the client has already been added, it will also return non-zero
-     * errorcode.
+     * The client must have valid callback, pid, uid, clientName and opPackageName.
+     * Otherwise, this will return a non-zero errorcode. If the client callback has
+     * already been added, it will also return non-zero errorcode.
      *
-     * @param client to be added to the manager.
+     * @param callback client callback for the service to call this client.
+     * @param pid client's process id.
+     * @param uid client's user id.
+     * @param clientName client's name.
+     * @param opPackageName client's package name.
+     * @param client output holding the ITranscodingClient interface for the client
+     *        to use for subsequent communications with the service.
      * @return 0 if client is added successfully, non-zero errorcode otherwise.
      */
-    status_t addClient(std::unique_ptr<ClientInfo> client);
+    status_t addClient(const std::shared_ptr<ITranscodingClientCallback>& callback, pid_t pid,
+                       uid_t uid, const std::string& clientName, const std::string& opPackageName,
+                       std::shared_ptr<ITranscodingClient>* client);
+
+    /**
+     * Gets the number of clients.
+     */
+    size_t getNumOfClients() const;
+
+    /**
+     * Dump all the client information to the fd.
+     */
+    void dumpAllClients(int fd, const Vector<String16>& args);
+
+private:
+    friend class MediaTranscodingService;
+    friend class TranscodingClientManagerTest;
+    struct ClientImpl;
+
+    // Only allow MediaTranscodingService and unit tests to instantiate.
+    TranscodingClientManager(const std::shared_ptr<SchedulerClientInterface>& scheduler);
 
     /**
      * Removes an existing client from the manager.
@@ -93,39 +96,23 @@
      * @param clientId id of the client to be removed..
      * @return 0 if client is removed successfully, non-zero errorcode otherwise.
      */
-    status_t removeClient(int32_t clientId);
-
-    /**
-     * Gets the number of clients.
-     */
-    size_t getNumOfClients() const;
-
-    /**
-     * Checks if a client with clientId is already registered.
-     */
-    bool isClientIdRegistered(int32_t clientId) const;
-
-    /**
-     * Dump all the client information to the fd.
-     */
-    void dumpAllClients(int fd, const Vector<String16>& args);
-
-   private:
-    friend class MediaTranscodingService;
-    friend class TranscodingClientManagerTest;
-
-    /** Get the singleton instance of the TranscodingClientManager. */
-    static TranscodingClientManager& getInstance();
-
-    TranscodingClientManager();
+    status_t removeClient(ClientIdType clientId);
 
     static void BinderDiedCallback(void* cookie);
 
     mutable std::mutex mLock;
-    std::unordered_map<int32_t, std::unique_ptr<ClientInfo>> mClientIdToClientInfoMap
+    std::unordered_map<ClientIdType, std::shared_ptr<ClientImpl>> mClientIdToClientMap
             GUARDED_BY(mLock);
+    std::unordered_set<uintptr_t> mRegisteredCallbacks GUARDED_BY(mLock);
 
     ::ndk::ScopedAIBinder_DeathRecipient mDeathRecipient;
+
+    std::shared_ptr<SchedulerClientInterface> mJobScheduler;
+
+    static std::atomic<ClientIdType> sCookieCounter;
+    static std::mutex sCookie2ClientLock;
+    static std::map<ClientIdType, std::shared_ptr<ClientImpl>> sCookie2Client
+            GUARDED_BY(sCookie2ClientLock);
 };
 
 }  // namespace android
diff --git a/media/libmediatranscoding/include/media/TranscodingDefs.h b/media/libmediatranscoding/include/media/TranscodingDefs.h
new file mode 100644
index 0000000..31d83ac
--- /dev/null
+++ b/media/libmediatranscoding/include/media/TranscodingDefs.h
@@ -0,0 +1,29 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_MEDIA_TRANSCODING_DEFS_H
+#define ANDROID_MEDIA_TRANSCODING_DEFS_H
+
+#include <aidl/android/media/ITranscodingClientCallback.h>
+#include <aidl/android/media/TranscodingRequestParcel.h>
+
+namespace android {
+
+using ClientIdType = uintptr_t;
+using JobIdType = int32_t;
+
+}  // namespace android
+#endif  // ANDROID_MEDIA_TRANSCODING_DEFS_H
diff --git a/media/libmediatranscoding/include/media/TranscodingJobScheduler.h b/media/libmediatranscoding/include/media/TranscodingJobScheduler.h
new file mode 100644
index 0000000..5ccadad
--- /dev/null
+++ b/media/libmediatranscoding/include/media/TranscodingJobScheduler.h
@@ -0,0 +1,120 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_MEDIA_TRANSCODING_JOB_SCHEDULER_H
+#define ANDROID_MEDIA_TRANSCODING_JOB_SCHEDULER_H
+
+#include <aidl/android/media/TranscodingJobPriority.h>
+#include <media/SchedulerClientInterface.h>
+#include <media/TranscoderInterface.h>
+#include <media/TranscodingRequest.h>
+#include <media/UidPolicyInterface.h>
+#include <utils/String8.h>
+
+#include <list>
+#include <map>
+#include <mutex>
+
+namespace android {
+using ::aidl::android::media::TranscodingJobPriority;
+using ::aidl::android::media::TranscodingResultParcel;
+
+class TranscodingJobScheduler : public UidPolicyCallbackInterface,
+                                public SchedulerClientInterface,
+                                public TranscoderCallbackInterface {
+public:
+    virtual ~TranscodingJobScheduler();
+
+    // SchedulerClientInterface
+    bool submit(ClientIdType clientId, JobIdType jobId, uid_t uid,
+                const TranscodingRequestParcel& request,
+                const std::weak_ptr<ITranscodingClientCallback>& clientCallback) override;
+    bool cancel(ClientIdType clientId, JobIdType jobId) override;
+    bool getJob(ClientIdType clientId, JobIdType jobId, TranscodingRequestParcel* request) override;
+    // ~SchedulerClientInterface
+
+    // TranscoderCallbackInterface
+    void onStarted(ClientIdType clientId, JobIdType jobId) override;
+    void onPaused(ClientIdType clientId, JobIdType jobId) override;
+    void onResumed(ClientIdType clientId, JobIdType jobId) override;
+    void onFinish(ClientIdType clientId, JobIdType jobId) override;
+    void onError(ClientIdType clientId, JobIdType jobId, TranscodingErrorCode err) override;
+    void onProgressUpdate(ClientIdType clientId, JobIdType jobId, int32_t progress) override;
+    void onResourceLost() override;
+    // ~TranscoderCallbackInterface
+
+    // UidPolicyCallbackInterface
+    void onTopUidsChanged(const std::unordered_set<uid_t>& uids) override;
+    void onResourceAvailable() override;
+    // ~UidPolicyCallbackInterface
+
+private:
+    friend class MediaTranscodingService;
+    friend class TranscodingJobSchedulerTest;
+
+    using JobKeyType = std::pair<ClientIdType, JobIdType>;
+    using JobQueueType = std::list<JobKeyType>;
+
+    struct Job {
+        JobKeyType key;
+        uid_t uid;
+        enum JobState {
+            NOT_STARTED,
+            RUNNING,
+            PAUSED,
+        } state;
+        TranscodingRequest request;
+        std::weak_ptr<ITranscodingClientCallback> callback;
+    };
+
+    // TODO(chz): call transcoder without global lock.
+    // Use mLock for all entrypoints for now.
+    mutable std::mutex mLock;
+
+    std::map<JobKeyType, Job> mJobMap;
+
+    // uid->JobQueue map (uid == -1: offline queue)
+    std::map<uid_t, JobQueueType> mJobQueues;
+
+    // uids, with the head being the most-recently-top app, 2nd item is the
+    // previous top app, etc.
+    std::list<uid_t> mUidSortedList;
+    std::list<uid_t>::iterator mOfflineUidIterator;
+
+    std::shared_ptr<TranscoderInterface> mTranscoder;
+    std::shared_ptr<UidPolicyInterface> mUidPolicy;
+
+    Job* mCurrentJob;
+    bool mResourceLost;
+
+    // Only allow MediaTranscodingService and unit tests to instantiate.
+    TranscodingJobScheduler(const std::shared_ptr<TranscoderInterface>& transcoder,
+                            const std::shared_ptr<UidPolicyInterface>& uidPolicy);
+
+    Job* getTopJob_l();
+    void updateCurrentJob_l();
+    void removeJob_l(const JobKeyType& jobKey);
+    void moveUidsToTop_l(const std::unordered_set<uid_t>& uids, bool preserveTopUid);
+    void notifyClient(ClientIdType clientId, JobIdType jobId, const char* reason,
+                      std::function<void(const JobKeyType&)> func);
+    // Internal state verifier (debug only)
+    void validateState_l();
+
+    static String8 jobToString(const JobKeyType& jobKey);
+};
+
+}  // namespace android
+#endif  // ANDROID_MEDIA_TRANSCODING_JOB_SCHEDULER_H
diff --git a/media/libmediatranscoding/include/media/TranscodingRequest.h b/media/libmediatranscoding/include/media/TranscodingRequest.h
new file mode 100644
index 0000000..63de1fb
--- /dev/null
+++ b/media/libmediatranscoding/include/media/TranscodingRequest.h
@@ -0,0 +1,51 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_MEDIA_TRANSCODING_REQUEST_H
+#define ANDROID_MEDIA_TRANSCODING_REQUEST_H
+
+#include <aidl/android/media/TranscodingRequestParcel.h>
+
+namespace android {
+
+using ::aidl::android::media::TranscodingRequestParcel;
+
+// Helper class for duplicating a TranscodingRequestParcel
+class TranscodingRequest : public TranscodingRequestParcel {
+public:
+    TranscodingRequest() = default;
+    TranscodingRequest(const TranscodingRequestParcel& parcel) { setTo(parcel); }
+    TranscodingRequest& operator=(const TranscodingRequest& request) {
+        setTo(request);
+        return *this;
+    }
+
+private:
+    void setTo(const TranscodingRequestParcel& parcel) {
+        sourceFilePath = parcel.sourceFilePath;
+        destinationFilePath = parcel.destinationFilePath;
+        transcodingType = parcel.transcodingType;
+        requestedVideoTrackFormat = parcel.requestedVideoTrackFormat;
+        priority = parcel.priority;
+        requestProgressUpdate = parcel.requestProgressUpdate;
+        requestJobEventUpdate = parcel.requestJobEventUpdate;
+        isForTesting = parcel.isForTesting;
+        testConfig = parcel.testConfig;
+    }
+};
+
+}  // namespace android
+#endif  // ANDROID_MEDIA_TRANSCODING_REQUEST_H
diff --git a/media/libmediatranscoding/include/media/TranscodingUidPolicy.h b/media/libmediatranscoding/include/media/TranscodingUidPolicy.h
new file mode 100644
index 0000000..27dadd2
--- /dev/null
+++ b/media/libmediatranscoding/include/media/TranscodingUidPolicy.h
@@ -0,0 +1,71 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_MEDIA_TRANSCODING_UID_POLICY_H
+#define ANDROID_MEDIA_TRANSCODING_UID_POLICY_H
+
+#include <aidl/android/media/ITranscodingClient.h>
+#include <aidl/android/media/ITranscodingClientCallback.h>
+#include <media/UidPolicyInterface.h>
+#include <sys/types.h>
+#include <utils/Condition.h>
+#include <utils/RefBase.h>
+#include <utils/String8.h>
+#include <utils/Vector.h>
+
+#include <map>
+#include <mutex>
+#include <unordered_map>
+#include <unordered_set>
+
+namespace android {
+
+class ActivityManager;
+// Observer for UID lifecycle and provide information about the uid's app
+// priority used by the job scheduler.
+class TranscodingUidPolicy : public UidPolicyInterface {
+public:
+    explicit TranscodingUidPolicy();
+    ~TranscodingUidPolicy();
+
+    // UidPolicyInterface
+    bool isUidOnTop(uid_t uid) override;
+    void registerMonitorUid(uid_t uid) override;
+    void unregisterMonitorUid(uid_t uid) override;
+    std::unordered_set<uid_t> getTopUids() const override;
+    void setCallback(const std::shared_ptr<UidPolicyCallbackInterface>& cb) override;
+
+private:
+    void onUidStateChanged(uid_t uid, int32_t procState);
+    void setUidObserverRegistered(bool registerd);
+    void registerSelf();
+    void unregisterSelf();
+    int32_t getProcState_l(uid_t uid) NO_THREAD_SAFETY_ANALYSIS;
+    void updateTopUid_l() NO_THREAD_SAFETY_ANALYSIS;
+
+    struct UidObserver;
+    mutable Mutex mUidLock;
+    std::shared_ptr<ActivityManager> mAm;
+    sp<UidObserver> mUidObserver;
+    bool mRegistered GUARDED_BY(mUidLock);
+    int32_t mTopUidState GUARDED_BY(mUidLock);
+    std::unordered_map<uid_t, int32_t> mUidStateMap GUARDED_BY(mUidLock);
+    std::map<int32_t, std::unordered_set<uid_t>> mStateUidMap GUARDED_BY(mUidLock);
+    std::weak_ptr<UidPolicyCallbackInterface> mUidPolicyCallback;
+};  // class TranscodingUidPolicy
+
+}  // namespace android
+#endif  // ANDROID_MEDIA_TRANSCODING_SERVICE_H
diff --git a/media/libmediatranscoding/include/media/UidPolicyInterface.h b/media/libmediatranscoding/include/media/UidPolicyInterface.h
new file mode 100644
index 0000000..dc28027
--- /dev/null
+++ b/media/libmediatranscoding/include/media/UidPolicyInterface.h
@@ -0,0 +1,62 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_MEDIA_UID_POLICY_INTERFACE_H
+#define ANDROID_MEDIA_UID_POLICY_INTERFACE_H
+
+#include <unordered_set>
+
+namespace android {
+
+class UidPolicyCallbackInterface;
+
+// Interface for the scheduler to query a uid's info.
+class UidPolicyInterface {
+public:
+    // Instruct the uid policy to start monitoring a uid.
+    virtual void registerMonitorUid(uid_t uid) = 0;
+    // Instruct the uid policy to stop monitoring a uid.
+    virtual void unregisterMonitorUid(uid_t uid) = 0;
+    // Whether a uid is among the set of uids that's currently top priority.
+    virtual bool isUidOnTop(uid_t uid) = 0;
+    // Retrieves the set of uids that's currently top priority.
+    virtual std::unordered_set<uid_t> getTopUids() const = 0;
+    // Set the associated callback interface to send the events when uid states change.
+    virtual void setCallback(const std::shared_ptr<UidPolicyCallbackInterface>& cb) = 0;
+
+protected:
+    virtual ~UidPolicyInterface() = default;
+};
+
+// Interface for notifying the scheduler of a change in uid states or
+// transcoding resource availability.
+class UidPolicyCallbackInterface {
+public:
+    // Called when the set of uids that's top priority among the uids of interest
+    // has changed. The receiver of this callback should adjust accordingly.
+    virtual void onTopUidsChanged(const std::unordered_set<uid_t>& uids) = 0;
+
+    // Called when resources become available for transcoding use. The scheduler
+    // may use this as a signal to attempt restart transcoding activity that
+    // were previously paused due to temporary resource loss.
+    virtual void onResourceAvailable() = 0;
+
+protected:
+    virtual ~UidPolicyCallbackInterface() = default;
+};
+
+}  // namespace android
+#endif  // ANDROID_MEDIA_UID_POLICY_INTERFACE_H
diff --git a/media/libmediatranscoding/tests/AdjustableMaxPriorityQueue_tests.cpp b/media/libmediatranscoding/tests/AdjustableMaxPriorityQueue_tests.cpp
index d58af4e..2e49f32 100644
--- a/media/libmediatranscoding/tests/AdjustableMaxPriorityQueue_tests.cpp
+++ b/media/libmediatranscoding/tests/AdjustableMaxPriorityQueue_tests.cpp
@@ -36,7 +36,7 @@
 namespace android {
 
 class IntUniquePtrComp {
-   public:
+public:
     bool operator()(const std::unique_ptr<int>& lhs, const std::unique_ptr<int>& rhs) const {
         return *lhs < *rhs;
     }
@@ -233,7 +233,7 @@
     // The job is arranging according to priority with highest priority comes first.
     // For the job with the same priority, the job with early createTime will come first.
     class TranscodingJobComp {
-       public:
+    public:
         bool operator()(const std::unique_ptr<TranscodingJob>& lhs,
                         const std::unique_ptr<TranscodingJob>& rhs) const {
             if (lhs->priority != rhs->priority) {
diff --git a/media/libmediatranscoding/tests/Android.bp b/media/libmediatranscoding/tests/Android.bp
index 8191b00..b54022a 100644
--- a/media/libmediatranscoding/tests/Android.bp
+++ b/media/libmediatranscoding/tests/Android.bp
@@ -38,6 +38,16 @@
 }
 
 //
+// TranscodingJobScheduler unit test
+//
+cc_test {
+    name: "TranscodingJobScheduler_tests",
+    defaults: ["libmediatranscoding_test_defaults"],
+
+    srcs: ["TranscodingJobScheduler_tests.cpp"],
+}
+
+//
 // AdjustableMaxPriorityQueue unit test
 //
 cc_test {
@@ -45,4 +55,4 @@
     defaults: ["libmediatranscoding_test_defaults"],
 
     srcs: ["AdjustableMaxPriorityQueue_tests.cpp"],
-}
\ No newline at end of file
+}
diff --git a/media/libmediatranscoding/tests/TranscodingClientManager_tests.cpp b/media/libmediatranscoding/tests/TranscodingClientManager_tests.cpp
index 5d2419d..1583325 100644
--- a/media/libmediatranscoding/tests/TranscodingClientManager_tests.cpp
+++ b/media/libmediatranscoding/tests/TranscodingClientManager_tests.cpp
@@ -19,52 +19,66 @@
 // #define LOG_NDEBUG 0
 #define LOG_TAG "TranscodingClientManagerTest"
 
-#include <aidl/android/media/BnTranscodingServiceClient.h>
+#include <aidl/android/media/BnTranscodingClientCallback.h>
 #include <aidl/android/media/IMediaTranscodingService.h>
-#include <aidl/android/media/ITranscodingServiceClient.h>
 #include <android-base/logging.h>
 #include <android/binder_manager.h>
 #include <android/binder_process.h>
 #include <gtest/gtest.h>
+#include <media/SchedulerClientInterface.h>
 #include <media/TranscodingClientManager.h>
+#include <media/TranscodingRequest.h>
 #include <utils/Log.h>
 
+#include <list>
+
 namespace android {
 
 using Status = ::ndk::ScopedAStatus;
-using aidl::android::media::BnTranscodingServiceClient;
-using aidl::android::media::IMediaTranscodingService;
-using aidl::android::media::ITranscodingServiceClient;
+using ::aidl::android::media::BnTranscodingClientCallback;
+using ::aidl::android::media::IMediaTranscodingService;
+using ::aidl::android::media::TranscodingErrorCode;
+using ::aidl::android::media::TranscodingJobParcel;
+using ::aidl::android::media::TranscodingJobPriority;
+using ::aidl::android::media::TranscodingRequestParcel;
+using ::aidl::android::media::TranscodingResultParcel;
 
-constexpr int32_t kInvalidClientId = -1;
-constexpr int32_t kInvalidClientPid = -1;
-constexpr int32_t kInvalidClientUid = -1;
-constexpr const char* kInvalidClientOpPackageName = "";
+constexpr pid_t kInvalidClientPid = -1;
+constexpr const char* kInvalidClientName = "";
+constexpr const char* kInvalidClientPackage = "";
 
-constexpr int32_t kClientId = 1;
-constexpr int32_t kClientPid = 2;
-constexpr int32_t kClientUid = 3;
-constexpr const char* kClientOpPackageName = "TestClient";
+constexpr pid_t kClientPid = 2;
+constexpr uid_t kClientUid = 3;
+constexpr const char* kClientName = "TestClientName";
+constexpr const char* kClientPackage = "TestClientPackage";
 
-struct TestClient : public BnTranscodingServiceClient {
-    TestClient(const std::shared_ptr<IMediaTranscodingService>& service) : mService(service) {
-        ALOGD("TestClient Created");
-    }
+#define JOB(n) (n)
 
-    Status getName(std::string* _aidl_return) override {
-        *_aidl_return = "test_client";
+struct TestClientCallback : public BnTranscodingClientCallback {
+    TestClientCallback() { ALOGI("TestClientCallback Created"); }
+
+    virtual ~TestClientCallback() { ALOGI("TestClientCallback destroyed"); };
+
+    Status openFileDescriptor(const std::string& /*in_fileUri*/, const std::string& /*in_mode*/,
+                              ::ndk::ScopedFileDescriptor* /*_aidl_return*/) override {
         return Status::ok();
     }
 
-    Status onTranscodingFinished(
-            int32_t /* in_jobId */,
-            const ::aidl::android::media::TranscodingResultParcel& /* in_result */) override {
+    Status onTranscodingStarted(int32_t /*in_jobId*/) override { return Status::ok(); }
+
+    Status onTranscodingPaused(int32_t /*in_jobId*/) override { return Status::ok(); }
+
+    Status onTranscodingResumed(int32_t /*in_jobId*/) override { return Status::ok(); }
+
+    Status onTranscodingFinished(int32_t in_jobId,
+                                 const TranscodingResultParcel& in_result) override {
+        EXPECT_EQ(in_jobId, in_result.jobId);
+        mEventQueue.push_back(Finished(in_jobId));
         return Status::ok();
     }
 
-    Status onTranscodingFailed(
-            int32_t /* in_jobId */,
-            ::aidl::android::media::TranscodingErrorCode /*in_errorCode */) override {
+    Status onTranscodingFailed(int32_t in_jobId, TranscodingErrorCode /*in_errorCode */) override {
+        mEventQueue.push_back(Failed(in_jobId));
         return Status::ok();
     }
 
@@ -77,204 +91,444 @@
         return Status::ok();
     }
 
-    virtual ~TestClient() { ALOGI("TestClient destroyed"); };
+    struct Event {
+        enum {
+            NoEvent,
+            Finished,
+            Failed,
+        } type;
+        JobIdType jobId;
+    };
 
-   private:
-    std::shared_ptr<IMediaTranscodingService> mService;
-    TestClient(const TestClient&) = delete;
-    TestClient& operator=(const TestClient&) = delete;
+    static constexpr Event NoEvent = {Event::NoEvent, 0};
+#define DECLARE_EVENT(action) \
+    static Event action(JobIdType jobId) { return {Event::action, jobId}; }
+
+    DECLARE_EVENT(Finished);
+    DECLARE_EVENT(Failed);
+
+    const Event& popEvent() {
+        if (mEventQueue.empty()) {
+            mPoppedEvent = NoEvent;
+        } else {
+            mPoppedEvent = *mEventQueue.begin();
+            mEventQueue.pop_front();
+        }
+        return mPoppedEvent;
+    }
+
+private:
+    Event mPoppedEvent;
+    std::list<Event> mEventQueue;
+
+    TestClientCallback(const TestClientCallback&) = delete;
+    TestClientCallback& operator=(const TestClientCallback&) = delete;
+};
+
+bool operator==(const TestClientCallback::Event& lhs, const TestClientCallback::Event& rhs) {
+    return lhs.type == rhs.type && lhs.jobId == rhs.jobId;
+}
+
+struct TestScheduler : public SchedulerClientInterface {
+    TestScheduler() { ALOGI("TestScheduler Created"); }
+
+    virtual ~TestScheduler() { ALOGI("TestScheduler Destroyed"); }
+
+    bool submit(ClientIdType clientId, JobIdType jobId, uid_t /*uid*/,
+                const TranscodingRequestParcel& request,
+                const std::weak_ptr<ITranscodingClientCallback>& clientCallback) override {
+        JobKeyType jobKey = std::make_pair(clientId, jobId);
+        if (mJobs.count(jobKey) > 0) {
+            return false;
+        }
+
+        // This is the secret name we'll check, to test error propagation from
+        // the scheduler back to client.
+        if (request.sourceFilePath == "bad_source_file") {
+            return false;
+        }
+
+        mJobs[jobKey].request = request;
+        mJobs[jobKey].callback = clientCallback;
+
+        mLastJob = jobKey;
+        return true;
+    }
+
+    bool cancel(ClientIdType clientId, JobIdType jobId) override {
+        JobKeyType jobKey = std::make_pair(clientId, jobId);
+
+        if (mJobs.count(jobKey) == 0) {
+            return false;
+        }
+        mJobs.erase(jobKey);
+        return true;
+    }
+
+    bool getJob(ClientIdType clientId, JobIdType jobId,
+                TranscodingRequestParcel* request) override {
+        JobKeyType jobKey = std::make_pair(clientId, jobId);
+        if (mJobs.count(jobKey) == 0) {
+            return false;
+        }
+
+        *(TranscodingRequest*)request = mJobs[jobKey].request;
+        return true;
+    }
+
+    void finishLastJob() {
+        auto it = mJobs.find(mLastJob);
+        if (it == mJobs.end()) {
+            return;
+        }
+        {
+            auto clientCallback = it->second.callback.lock();
+            if (clientCallback != nullptr) {
+                clientCallback->onTranscodingFinished(
+                        mLastJob.second,
+                        TranscodingResultParcel({mLastJob.second, 0, std::nullopt}));
+            }
+        }
+        mJobs.erase(it);
+    }
+
+    void abortLastJob() {
+        auto it = mJobs.find(mLastJob);
+        if (it == mJobs.end()) {
+            return;
+        }
+        {
+            auto clientCallback = it->second.callback.lock();
+            if (clientCallback != nullptr) {
+                clientCallback->onTranscodingFailed(mLastJob.second,
+                                                    TranscodingErrorCode::kUnknown);
+            }
+        }
+        mJobs.erase(it);
+    }
+
+    struct Job {
+        TranscodingRequest request;
+        std::weak_ptr<ITranscodingClientCallback> callback;
+    };
+
+    typedef std::pair<ClientIdType, JobIdType> JobKeyType;
+    std::map<JobKeyType, Job> mJobs;
+    JobKeyType mLastJob;
 };
 
 class TranscodingClientManagerTest : public ::testing::Test {
-   public:
-    TranscodingClientManagerTest() : mClientManager(TranscodingClientManager::getInstance()) {
+public:
+    TranscodingClientManagerTest()
+          : mScheduler(new TestScheduler()),
+            mClientManager(new TranscodingClientManager(mScheduler)) {
         ALOGD("TranscodingClientManagerTest created");
     }
 
     void SetUp() override {
-        ::ndk::SpAIBinder binder(AServiceManager_getService("media.transcoding"));
-        mService = IMediaTranscodingService::fromBinder(binder);
-        if (mService == nullptr) {
-            ALOGE("Failed to connect to the media.trascoding service.");
-            return;
-        }
-
-        mTestClient = ::ndk::SharedRefBase::make<TestClient>(mService);
+        mClientCallback1 = ::ndk::SharedRefBase::make<TestClientCallback>();
+        mClientCallback2 = ::ndk::SharedRefBase::make<TestClientCallback>();
+        mClientCallback3 = ::ndk::SharedRefBase::make<TestClientCallback>();
     }
 
-    void TearDown() override {
-        ALOGI("TranscodingClientManagerTest tear down");
-        mService = nullptr;
-    }
+    void TearDown() override { ALOGI("TranscodingClientManagerTest tear down"); }
 
     ~TranscodingClientManagerTest() { ALOGD("TranscodingClientManagerTest destroyed"); }
 
-    TranscodingClientManager& mClientManager;
-    std::shared_ptr<ITranscodingServiceClient> mTestClient = nullptr;
-    std::shared_ptr<IMediaTranscodingService> mService = nullptr;
+    void addMultipleClients() {
+        EXPECT_EQ(mClientManager->addClient(mClientCallback1, kClientPid, kClientUid, kClientName,
+                                            kClientPackage, &mClient1),
+                  OK);
+        EXPECT_NE(mClient1, nullptr);
+
+        EXPECT_EQ(mClientManager->addClient(mClientCallback2, kClientPid, kClientUid, kClientName,
+                                            kClientPackage, &mClient2),
+                  OK);
+        EXPECT_NE(mClient2, nullptr);
+
+        EXPECT_EQ(mClientManager->addClient(mClientCallback3, kClientPid, kClientUid, kClientName,
+                                            kClientPackage, &mClient3),
+                  OK);
+        EXPECT_NE(mClient3, nullptr);
+
+        EXPECT_EQ(mClientManager->getNumOfClients(), 3);
+    }
+
+    void unregisterMultipleClients() {
+        EXPECT_TRUE(mClient1->unregister().isOk());
+        EXPECT_TRUE(mClient2->unregister().isOk());
+        EXPECT_TRUE(mClient3->unregister().isOk());
+        EXPECT_EQ(mClientManager->getNumOfClients(), 0);
+    }
+
+    std::shared_ptr<TestScheduler> mScheduler;
+    std::shared_ptr<TranscodingClientManager> mClientManager;
+    std::shared_ptr<ITranscodingClient> mClient1;
+    std::shared_ptr<ITranscodingClient> mClient2;
+    std::shared_ptr<ITranscodingClient> mClient3;
+    std::shared_ptr<TestClientCallback> mClientCallback1;
+    std::shared_ptr<TestClientCallback> mClientCallback2;
+    std::shared_ptr<TestClientCallback> mClientCallback3;
 };
 
-TEST_F(TranscodingClientManagerTest, TestAddingWithInvalidClientId) {
-    std::shared_ptr<ITranscodingServiceClient> client =
-            ::ndk::SharedRefBase::make<TestClient>(mService);
-
-    // Create a client with invalid client id.
-    std::unique_ptr<TranscodingClientManager::ClientInfo> clientInfo =
-            std::make_unique<TranscodingClientManager::ClientInfo>(
-                    client, kInvalidClientId, kClientPid, kClientUid, kClientOpPackageName);
-
-    // Add the client to the manager and expect failure.
-    status_t err = mClientManager.addClient(std::move(clientInfo));
-    EXPECT_TRUE(err != OK);
+TEST_F(TranscodingClientManagerTest, TestAddingWithInvalidClientCallback) {
+    // Add a client with null callback and expect failure.
+    std::shared_ptr<ITranscodingClient> client;
+    status_t err = mClientManager->addClient(nullptr, kClientPid, kClientUid, kClientName,
+                                             kClientPackage, &client);
+    EXPECT_EQ(err, IMediaTranscodingService::ERROR_ILLEGAL_ARGUMENT);
 }
 
 TEST_F(TranscodingClientManagerTest, TestAddingWithInvalidClientPid) {
-    std::shared_ptr<ITranscodingServiceClient> client =
-            ::ndk::SharedRefBase::make<TestClient>(mService);
-
-    // Create a client with invalid Pid.
-    std::unique_ptr<TranscodingClientManager::ClientInfo> clientInfo =
-            std::make_unique<TranscodingClientManager::ClientInfo>(
-                    client, kClientId, kInvalidClientPid, kClientUid, kClientOpPackageName);
-
-    // Add the client to the manager and expect failure.
-    status_t err = mClientManager.addClient(std::move(clientInfo));
-    EXPECT_TRUE(err != OK);
+    // Add a client with invalid Pid and expect failure.
+    std::shared_ptr<ITranscodingClient> client;
+    status_t err = mClientManager->addClient(mClientCallback1, kInvalidClientPid, kClientUid,
+                                             kClientName, kClientPackage, &client);
+    EXPECT_EQ(err, IMediaTranscodingService::ERROR_ILLEGAL_ARGUMENT);
 }
 
-TEST_F(TranscodingClientManagerTest, TestAddingWithInvalidClientUid) {
-    std::shared_ptr<ITranscodingServiceClient> client =
-            ::ndk::SharedRefBase::make<TestClient>(mService);
-
-    // Create a client with invalid Uid.
-    std::unique_ptr<TranscodingClientManager::ClientInfo> clientInfo =
-            std::make_unique<TranscodingClientManager::ClientInfo>(
-                    client, kClientId, kClientPid, kInvalidClientUid, kClientOpPackageName);
-
-    // Add the client to the manager and expect failure.
-    status_t err = mClientManager.addClient(std::move(clientInfo));
-    EXPECT_TRUE(err != OK);
+TEST_F(TranscodingClientManagerTest, TestAddingWithInvalidClientName) {
+    // Add a client with invalid name and expect failure.
+    std::shared_ptr<ITranscodingClient> client;
+    status_t err = mClientManager->addClient(mClientCallback1, kClientPid, kClientUid,
+                                             kInvalidClientName, kClientPackage, &client);
+    EXPECT_EQ(err, IMediaTranscodingService::ERROR_ILLEGAL_ARGUMENT);
 }
 
 TEST_F(TranscodingClientManagerTest, TestAddingWithInvalidClientPackageName) {
-    std::shared_ptr<ITranscodingServiceClient> client =
-            ::ndk::SharedRefBase::make<TestClient>(mService);
-
-    // Create a client with invalid packagename.
-    std::unique_ptr<TranscodingClientManager::ClientInfo> clientInfo =
-            std::make_unique<TranscodingClientManager::ClientInfo>(
-                    client, kClientId, kClientPid, kClientUid, kInvalidClientOpPackageName);
-
-    // Add the client to the manager and expect failure.
-    status_t err = mClientManager.addClient(std::move(clientInfo));
-    EXPECT_TRUE(err != OK);
+    // Add a client with invalid packagename and expect failure.
+    std::shared_ptr<ITranscodingClient> client;
+    status_t err = mClientManager->addClient(mClientCallback1, kClientPid, kClientUid, kClientName,
+                                             kInvalidClientPackage, &client);
+    EXPECT_EQ(err, IMediaTranscodingService::ERROR_ILLEGAL_ARGUMENT);
 }
 
 TEST_F(TranscodingClientManagerTest, TestAddingValidClient) {
-    std::shared_ptr<ITranscodingServiceClient> client1 =
-            ::ndk::SharedRefBase::make<TestClient>(mService);
+    // Add a valid client, should succeed.
+    std::shared_ptr<ITranscodingClient> client;
+    status_t err = mClientManager->addClient(mClientCallback1, kClientPid, kClientUid, kClientName,
+                                             kClientPackage, &client);
+    EXPECT_EQ(err, OK);
+    EXPECT_NE(client.get(), nullptr);
+    EXPECT_EQ(mClientManager->getNumOfClients(), 1);
 
-    std::unique_ptr<TranscodingClientManager::ClientInfo> clientInfo =
-            std::make_unique<TranscodingClientManager::ClientInfo>(
-                    client1, kClientId, kClientPid, kClientUid, kClientOpPackageName);
-
-    status_t err = mClientManager.addClient(std::move(clientInfo));
-    EXPECT_TRUE(err == OK);
-
-    size_t numOfClients = mClientManager.getNumOfClients();
-    EXPECT_EQ(numOfClients, 1);
-
-    err = mClientManager.removeClient(kClientId);
-    EXPECT_TRUE(err == OK);
+    // Unregister client, should succeed.
+    Status status = client->unregister();
+    EXPECT_TRUE(status.isOk());
+    EXPECT_EQ(mClientManager->getNumOfClients(), 0);
 }
 
 TEST_F(TranscodingClientManagerTest, TestAddingDupliacteClient) {
-    std::shared_ptr<ITranscodingServiceClient> client1 =
-            ::ndk::SharedRefBase::make<TestClient>(mService);
+    std::shared_ptr<ITranscodingClient> client;
+    status_t err = mClientManager->addClient(mClientCallback1, kClientPid, kClientUid, kClientName,
+                                             kClientPackage, &client);
+    EXPECT_EQ(err, OK);
+    EXPECT_NE(client.get(), nullptr);
+    EXPECT_EQ(mClientManager->getNumOfClients(), 1);
 
-    std::unique_ptr<TranscodingClientManager::ClientInfo> clientInfo =
-            std::make_unique<TranscodingClientManager::ClientInfo>(
-                    client1, kClientId, kClientPid, kClientUid, kClientOpPackageName);
+    std::shared_ptr<ITranscodingClient> dupClient;
+    err = mClientManager->addClient(mClientCallback1, kClientPid, kClientUid, "dupClient",
+                                    "dupPackage", &dupClient);
+    EXPECT_EQ(err, IMediaTranscodingService::ERROR_ALREADY_EXISTS);
+    EXPECT_EQ(dupClient.get(), nullptr);
+    EXPECT_EQ(mClientManager->getNumOfClients(), 1);
 
-    status_t err = mClientManager.addClient(std::move(clientInfo));
-    EXPECT_TRUE(err == OK);
+    Status status = client->unregister();
+    EXPECT_TRUE(status.isOk());
+    EXPECT_EQ(mClientManager->getNumOfClients(), 0);
 
-    err = mClientManager.addClient(std::move(clientInfo));
-    EXPECT_TRUE(err != OK);
+    err = mClientManager->addClient(mClientCallback1, kClientPid, kClientUid, "dupClient",
+                                    "dupPackage", &dupClient);
+    EXPECT_EQ(err, OK);
+    EXPECT_NE(dupClient.get(), nullptr);
+    EXPECT_EQ(mClientManager->getNumOfClients(), 1);
 
-    err = mClientManager.removeClient(kClientId);
-    EXPECT_TRUE(err == OK);
+    status = dupClient->unregister();
+    EXPECT_TRUE(status.isOk());
+    EXPECT_EQ(mClientManager->getNumOfClients(), 0);
 }
 
 TEST_F(TranscodingClientManagerTest, TestAddingMultipleClient) {
-    std::shared_ptr<ITranscodingServiceClient> client1 =
-            ::ndk::SharedRefBase::make<TestClient>(mService);
-
-    std::unique_ptr<TranscodingClientManager::ClientInfo> clientInfo1 =
-            std::make_unique<TranscodingClientManager::ClientInfo>(
-                    client1, kClientId, kClientPid, kClientUid, kClientOpPackageName);
-
-    status_t err = mClientManager.addClient(std::move(clientInfo1));
-    EXPECT_TRUE(err == OK);
-
-    std::shared_ptr<ITranscodingServiceClient> client2 =
-            ::ndk::SharedRefBase::make<TestClient>(mService);
-
-    std::unique_ptr<TranscodingClientManager::ClientInfo> clientInfo2 =
-            std::make_unique<TranscodingClientManager::ClientInfo>(
-                    client2, kClientId + 1, kClientPid, kClientUid, kClientOpPackageName);
-
-    err = mClientManager.addClient(std::move(clientInfo2));
-    EXPECT_TRUE(err == OK);
-
-    std::shared_ptr<ITranscodingServiceClient> client3 =
-            ::ndk::SharedRefBase::make<TestClient>(mService);
-
-    // Create a client with invalid packagename.
-    std::unique_ptr<TranscodingClientManager::ClientInfo> clientInfo3 =
-            std::make_unique<TranscodingClientManager::ClientInfo>(
-                    client3, kClientId + 2, kClientPid, kClientUid, kClientOpPackageName);
-
-    err = mClientManager.addClient(std::move(clientInfo3));
-    EXPECT_TRUE(err == OK);
-
-    size_t numOfClients = mClientManager.getNumOfClients();
-    EXPECT_EQ(numOfClients, 3);
-
-    err = mClientManager.removeClient(kClientId);
-    EXPECT_TRUE(err == OK);
-
-    err = mClientManager.removeClient(kClientId + 1);
-    EXPECT_TRUE(err == OK);
-
-    err = mClientManager.removeClient(kClientId + 2);
-    EXPECT_TRUE(err == OK);
+    addMultipleClients();
+    unregisterMultipleClients();
 }
 
-TEST_F(TranscodingClientManagerTest, TestRemovingNonExistClient) {
-    status_t err = mClientManager.removeClient(kInvalidClientId);
-    EXPECT_TRUE(err != OK);
+TEST_F(TranscodingClientManagerTest, TestSubmitCancelGetJobs) {
+    addMultipleClients();
 
-    err = mClientManager.removeClient(1000 /* clientId */);
-    EXPECT_TRUE(err != OK);
+    // Test jobId assignment.
+    TranscodingRequestParcel request;
+    request.sourceFilePath = "test_source_file_0";
+    request.destinationFilePath = "test_desintaion_file_0";
+    TranscodingJobParcel job;
+    bool result;
+    EXPECT_TRUE(mClient1->submitRequest(request, &job, &result).isOk());
+    EXPECT_TRUE(result);
+    EXPECT_EQ(job.jobId, JOB(0));
+
+    request.sourceFilePath = "test_source_file_1";
+    request.destinationFilePath = "test_desintaion_file_1";
+    EXPECT_TRUE(mClient1->submitRequest(request, &job, &result).isOk());
+    EXPECT_TRUE(result);
+    EXPECT_EQ(job.jobId, JOB(1));
+
+    request.sourceFilePath = "test_source_file_2";
+    request.destinationFilePath = "test_desintaion_file_2";
+    EXPECT_TRUE(mClient1->submitRequest(request, &job, &result).isOk());
+    EXPECT_TRUE(result);
+    EXPECT_EQ(job.jobId, JOB(2));
+
+    // Test submit bad request (no valid sourceFilePath) fails.
+    TranscodingRequestParcel badRequest;
+    badRequest.sourceFilePath = "bad_source_file";
+    badRequest.destinationFilePath = "bad_destination_file";
+    EXPECT_TRUE(mClient1->submitRequest(badRequest, &job, &result).isOk());
+    EXPECT_FALSE(result);
+
+    // Test get jobs by id.
+    EXPECT_TRUE(mClient1->getJobWithId(JOB(2), &job, &result).isOk());
+    EXPECT_EQ(job.jobId, JOB(2));
+    EXPECT_EQ(job.request.sourceFilePath, "test_source_file_2");
+    EXPECT_TRUE(result);
+
+    // Test get jobs by invalid id fails.
+    EXPECT_TRUE(mClient1->getJobWithId(JOB(100), &job, &result).isOk());
+    EXPECT_FALSE(result);
+
+    // Test cancel non-existent job fail.
+    EXPECT_TRUE(mClient2->cancelJob(JOB(100), &result).isOk());
+    EXPECT_FALSE(result);
+
+    // Test cancel valid jobId in arbitrary order.
+    EXPECT_TRUE(mClient1->cancelJob(JOB(2), &result).isOk());
+    EXPECT_TRUE(result);
+
+    EXPECT_TRUE(mClient1->cancelJob(JOB(0), &result).isOk());
+    EXPECT_TRUE(result);
+
+    EXPECT_TRUE(mClient1->cancelJob(JOB(1), &result).isOk());
+    EXPECT_TRUE(result);
+
+    // Test cancel job again fails.
+    EXPECT_TRUE(mClient1->cancelJob(JOB(1), &result).isOk());
+    EXPECT_FALSE(result);
+
+    // Test get job after cancel fails.
+    EXPECT_TRUE(mClient1->getJobWithId(JOB(2), &job, &result).isOk());
+    EXPECT_FALSE(result);
+
+    // Test jobId independence for each client.
+    EXPECT_TRUE(mClient2->submitRequest(request, &job, &result).isOk());
+    EXPECT_TRUE(result);
+    EXPECT_EQ(job.jobId, JOB(0));
+
+    EXPECT_TRUE(mClient2->submitRequest(request, &job, &result).isOk());
+    EXPECT_TRUE(result);
+    EXPECT_EQ(job.jobId, JOB(1));
+
+    unregisterMultipleClients();
 }
 
-TEST_F(TranscodingClientManagerTest, TestCheckClientWithClientId) {
-    std::shared_ptr<ITranscodingServiceClient> client =
-            ::ndk::SharedRefBase::make<TestClient>(mService);
+TEST_F(TranscodingClientManagerTest, TestClientCallback) {
+    addMultipleClients();
 
-    std::unique_ptr<TranscodingClientManager::ClientInfo> clientInfo =
-            std::make_unique<TranscodingClientManager::ClientInfo>(
-                    client, kClientId, kClientPid, kClientUid, kClientOpPackageName);
+    TranscodingRequestParcel request;
+    request.sourceFilePath = "test_source_file_name";
+    request.destinationFilePath = "test_destination_file_name";
+    TranscodingJobParcel job;
+    bool result;
+    EXPECT_TRUE(mClient1->submitRequest(request, &job, &result).isOk());
+    EXPECT_TRUE(result);
+    EXPECT_EQ(job.jobId, JOB(0));
 
-    status_t err = mClientManager.addClient(std::move(clientInfo));
-    EXPECT_TRUE(err == OK);
+    mScheduler->finishLastJob();
+    EXPECT_EQ(mClientCallback1->popEvent(), TestClientCallback::Finished(job.jobId));
 
-    bool res = mClientManager.isClientIdRegistered(kClientId);
-    EXPECT_TRUE(res);
+    EXPECT_TRUE(mClient1->submitRequest(request, &job, &result).isOk());
+    EXPECT_TRUE(result);
+    EXPECT_EQ(job.jobId, JOB(1));
 
-    res = mClientManager.isClientIdRegistered(kInvalidClientId);
-    EXPECT_FALSE(res);
+    mScheduler->abortLastJob();
+    EXPECT_EQ(mClientCallback1->popEvent(), TestClientCallback::Failed(job.jobId));
+
+    EXPECT_TRUE(mClient1->submitRequest(request, &job, &result).isOk());
+    EXPECT_TRUE(result);
+    EXPECT_EQ(job.jobId, JOB(2));
+
+    EXPECT_TRUE(mClient2->submitRequest(request, &job, &result).isOk());
+    EXPECT_TRUE(result);
+    EXPECT_EQ(job.jobId, JOB(0));
+
+    mScheduler->finishLastJob();
+    EXPECT_EQ(mClientCallback2->popEvent(), TestClientCallback::Finished(job.jobId));
+
+    unregisterMultipleClients();
 }
 
-}  // namespace android
\ No newline at end of file
+TEST_F(TranscodingClientManagerTest, TestUseAfterUnregister) {
+    // Add a client.
+    std::shared_ptr<ITranscodingClient> client;
+    status_t err = mClientManager->addClient(mClientCallback1, kClientPid, kClientUid, kClientName,
+                                             kClientPackage, &client);
+    EXPECT_EQ(err, OK);
+    EXPECT_NE(client.get(), nullptr);
+
+    // Submit 2 requests, 1 offline and 1 realtime.
+    TranscodingRequestParcel request;
+    TranscodingJobParcel job;
+    bool result;
+
+    request.sourceFilePath = "test_source_file_0";
+    request.destinationFilePath = "test_destination_file_0";
+    request.priority = TranscodingJobPriority::kUnspecified;
+    EXPECT_TRUE(client->submitRequest(request, &job, &result).isOk() && result);
+    EXPECT_EQ(job.jobId, JOB(0));
+
+    request.sourceFilePath = "test_source_file_1";
+    request.destinationFilePath = "test_destination_file_1";
+    request.priority = TranscodingJobPriority::kNormal;
+    EXPECT_TRUE(client->submitRequest(request, &job, &result).isOk() && result);
+    EXPECT_EQ(job.jobId, JOB(1));
+
+    // Unregister client, should succeed.
+    Status status = client->unregister();
+    EXPECT_TRUE(status.isOk());
+
+    // Test submit new request after unregister, should fail with ERROR_DISCONNECTED.
+    request.sourceFilePath = "test_source_file_2";
+    request.destinationFilePath = "test_destination_file_2";
+    request.priority = TranscodingJobPriority::kNormal;
+    status = client->submitRequest(request, &job, &result);
+    EXPECT_FALSE(status.isOk());
+    EXPECT_EQ(status.getServiceSpecificError(), IMediaTranscodingService::ERROR_DISCONNECTED);
+
+    // Test cancel jobs after unregister, should fail with ERROR_DISCONNECTED
+    // regardless of realtime or offline job, or whether the jobId is valid.
+    status = client->cancelJob(JOB(0), &result);
+    EXPECT_FALSE(status.isOk());
+    EXPECT_EQ(status.getServiceSpecificError(), IMediaTranscodingService::ERROR_DISCONNECTED);
+
+    status = client->cancelJob(JOB(1), &result);
+    EXPECT_FALSE(status.isOk());
+    EXPECT_EQ(status.getServiceSpecificError(), IMediaTranscodingService::ERROR_DISCONNECTED);
+
+    status = client->cancelJob(JOB(2), &result);
+    EXPECT_FALSE(status.isOk());
+    EXPECT_EQ(status.getServiceSpecificError(), IMediaTranscodingService::ERROR_DISCONNECTED);
+
+    // Test get jobs, should fail with ERROR_DISCONNECTED regardless of realtime
+    // or offline job, or whether the jobId is valid.
+    status = client->getJobWithId(JOB(0), &job, &result);
+    EXPECT_FALSE(status.isOk());
+    EXPECT_EQ(status.getServiceSpecificError(), IMediaTranscodingService::ERROR_DISCONNECTED);
+
+    status = client->getJobWithId(JOB(1), &job, &result);
+    EXPECT_FALSE(status.isOk());
+    EXPECT_EQ(status.getServiceSpecificError(), IMediaTranscodingService::ERROR_DISCONNECTED);
+
+    status = client->getJobWithId(JOB(2), &job, &result);
+    EXPECT_FALSE(status.isOk());
+    EXPECT_EQ(status.getServiceSpecificError(), IMediaTranscodingService::ERROR_DISCONNECTED);
+}
+
+}  // namespace android
diff --git a/media/libmediatranscoding/tests/TranscodingJobScheduler_tests.cpp b/media/libmediatranscoding/tests/TranscodingJobScheduler_tests.cpp
new file mode 100644
index 0000000..d21b595
--- /dev/null
+++ b/media/libmediatranscoding/tests/TranscodingJobScheduler_tests.cpp
@@ -0,0 +1,602 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// Unit Test for TranscodingJobScheduler
+
+// #define LOG_NDEBUG 0
+#define LOG_TAG "TranscodingJobSchedulerTest"
+
+#include <aidl/android/media/BnTranscodingClientCallback.h>
+#include <aidl/android/media/IMediaTranscodingService.h>
+#include <aidl/android/media/ITranscodingClient.h>
+#include <aidl/android/media/ITranscodingClientCallback.h>
+#include <android-base/logging.h>
+#include <android/binder_manager.h>
+#include <android/binder_process.h>
+#include <gtest/gtest.h>
+#include <media/TranscodingClientManager.h>
+#include <media/TranscodingJobScheduler.h>
+#include <utils/Log.h>
+
+#include <unordered_set>
+
+namespace android {
+
+using Status = ::ndk::ScopedAStatus;
+using aidl::android::media::BnTranscodingClientCallback;
+using aidl::android::media::IMediaTranscodingService;
+using aidl::android::media::ITranscodingClient;
+using aidl::android::media::TranscodingRequestParcel;
+
+constexpr ClientIdType kClientId = 1000;
+constexpr JobIdType kClientJobId = 0;
+constexpr uid_t kClientUid = 5000;
+constexpr uid_t kInvalidUid = (uid_t)-1;
+
+#define CLIENT(n) (kClientId + (n))
+#define JOB(n) (kClientJobId + (n))
+#define UID(n) (kClientUid + (n))
+
+class TestUidPolicy : public UidPolicyInterface {
+public:
+    TestUidPolicy() = default;
+    virtual ~TestUidPolicy() = default;
+
+    // UidPolicyInterface
+    void registerMonitorUid(uid_t /*uid*/) override {}
+    void unregisterMonitorUid(uid_t /*uid*/) override {}
+    bool isUidOnTop(uid_t uid) override { return mTopUids.count(uid) > 0; }
+    std::unordered_set<uid_t> getTopUids() const override { return mTopUids; }
+    void setCallback(const std::shared_ptr<UidPolicyCallbackInterface>& cb) override {
+        mUidPolicyCallback = cb;
+    }
+    void setTop(uid_t uid) {
+        std::unordered_set<uid_t> uids = {uid};
+        setTop(uids);
+    }
+    void setTop(const std::unordered_set<uid_t>& uids) {
+        mTopUids = uids;
+        auto uidPolicyCb = mUidPolicyCallback.lock();
+        if (uidPolicyCb != nullptr) {
+            uidPolicyCb->onTopUidsChanged(mTopUids);
+        }
+    }
+
+    std::unordered_set<uid_t> mTopUids;
+    std::weak_ptr<UidPolicyCallbackInterface> mUidPolicyCallback;
+};
+
+class TestTranscoder : public TranscoderInterface {
+public:
+    TestTranscoder() : mLastError(TranscodingErrorCode::kUnknown) {}
+    virtual ~TestTranscoder() {}
+
+    // TranscoderInterface
+    void setCallback(const std::shared_ptr<TranscoderCallbackInterface>& /*cb*/) override {}
+
+    void start(ClientIdType clientId, JobIdType jobId, const TranscodingRequestParcel& /*request*/,
+               const std::shared_ptr<ITranscodingClientCallback>& /*clientCallback*/) override {
+        mEventQueue.push_back(Start(clientId, jobId));
+    }
+    void pause(ClientIdType clientId, JobIdType jobId) override {
+        mEventQueue.push_back(Pause(clientId, jobId));
+    }
+    void resume(ClientIdType clientId, JobIdType jobId, const TranscodingRequestParcel& /*request*/,
+                const std::shared_ptr<ITranscodingClientCallback>& /*clientCallback*/) override {
+        mEventQueue.push_back(Resume(clientId, jobId));
+    }
+    void stop(ClientIdType clientId, JobIdType jobId) override {
+        mEventQueue.push_back(Stop(clientId, jobId));
+    }
+
+    void onFinished(ClientIdType clientId, JobIdType jobId) {
+        mEventQueue.push_back(Finished(clientId, jobId));
+    }
+
+    void onFailed(ClientIdType clientId, JobIdType jobId, TranscodingErrorCode err) {
+        mLastError = err;
+        mEventQueue.push_back(Failed(clientId, jobId));
+    }
+
+    TranscodingErrorCode getLastError() {
+        TranscodingErrorCode result = mLastError;
+        mLastError = TranscodingErrorCode::kUnknown;
+        return result;
+    }
+
+    struct Event {
+        enum { NoEvent, Start, Pause, Resume, Stop, Finished, Failed } type;
+        ClientIdType clientId;
+        JobIdType jobId;
+    };
+
+    static constexpr Event NoEvent = {Event::NoEvent, 0, 0};
+
+#define DECLARE_EVENT(action)                                     \
+    static Event action(ClientIdType clientId, JobIdType jobId) { \
+        return {Event::action, clientId, jobId};                  \
+    }
+
+    DECLARE_EVENT(Start);
+    DECLARE_EVENT(Pause);
+    DECLARE_EVENT(Resume);
+    DECLARE_EVENT(Stop);
+    DECLARE_EVENT(Finished);
+    DECLARE_EVENT(Failed);
+
+    const Event& popEvent() {
+        if (mEventQueue.empty()) {
+            mPoppedEvent = NoEvent;
+        } else {
+            mPoppedEvent = *mEventQueue.begin();
+            mEventQueue.pop_front();
+        }
+        return mPoppedEvent;
+    }
+
+private:
+    Event mPoppedEvent;
+    std::list<Event> mEventQueue;
+    TranscodingErrorCode mLastError;
+};
+
+bool operator==(const TestTranscoder::Event& lhs, const TestTranscoder::Event& rhs) {
+    return lhs.type == rhs.type && lhs.clientId == rhs.clientId && lhs.jobId == rhs.jobId;
+}
+
+struct TestClientCallback : public BnTranscodingClientCallback {
+    TestClientCallback(TestTranscoder* owner, int64_t clientId)
+          : mOwner(owner), mClientId(clientId) {
+        ALOGD("TestClient Created");
+    }
+
+    Status openFileDescriptor(const std::string& /*in_fileUri*/, const std::string& /*in_mode*/,
+                              ::ndk::ScopedFileDescriptor* /*_aidl_return*/) override {
+        return Status::ok();
+    }
+
+    Status onTranscodingStarted(int32_t /*in_jobId*/) override { return Status::ok(); }
+
+    Status onTranscodingPaused(int32_t /*in_jobId*/) override { return Status::ok(); }
+
+    Status onTranscodingResumed(int32_t /*in_jobId*/) override { return Status::ok(); }
+
+    Status onTranscodingFinished(int32_t in_jobId,
+                                 const TranscodingResultParcel& in_result) override {
+        EXPECT_EQ(in_jobId, in_result.jobId);
+        ALOGD("TestClientCallback: received onTranscodingFinished");
+        mOwner->onFinished(mClientId, in_jobId);
+        return Status::ok();
+    }
+
+    Status onTranscodingFailed(int32_t in_jobId, TranscodingErrorCode in_errorCode) override {
+        mOwner->onFailed(mClientId, in_jobId, in_errorCode);
+        return Status::ok();
+    }
+
+    Status onAwaitNumberOfJobsChanged(int32_t /* in_jobId */, int32_t /* in_oldAwaitNumber */,
+                                      int32_t /* in_newAwaitNumber */) override {
+        return Status::ok();
+    }
+
+    Status onProgressUpdate(int32_t /* in_jobId */, int32_t /* in_progress */) override {
+        return Status::ok();
+    }
+
+    virtual ~TestClientCallback() { ALOGI("TestClient destroyed"); };
+
+private:
+    TestTranscoder* mOwner;
+    int64_t mClientId;
+    TestClientCallback(const TestClientCallback&) = delete;
+    TestClientCallback& operator=(const TestClientCallback&) = delete;
+};
+
+class TranscodingJobSchedulerTest : public ::testing::Test {
+public:
+    TranscodingJobSchedulerTest() { ALOGI("TranscodingJobSchedulerTest created"); }
+
+    void SetUp() override {
+        ALOGI("TranscodingJobSchedulerTest set up");
+        mTranscoder.reset(new TestTranscoder());
+        mUidPolicy.reset(new TestUidPolicy());
+        mScheduler.reset(new TranscodingJobScheduler(mTranscoder, mUidPolicy));
+        mUidPolicy->setCallback(mScheduler);
+
+        // Set priority only, ignore other fields for now.
+        mOfflineRequest.priority = TranscodingJobPriority::kUnspecified;
+        mRealtimeRequest.priority = TranscodingJobPriority::kHigh;
+        mClientCallback0 =
+                ::ndk::SharedRefBase::make<TestClientCallback>(mTranscoder.get(), CLIENT(0));
+        mClientCallback1 =
+                ::ndk::SharedRefBase::make<TestClientCallback>(mTranscoder.get(), CLIENT(1));
+        mClientCallback2 =
+                ::ndk::SharedRefBase::make<TestClientCallback>(mTranscoder.get(), CLIENT(2));
+        mClientCallback3 =
+                ::ndk::SharedRefBase::make<TestClientCallback>(mTranscoder.get(), CLIENT(3));
+    }
+
+    void TearDown() override { ALOGI("TranscodingJobSchedulerTest tear down"); }
+
+    ~TranscodingJobSchedulerTest() { ALOGD("TranscodingJobSchedulerTest destroyed"); }
+
+    std::shared_ptr<TestTranscoder> mTranscoder;
+    std::shared_ptr<TestUidPolicy> mUidPolicy;
+    std::shared_ptr<TranscodingJobScheduler> mScheduler;
+    TranscodingRequestParcel mOfflineRequest;
+    TranscodingRequestParcel mRealtimeRequest;
+    std::shared_ptr<TestClientCallback> mClientCallback0;
+    std::shared_ptr<TestClientCallback> mClientCallback1;
+    std::shared_ptr<TestClientCallback> mClientCallback2;
+    std::shared_ptr<TestClientCallback> mClientCallback3;
+};
+
+TEST_F(TranscodingJobSchedulerTest, TestSubmitJob) {
+    ALOGD("TestSubmitJob");
+
+    // Start with UID(1) on top.
+    mUidPolicy->setTop(UID(1));
+
+    // Submit offline job to CLIENT(0) in UID(0).
+    // Should start immediately (because this is the only job).
+    mScheduler->submit(CLIENT(0), JOB(0), UID(0), mOfflineRequest, mClientCallback0);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), 0));
+
+    // Submit real-time job to CLIENT(0).
+    // Should pause offline job and start new job,  even if UID(0) is not on top.
+    mScheduler->submit(CLIENT(0), JOB(1), UID(0), mRealtimeRequest, mClientCallback0);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Pause(CLIENT(0), JOB(0)));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), JOB(1)));
+
+    // Submit real-time job to CLIENT(0), should be queued after the previous job.
+    mScheduler->submit(CLIENT(0), JOB(2), UID(0), mRealtimeRequest, mClientCallback0);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+    // Submit real-time job to CLIENT(1) in same uid, should be queued after the previous job.
+    mScheduler->submit(CLIENT(1), JOB(0), UID(0), mRealtimeRequest, mClientCallback1);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+    // Submit real-time job to CLIENT(2) in UID(1).
+    // Should pause previous job and start new job, because UID(1) is (has been) top.
+    mScheduler->submit(CLIENT(2), JOB(0), UID(1), mRealtimeRequest, mClientCallback2);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Pause(CLIENT(0), JOB(1)));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(2), JOB(0)));
+
+    // Submit offline job, shouldn't generate any event.
+    mScheduler->submit(CLIENT(2), JOB(1), UID(1), mOfflineRequest, mClientCallback2);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+    // Bring UID(0) to top.
+    mUidPolicy->setTop(UID(0));
+    // Should pause current job, and resume last job in UID(0).
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Pause(CLIENT(2), JOB(0)));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Resume(CLIENT(0), JOB(1)));
+}
+
+TEST_F(TranscodingJobSchedulerTest, TestCancelJob) {
+    ALOGD("TestCancelJob");
+
+    // Submit real-time job JOB(0), should start immediately.
+    mScheduler->submit(CLIENT(0), JOB(0), UID(0), mRealtimeRequest, mClientCallback0);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), JOB(0)));
+
+    // Submit real-time job JOB(1), should not start.
+    mScheduler->submit(CLIENT(0), JOB(1), UID(0), mRealtimeRequest, mClientCallback0);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+    // Submit offline job JOB(2), should not start.
+    mScheduler->submit(CLIENT(0), JOB(2), UID(0), mOfflineRequest, mClientCallback0);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+    // Cancel queued real-time job.
+    // Cancel real-time job JOB(1), should be cancelled.
+    EXPECT_TRUE(mScheduler->cancel(CLIENT(0), JOB(1)));
+
+    // Cancel queued offline job.
+    // Cancel offline job JOB(2), should be cancelled.
+    EXPECT_TRUE(mScheduler->cancel(CLIENT(0), JOB(2)));
+
+    // Submit offline job JOB(3), shouldn't cause any event.
+    mScheduler->submit(CLIENT(0), JOB(3), UID(0), mOfflineRequest, mClientCallback0);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+    // Cancel running real-time job JOB(0).
+    // - Should be stopped first then cancelled.
+    // - Should also start offline job JOB(2) because real-time queue is empty.
+    EXPECT_TRUE(mScheduler->cancel(CLIENT(0), JOB(0)));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Stop(CLIENT(0), JOB(0)));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), JOB(3)));
+
+    // Submit real-time job JOB(4), offline JOB(3) should pause and JOB(4) should start.
+    mScheduler->submit(CLIENT(0), JOB(4), UID(0), mRealtimeRequest, mClientCallback0);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Pause(CLIENT(0), JOB(3)));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), JOB(4)));
+
+    // Cancel paused JOB(3). JOB(3) should be stopped.
+    EXPECT_TRUE(mScheduler->cancel(CLIENT(0), JOB(3)));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Stop(CLIENT(0), JOB(3)));
+}
+
+TEST_F(TranscodingJobSchedulerTest, TestFinishJob) {
+    ALOGD("TestFinishJob");
+
+    // Start with unspecified top UID.
+    // Finish without any jobs submitted, should be ignored.
+    mScheduler->onFinish(CLIENT(0), JOB(0));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+    // Submit offline job JOB(0), should start immediately.
+    mScheduler->submit(CLIENT(0), JOB(0), UID(0), mOfflineRequest, mClientCallback0);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), JOB(0)));
+
+    // Submit real-time job JOB(1), should pause offline job and start immediately.
+    mScheduler->submit(CLIENT(0), JOB(1), UID(0), mRealtimeRequest, mClientCallback0);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Pause(CLIENT(0), JOB(0)));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), JOB(1)));
+
+    // Submit real-time job JOB(2), should not start.
+    mScheduler->submit(CLIENT(0), JOB(2), UID(0), mRealtimeRequest, mClientCallback0);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+    // Finish when the job never started, should be ignored.
+    mScheduler->onFinish(CLIENT(0), JOB(2));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+    // UID(1) moves to top.
+    mUidPolicy->setTop(UID(1));
+    // Submit real-time job to CLIENT(1) in UID(1), should pause previous job and start new job.
+    mScheduler->submit(CLIENT(1), JOB(0), UID(1), mRealtimeRequest, mClientCallback1);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Pause(CLIENT(0), JOB(1)));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(1), JOB(0)));
+
+    // Simulate Finish that arrived late, after pause issued by scheduler.
+    // Should still be propagated to client, but shouldn't trigger any new start.
+    mScheduler->onFinish(CLIENT(0), JOB(1));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Finished(CLIENT(0), JOB(1)));
+
+    // Finish running real-time job, should start next real-time job in queue.
+    mScheduler->onFinish(CLIENT(1), JOB(0));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Finished(CLIENT(1), JOB(0)));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), JOB(2)));
+
+    // Finish running real-time job, should resume next job (offline job) in queue.
+    mScheduler->onFinish(CLIENT(0), JOB(2));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Finished(CLIENT(0), JOB(2)));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Resume(CLIENT(0), JOB(0)));
+
+    // Finish running offline job.
+    mScheduler->onFinish(CLIENT(0), JOB(0));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Finished(CLIENT(0), JOB(0)));
+
+    // Duplicate finish for last job, should be ignored.
+    mScheduler->onFinish(CLIENT(0), JOB(0));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+}
+
+TEST_F(TranscodingJobSchedulerTest, TestFailJob) {
+    ALOGD("TestFailJob");
+
+    // Start with unspecified top UID.
+    // Fail without any jobs submitted, should be ignored.
+    mScheduler->onError(CLIENT(0), JOB(0), TranscodingErrorCode::kUnknown);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+    // Submit offline job JOB(0), should start immediately.
+    mScheduler->submit(CLIENT(0), JOB(0), UID(0), mOfflineRequest, mClientCallback0);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), JOB(0)));
+
+    // Submit real-time job JOB(1), should pause offline job and start immediately.
+    mScheduler->submit(CLIENT(0), JOB(1), UID(0), mRealtimeRequest, mClientCallback0);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Pause(CLIENT(0), JOB(0)));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), JOB(1)));
+
+    // Submit real-time job JOB(2), should not start.
+    mScheduler->submit(CLIENT(0), JOB(2), UID(0), mRealtimeRequest, mClientCallback0);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+    // Fail when the job never started, should be ignored.
+    mScheduler->onError(CLIENT(0), JOB(2), TranscodingErrorCode::kUnknown);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+    // UID(1) moves to top.
+    mUidPolicy->setTop(UID(1));
+    // Submit real-time job to CLIENT(1) in UID(1), should pause previous job and start new job.
+    mScheduler->submit(CLIENT(1), JOB(0), UID(1), mRealtimeRequest, mClientCallback1);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Pause(CLIENT(0), JOB(1)));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(1), JOB(0)));
+
+    // Simulate Fail that arrived late, after pause issued by scheduler.
+    // Should still be propagated to client, but shouldn't trigger any new start.
+    mScheduler->onError(CLIENT(0), JOB(1), TranscodingErrorCode::kUnknown);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Failed(CLIENT(0), JOB(1)));
+
+    // Fail running real-time job, should start next real-time job in queue.
+    mScheduler->onError(CLIENT(1), JOB(0), TranscodingErrorCode::kUnknown);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Failed(CLIENT(1), JOB(0)));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), JOB(2)));
+
+    // Fail running real-time job, should resume next job (offline job) in queue.
+    mScheduler->onError(CLIENT(0), JOB(2), TranscodingErrorCode::kUnknown);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Failed(CLIENT(0), JOB(2)));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Resume(CLIENT(0), JOB(0)));
+
+    // Fail running offline job, and test error code propagation.
+    mScheduler->onError(CLIENT(0), JOB(0), TranscodingErrorCode::kInvalidOperation);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Failed(CLIENT(0), JOB(0)));
+    EXPECT_EQ(mTranscoder->getLastError(), TranscodingErrorCode::kInvalidOperation);
+
+    // Duplicate fail for last job, should be ignored.
+    mScheduler->onError(CLIENT(0), JOB(0), TranscodingErrorCode::kUnknown);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+}
+
+TEST_F(TranscodingJobSchedulerTest, TestTopUidChanged) {
+    ALOGD("TestTopUidChanged");
+
+    // Start with unspecified top UID.
+    // Submit real-time job to CLIENT(0), job should start immediately.
+    mScheduler->submit(CLIENT(0), JOB(0), UID(0), mRealtimeRequest, mClientCallback0);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), JOB(0)));
+
+    // Submit offline job to CLIENT(0), should not start.
+    mScheduler->submit(CLIENT(1), JOB(0), UID(0), mOfflineRequest, mClientCallback1);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+    // Move UID(1) to top.
+    mUidPolicy->setTop(UID(1));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+    // Submit real-time job to CLIENT(2) in different uid UID(1).
+    // Should pause previous job and start new job.
+    mScheduler->submit(CLIENT(2), JOB(0), UID(1), mRealtimeRequest, mClientCallback2);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Pause(CLIENT(0), JOB(0)));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(2), JOB(0)));
+
+    // Bring UID(0) back to top.
+    mUidPolicy->setTop(UID(0));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Pause(CLIENT(2), JOB(0)));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Resume(CLIENT(0), JOB(0)));
+
+    // Bring invalid uid to top.
+    mUidPolicy->setTop(kInvalidUid);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+    // Finish job, next real-time job should resume.
+    mScheduler->onFinish(CLIENT(0), JOB(0));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Finished(CLIENT(0), JOB(0)));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Resume(CLIENT(2), JOB(0)));
+
+    // Finish job, offline job should start.
+    mScheduler->onFinish(CLIENT(2), JOB(0));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Finished(CLIENT(2), JOB(0)));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(1), JOB(0)));
+}
+
+TEST_F(TranscodingJobSchedulerTest, TestTopUidSetChanged) {
+    ALOGD("TestTopUidChanged_MultipleUids");
+
+    // Start with unspecified top UID.
+    // Submit real-time job to CLIENT(0), job should start immediately.
+    mScheduler->submit(CLIENT(0), JOB(0), UID(0), mRealtimeRequest, mClientCallback0);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), JOB(0)));
+
+    // Submit offline job to CLIENT(0), should not start.
+    mScheduler->submit(CLIENT(1), JOB(0), UID(0), mOfflineRequest, mClientCallback1);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+    // Set UID(0), UID(1) to top set.
+    // UID(0) should continue to run.
+    mUidPolicy->setTop({UID(0), UID(1)});
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+    // Submit real-time job to CLIENT(2) in different uid UID(1).
+    // UID(0) should pause and UID(1) should start.
+    mScheduler->submit(CLIENT(2), JOB(0), UID(1), mRealtimeRequest, mClientCallback2);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Pause(CLIENT(0), JOB(0)));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(2), JOB(0)));
+
+    // Remove UID(0) from top set, and only leave UID(1) in the set.
+    // UID(1) should continue to run.
+    mUidPolicy->setTop(UID(1));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+    // Set UID(0), UID(2) to top set.
+    // UID(1) should continue to run.
+    mUidPolicy->setTop({UID(1), UID(2)});
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+    // Bring UID(0) back to top.
+    mUidPolicy->setTop(UID(0));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Pause(CLIENT(2), JOB(0)));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Resume(CLIENT(0), JOB(0)));
+
+    // Bring invalid uid to top.
+    mUidPolicy->setTop(kInvalidUid);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+    // Finish job, next real-time job from UID(1) should resume, even if UID(1) no longer top.
+    mScheduler->onFinish(CLIENT(0), JOB(0));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Finished(CLIENT(0), JOB(0)));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Resume(CLIENT(2), JOB(0)));
+
+    // Finish job, offline job should start.
+    mScheduler->onFinish(CLIENT(2), JOB(0));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Finished(CLIENT(2), JOB(0)));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(1), JOB(0)));
+}
+
+TEST_F(TranscodingJobSchedulerTest, TestResourceLost) {
+    ALOGD("TestResourceLost");
+
+    // Start with unspecified top UID.
+    // Submit real-time job to CLIENT(0), job should start immediately.
+    mScheduler->submit(CLIENT(0), JOB(0), UID(0), mRealtimeRequest, mClientCallback0);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), JOB(0)));
+
+    // Submit offline job to CLIENT(0), should not start.
+    mScheduler->submit(CLIENT(1), JOB(0), UID(0), mOfflineRequest, mClientCallback1);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+    // Move UID(1) to top.
+    mUidPolicy->setTop(UID(1));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+    // Submit real-time job to CLIENT(2) in different uid UID(1).
+    // Should pause previous job and start new job.
+    mScheduler->submit(CLIENT(2), JOB(0), UID(1), mRealtimeRequest, mClientCallback2);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Pause(CLIENT(0), JOB(0)));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(2), JOB(0)));
+
+    // Test 1: No queue change during resource loss.
+    // Signal resource lost.
+    mScheduler->onResourceLost();
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+    // Signal resource available, CLIENT(2) should resume.
+    mScheduler->onResourceAvailable();
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Resume(CLIENT(2), JOB(0)));
+
+    // Test 2: Change of queue order during resource loss.
+    // Signal resource lost.
+    mScheduler->onResourceLost();
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+    // Move UID(0) back to top, should have no resume due to no resource.
+    mUidPolicy->setTop(UID(0));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+    // Signal resource available, CLIENT(0) should resume.
+    mScheduler->onResourceAvailable();
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Resume(CLIENT(0), JOB(0)));
+
+    // Test 3: Adding new queue during resource loss.
+    // Signal resource lost.
+    mScheduler->onResourceLost();
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+    // Move UID(2) to top.
+    mUidPolicy->setTop(UID(2));
+
+    // Submit real-time job to CLIENT(3) in UID(2), job shouldn't start due to no resource.
+    mScheduler->submit(CLIENT(3), JOB(0), UID(2), mRealtimeRequest, mClientCallback3);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+    // Signal resource available, CLIENT(3)'s job should start.
+    mScheduler->onResourceAvailable();
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(3), JOB(0)));
+}
+
+}  // namespace android
diff --git a/media/libmediatranscoding/tests/assets/backyard_hevc_1920x1080_20Mbps.mp4 b/media/libmediatranscoding/tests/assets/backyard_hevc_1920x1080_20Mbps.mp4
new file mode 100644
index 0000000..80d1ec3
--- /dev/null
+++ b/media/libmediatranscoding/tests/assets/backyard_hevc_1920x1080_20Mbps.mp4
Binary files differ
diff --git a/media/libmediatranscoding/tests/assets/cubicle_avc_480x240_aac_24KHz.mp4 b/media/libmediatranscoding/tests/assets/cubicle_avc_480x240_aac_24KHz.mp4
new file mode 100644
index 0000000..ef7e1b7
--- /dev/null
+++ b/media/libmediatranscoding/tests/assets/cubicle_avc_480x240_aac_24KHz.mp4
Binary files differ
diff --git a/media/libmediatranscoding/tests/assets/desk_hevc_1920x1080_aac_48KHz_rot90.mp4 b/media/libmediatranscoding/tests/assets/desk_hevc_1920x1080_aac_48KHz_rot90.mp4
new file mode 100644
index 0000000..df42a15
--- /dev/null
+++ b/media/libmediatranscoding/tests/assets/desk_hevc_1920x1080_aac_48KHz_rot90.mp4
Binary files differ
diff --git a/media/libmediatranscoding/tests/assets/jets_hevc_1280x720_20Mbps.mp4 b/media/libmediatranscoding/tests/assets/jets_hevc_1280x720_20Mbps.mp4
new file mode 100644
index 0000000..7794b99
--- /dev/null
+++ b/media/libmediatranscoding/tests/assets/jets_hevc_1280x720_20Mbps.mp4
Binary files differ
diff --git a/media/libmediatranscoding/tests/assets/longtest_15s.mp4 b/media/libmediatranscoding/tests/assets/longtest_15s.mp4
new file mode 100644
index 0000000..b50d8e4
--- /dev/null
+++ b/media/libmediatranscoding/tests/assets/longtest_15s.mp4
Binary files differ
diff --git a/media/libmediatranscoding/tests/assets/plex_hevc_3840x2160_12Mbps.mp4 b/media/libmediatranscoding/tests/assets/plex_hevc_3840x2160_12Mbps.mp4
new file mode 100644
index 0000000..92dda3b
--- /dev/null
+++ b/media/libmediatranscoding/tests/assets/plex_hevc_3840x2160_12Mbps.mp4
Binary files differ
diff --git a/media/libmediatranscoding/tests/assets/plex_hevc_3840x2160_20Mbps.mp4 b/media/libmediatranscoding/tests/assets/plex_hevc_3840x2160_20Mbps.mp4
new file mode 100644
index 0000000..2fe37bd
--- /dev/null
+++ b/media/libmediatranscoding/tests/assets/plex_hevc_3840x2160_20Mbps.mp4
Binary files differ
diff --git a/media/libmediatranscoding/tests/assets/push_assets.sh b/media/libmediatranscoding/tests/assets/push_assets.sh
new file mode 100755
index 0000000..8afc947
--- /dev/null
+++ b/media/libmediatranscoding/tests/assets/push_assets.sh
@@ -0,0 +1,32 @@
+#!/bin/bash
+#
+# Pushes the assets to the /data/local/tmp.
+#
+
+if [ "$SYNC_FINISHED" != true ]; then
+  if [ -z "$ANDROID_BUILD_TOP" ]; then
+      echo "Android build environment not set"
+      exit -1
+  fi
+
+  # ensure we have mm
+  . $ANDROID_BUILD_TOP/build/envsetup.sh
+
+  mm
+
+  echo "waiting for device"
+
+  adb root && adb wait-for-device remount
+fi
+
+echo "Copying files to device"
+
+adb shell mkdir -p /data/local/tmp/TranscodingTestAssets
+
+FILES=$ANDROID_BUILD_TOP/frameworks/av/media/libmediatranscoding/tests/assets/*
+for file in $FILES
+do 
+adb push --sync $file /data/local/tmp/TranscodingTestAssets
+done
+
+echo "Copy done"
diff --git a/media/libmediatranscoding/tests/build_and_run_all_unit_tests.sh b/media/libmediatranscoding/tests/build_and_run_all_unit_tests.sh
index d8e4830..ff6df2c 100644
--- a/media/libmediatranscoding/tests/build_and_run_all_unit_tests.sh
+++ b/media/libmediatranscoding/tests/build_and_run_all_unit_tests.sh
@@ -3,24 +3,32 @@
 # Run tests in this directory.
 #
 
-if [ -z "$ANDROID_BUILD_TOP" ]; then
-    echo "Android build environment not set"
-    exit -1
+if [ "$SYNC_FINISHED" != true ]; then
+  if [ -z "$ANDROID_BUILD_TOP" ]; then
+      echo "Android build environment not set"
+      exit -1
+  fi
+
+  # ensure we have mm
+  . $ANDROID_BUILD_TOP/build/envsetup.sh
+
+  mm
+
+  echo "waiting for device"
+
+  adb root && adb wait-for-device remount && adb sync
 fi
 
-# ensure we have mm
-. $ANDROID_BUILD_TOP/build/envsetup.sh
-
-mm
-
-echo "waiting for device"
-
-adb root && adb wait-for-device remount && adb sync
-
 echo "========================================"
 
 echo "testing TranscodingClientManager"
-adb shell /data/nativetest64/TranscodingClientManager_tests/TranscodingClientManager_tests
+#adb shell /data/nativetest64/TranscodingClientManager_tests/TranscodingClientManager_tests
+adb shell /data/nativetest/TranscodingClientManager_tests/TranscodingClientManager_tests
 
 echo "testing AdjustableMaxPriorityQueue"
-adb shell /data/nativetest64/AdjustableMaxPriorityQueue_tests/AdjustableMaxPriorityQueue_tests
+#adb shell /data/nativetest64/AdjustableMaxPriorityQueue_tests/AdjustableMaxPriorityQueue_tests
+adb shell /data/nativetest/AdjustableMaxPriorityQueue_tests/AdjustableMaxPriorityQueue_tests
+
+echo "testing TranscodingJobScheduler"
+#adb shell /data/nativetest64/TranscodingJobScheduler_tests/TranscodingJobScheduler_tests
+adb shell /data/nativetest/TranscodingJobScheduler_tests/TranscodingJobScheduler_tests
diff --git a/media/libmediatranscoding/transcoder/Android.bp b/media/libmediatranscoding/transcoder/Android.bp
new file mode 100644
index 0000000..c153a42
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/Android.bp
@@ -0,0 +1,62 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+cc_library_shared {
+    name: "libmediatranscoder",
+
+    srcs: [
+        "MediaSampleQueue.cpp",
+        "MediaSampleReaderNDK.cpp",
+        "MediaSampleWriter.cpp",
+        "MediaTrackTranscoder.cpp",
+        "MediaTranscoder.cpp",
+        "NdkCommon.cpp",
+        "PassthroughTrackTranscoder.cpp",
+        "VideoTrackTranscoder.cpp",
+    ],
+
+    shared_libs: [
+        "libbase",
+        "libcutils",
+        "libmediandk",
+        "libnativewindow",
+        "libutils",
+        // TODO: Use libbinder_ndk
+        "libbinder",
+    ],
+
+    export_include_dirs: [
+        "include",
+    ],
+
+    cflags: [
+        "-Wall",
+        "-Werror",
+        "-Wformat",
+        "-Wno-error=deprecated-declarations",
+        "-Wthread-safety",
+        "-Wunused",
+        "-Wunreachable-code",
+    ],
+
+    sanitize: {
+        misc_undefined: [
+            "unsigned-integer-overflow",
+            "signed-integer-overflow",
+        ],
+        cfi: true,
+    },
+}
diff --git a/media/libmediatranscoding/transcoder/MediaSampleQueue.cpp b/media/libmediatranscoding/transcoder/MediaSampleQueue.cpp
new file mode 100644
index 0000000..691ee1c
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/MediaSampleQueue.cpp
@@ -0,0 +1,58 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// #define LOG_NDEBUG 0
+#define LOG_TAG "MediaSampleQueue"
+
+#include <android-base/logging.h>
+#include <media/MediaSampleQueue.h>
+
+namespace android {
+
+bool MediaSampleQueue::enqueue(const std::shared_ptr<MediaSample>& sample) {
+    std::scoped_lock<std::mutex> lock(mMutex);
+    if (!mAborted) {
+        mSampleQueue.push(sample);
+        mCondition.notify_one();
+    }
+    return mAborted;
+}
+
+// Unfortunately std::unique_lock is incompatible with -Wthread-safety
+bool MediaSampleQueue::dequeue(std::shared_ptr<MediaSample>* sample) NO_THREAD_SAFETY_ANALYSIS {
+    std::unique_lock<std::mutex> lock(mMutex);
+    while (mSampleQueue.empty() && !mAborted) {
+        mCondition.wait(lock);
+    }
+
+    if (!mAborted) {
+        if (sample != nullptr) {
+            *sample = mSampleQueue.front();
+        }
+        mSampleQueue.pop();
+    }
+    return mAborted;
+}
+
+void MediaSampleQueue::abort() {
+    std::scoped_lock<std::mutex> lock(mMutex);
+    // Clear the queue and notify consumers.
+    std::queue<std::shared_ptr<MediaSample>> empty = {};
+    std::swap(mSampleQueue, empty);
+    mAborted = true;
+    mCondition.notify_all();
+}
+}  // namespace android
\ No newline at end of file
diff --git a/media/libmediatranscoding/transcoder/MediaSampleReaderNDK.cpp b/media/libmediatranscoding/transcoder/MediaSampleReaderNDK.cpp
new file mode 100644
index 0000000..a0096c7
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/MediaSampleReaderNDK.cpp
@@ -0,0 +1,264 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// #define LOG_NDEBUG 0
+#define LOG_TAG "MediaSampleReader"
+
+#include <android-base/logging.h>
+#include <media/MediaSampleReaderNDK.h>
+
+#include <algorithm>
+#include <vector>
+
+namespace android {
+
+// Check that the extractor sample flags have the expected NDK meaning.
+static_assert(SAMPLE_FLAG_SYNC_SAMPLE == AMEDIAEXTRACTOR_SAMPLE_FLAG_SYNC,
+              "Sample flag mismatch: SYNC_SAMPLE");
+
+// static
+std::shared_ptr<MediaSampleReader> MediaSampleReaderNDK::createFromFd(int fd, size_t offset,
+                                                                      size_t size) {
+    AMediaExtractor* extractor = AMediaExtractor_new();
+    if (extractor == nullptr) {
+        LOG(ERROR) << "Unable to allocate AMediaExtractor";
+        return nullptr;
+    }
+
+    media_status_t status = AMediaExtractor_setDataSourceFd(extractor, fd, offset, size);
+    if (status != AMEDIA_OK) {
+        LOG(ERROR) << "AMediaExtractor_setDataSourceFd returned error: " << status;
+        AMediaExtractor_delete(extractor);
+        return nullptr;
+    }
+
+    auto sampleReader = std::shared_ptr<MediaSampleReaderNDK>(new MediaSampleReaderNDK(extractor));
+    status = sampleReader->init();
+    if (status != AMEDIA_OK) {
+        LOG(ERROR) << "MediaSampleReaderNDK::init returned error: " << status;
+        return nullptr;
+    }
+
+    return sampleReader;
+}
+
+MediaSampleReaderNDK::MediaSampleReaderNDK(AMediaExtractor* extractor)
+      : mExtractor(extractor), mTrackCount(AMediaExtractor_getTrackCount(mExtractor)) {
+    if (mTrackCount > 0) {
+        mTrackCursors.resize(mTrackCount);
+        mTrackCursors.resize(mTrackCount);
+    }
+}
+
+media_status_t MediaSampleReaderNDK::init() {
+    for (size_t trackIndex = 0; trackIndex < mTrackCount; trackIndex++) {
+        media_status_t status = AMediaExtractor_selectTrack(mExtractor, trackIndex);
+        if (status != AMEDIA_OK) {
+            LOG(ERROR) << "AMediaExtractor_selectTrack returned error: " << status;
+            return status;
+        }
+    }
+
+    mExtractorTrackIndex = AMediaExtractor_getSampleTrackIndex(mExtractor);
+    if (mExtractorTrackIndex >= 0) {
+        mTrackCursors[mExtractorTrackIndex].current.set(mExtractorSampleIndex,
+                                                        AMediaExtractor_getSampleTime(mExtractor));
+    } else if (mTrackCount > 0) {
+        // The extractor track index is only allowed to be invalid if there are no tracks.
+        LOG(ERROR) << "Track index " << mExtractorTrackIndex << " is invalid for track count "
+                   << mTrackCount;
+        return AMEDIA_ERROR_MALFORMED;
+    }
+
+    return AMEDIA_OK;
+}
+
+MediaSampleReaderNDK::~MediaSampleReaderNDK() {
+    if (mExtractor != nullptr) {
+        AMediaExtractor_delete(mExtractor);
+    }
+}
+
+bool MediaSampleReaderNDK::advanceExtractor_l() {
+    // Reset the "next" sample time whenever the extractor advances past a sample that is current,
+    // to ensure that "next" is appropriately updated when the extractor advances over the next
+    // sample of that track.
+    if (mTrackCursors[mExtractorTrackIndex].current.isSet &&
+        mTrackCursors[mExtractorTrackIndex].current.index == mExtractorSampleIndex) {
+        mTrackCursors[mExtractorTrackIndex].next.reset();
+    }
+
+    if (!AMediaExtractor_advance(mExtractor)) {
+        return false;
+    }
+
+    mExtractorTrackIndex = AMediaExtractor_getSampleTrackIndex(mExtractor);
+    mExtractorSampleIndex++;
+
+    SampleCursor& cursor = mTrackCursors[mExtractorTrackIndex];
+    if (mExtractorSampleIndex > cursor.previous.index) {
+        if (!cursor.current.isSet) {
+            cursor.current.set(mExtractorSampleIndex, AMediaExtractor_getSampleTime(mExtractor));
+        } else if (!cursor.next.isSet && mExtractorSampleIndex > cursor.current.index) {
+            cursor.next.set(mExtractorSampleIndex, AMediaExtractor_getSampleTime(mExtractor));
+        }
+    }
+    return true;
+}
+
+media_status_t MediaSampleReaderNDK::seekExtractorBackwards_l(int64_t targetTimeUs,
+                                                              int targetTrackIndex,
+                                                              uint64_t targetSampleIndex) {
+    if (targetSampleIndex > mExtractorSampleIndex) {
+        LOG(ERROR) << "Error: Forward seek is not supported";
+        return AMEDIA_ERROR_UNSUPPORTED;
+    }
+
+    // AMediaExtractor supports reading negative timestamps but does not support seeking to them.
+    const int64_t seekToTimeUs = std::max(targetTimeUs, (int64_t)0);
+    media_status_t status =
+            AMediaExtractor_seekTo(mExtractor, seekToTimeUs, AMEDIAEXTRACTOR_SEEK_PREVIOUS_SYNC);
+    if (status != AMEDIA_OK) {
+        LOG(ERROR) << "Unable to seek to " << seekToTimeUs << ", target " << targetTimeUs;
+        return status;
+    }
+    mExtractorTrackIndex = AMediaExtractor_getSampleTrackIndex(mExtractor);
+    int64_t sampleTimeUs = AMediaExtractor_getSampleTime(mExtractor);
+
+    while (sampleTimeUs != targetTimeUs || mExtractorTrackIndex != targetTrackIndex) {
+        if (!AMediaExtractor_advance(mExtractor)) {
+            return AMEDIA_ERROR_END_OF_STREAM;
+        }
+        mExtractorTrackIndex = AMediaExtractor_getSampleTrackIndex(mExtractor);
+        sampleTimeUs = AMediaExtractor_getSampleTime(mExtractor);
+    }
+    mExtractorSampleIndex = targetSampleIndex;
+    return AMEDIA_OK;
+}
+
+void MediaSampleReaderNDK::advanceTrack(int trackIndex) {
+    std::scoped_lock lock(mExtractorMutex);
+
+    if (trackIndex < 0 || trackIndex >= mTrackCount) {
+        LOG(ERROR) << "Invalid trackIndex " << trackIndex << " for trackCount " << mTrackCount;
+        return;
+    }
+
+    // Note: Positioning the extractor before advancing the track is needed for two reasons:
+    // 1. To enable multiple advances without explicitly letting the extractor catch up.
+    // 2. To prevent the extractor from being farther than "next".
+    (void)positionExtractorForTrack_l(trackIndex);
+
+    SampleCursor& cursor = mTrackCursors[trackIndex];
+    cursor.previous = cursor.current;
+    cursor.current = cursor.next;
+    cursor.next.reset();
+}
+
+media_status_t MediaSampleReaderNDK::positionExtractorForTrack_l(int trackIndex) {
+    media_status_t status = AMEDIA_OK;
+    const SampleCursor& cursor = mTrackCursors[trackIndex];
+
+    // Seek backwards if the extractor is ahead of the current time.
+    if (cursor.current.isSet && mExtractorSampleIndex > cursor.current.index) {
+        status = seekExtractorBackwards_l(cursor.current.timeStampUs, trackIndex,
+                                          cursor.current.index);
+        if (status != AMEDIA_OK) return status;
+    }
+
+    // Advance until extractor points to the current sample.
+    while (!(cursor.current.isSet && cursor.current.index == mExtractorSampleIndex)) {
+        if (!advanceExtractor_l()) {
+            return AMEDIA_ERROR_END_OF_STREAM;
+        }
+    }
+
+    return AMEDIA_OK;
+}
+
+media_status_t MediaSampleReaderNDK::getSampleInfoForTrack(int trackIndex, MediaSampleInfo* info) {
+    std::scoped_lock lock(mExtractorMutex);
+
+    if (trackIndex < 0 || trackIndex >= mTrackCount) {
+        LOG(ERROR) << "Invalid trackIndex " << trackIndex << " for trackCount " << mTrackCount;
+        return AMEDIA_ERROR_INVALID_PARAMETER;
+    } else if (info == nullptr) {
+        LOG(ERROR) << "MediaSampleInfo pointer is NULL.";
+        return AMEDIA_ERROR_INVALID_PARAMETER;
+    }
+
+    media_status_t status = positionExtractorForTrack_l(trackIndex);
+    if (status == AMEDIA_OK) {
+        info->presentationTimeUs = AMediaExtractor_getSampleTime(mExtractor);
+        info->flags = AMediaExtractor_getSampleFlags(mExtractor);
+        info->size = AMediaExtractor_getSampleSize(mExtractor);
+    } else if (status == AMEDIA_ERROR_END_OF_STREAM) {
+        info->presentationTimeUs = 0;
+        info->flags = SAMPLE_FLAG_END_OF_STREAM;
+        info->size = 0;
+    }
+
+    return status;
+}
+
+media_status_t MediaSampleReaderNDK::readSampleDataForTrack(int trackIndex, uint8_t* buffer,
+                                                            size_t bufferSize) {
+    std::scoped_lock lock(mExtractorMutex);
+
+    if (trackIndex < 0 || trackIndex >= mTrackCount) {
+        LOG(ERROR) << "Invalid trackIndex " << trackIndex << " for trackCount " << mTrackCount;
+        return AMEDIA_ERROR_INVALID_PARAMETER;
+    } else if (buffer == nullptr) {
+        LOG(ERROR) << "buffer pointer is NULL";
+        return AMEDIA_ERROR_INVALID_PARAMETER;
+    }
+
+    media_status_t status = positionExtractorForTrack_l(trackIndex);
+    if (status != AMEDIA_OK) return status;
+
+    ssize_t sampleSize = AMediaExtractor_getSampleSize(mExtractor);
+    if (bufferSize < sampleSize) {
+        LOG(ERROR) << "Buffer is too small for sample, " << bufferSize << " vs " << sampleSize;
+        return AMEDIA_ERROR_INVALID_PARAMETER;
+    }
+
+    ssize_t bytesRead = AMediaExtractor_readSampleData(mExtractor, buffer, bufferSize);
+    if (bytesRead < sampleSize) {
+        LOG(ERROR) << "Unable to read full sample, " << bytesRead << " vs " << sampleSize;
+        return AMEDIA_ERROR_INVALID_PARAMETER;
+    }
+
+    return AMEDIA_OK;
+}
+
+AMediaFormat* MediaSampleReaderNDK::getFileFormat() {
+    return AMediaExtractor_getFileFormat(mExtractor);
+}
+
+size_t MediaSampleReaderNDK::getTrackCount() const {
+    return mTrackCount;
+}
+
+AMediaFormat* MediaSampleReaderNDK::getTrackFormat(int trackIndex) {
+    if (trackIndex < 0 || trackIndex >= mTrackCount) {
+        LOG(ERROR) << "Invalid trackIndex " << trackIndex << " for trackCount " << mTrackCount;
+        return AMediaFormat_new();
+    }
+
+    return AMediaExtractor_getTrackFormat(mExtractor, trackIndex);
+}
+
+}  // namespace android
diff --git a/media/libmediatranscoding/transcoder/MediaSampleWriter.cpp b/media/libmediatranscoding/transcoder/MediaSampleWriter.cpp
new file mode 100644
index 0000000..3676d73
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/MediaSampleWriter.cpp
@@ -0,0 +1,281 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// #define LOG_NDEBUG 0
+#define LOG_TAG "MediaSampleWriter"
+
+#include <android-base/logging.h>
+#include <media/MediaSampleWriter.h>
+#include <media/NdkMediaMuxer.h>
+
+namespace android {
+
+class DefaultMuxer : public MediaSampleWriterMuxerInterface {
+public:
+    // MediaSampleWriterMuxerInterface
+    ssize_t addTrack(AMediaFormat* trackFormat) override {
+        // If the track format has rotation, need to call AMediaMuxer_setOrientationHint
+        // to set the rotation. Muxer doesn't take rotation specified on the track.
+        const char* mime;
+        if (AMediaFormat_getString(trackFormat, AMEDIAFORMAT_KEY_MIME, &mime) &&
+            strncmp(mime, "video/", 6) == 0) {
+            int32_t rotation;
+            if (AMediaFormat_getInt32(trackFormat, AMEDIAFORMAT_KEY_ROTATION, &rotation) &&
+                (rotation != 0)) {
+                AMediaMuxer_setOrientationHint(mMuxer, rotation);
+            }
+        }
+
+        return AMediaMuxer_addTrack(mMuxer, trackFormat);
+    }
+    media_status_t start() override { return AMediaMuxer_start(mMuxer); }
+    media_status_t writeSampleData(size_t trackIndex, const uint8_t* data,
+                                   const AMediaCodecBufferInfo* info) override {
+        return AMediaMuxer_writeSampleData(mMuxer, trackIndex, data, info);
+    }
+    media_status_t stop() override { return AMediaMuxer_stop(mMuxer); }
+    // ~MediaSampleWriterMuxerInterface
+
+    static std::shared_ptr<DefaultMuxer> create(int fd) {
+        AMediaMuxer* ndkMuxer = AMediaMuxer_new(fd, AMEDIAMUXER_OUTPUT_FORMAT_MPEG_4);
+        if (ndkMuxer == nullptr) {
+            LOG(ERROR) << "Unable to create AMediaMuxer";
+            return nullptr;
+        }
+
+        return std::make_shared<DefaultMuxer>(ndkMuxer);
+    }
+
+    ~DefaultMuxer() {
+        if (mMuxer != nullptr) {
+            AMediaMuxer_delete(mMuxer);
+        }
+    }
+
+    DefaultMuxer(AMediaMuxer* muxer) : mMuxer(muxer){};
+    DefaultMuxer() = delete;
+
+private:
+    AMediaMuxer* mMuxer;
+};
+
+MediaSampleWriter::~MediaSampleWriter() {
+    if (mState == STARTED) {
+        stop();  // Join thread.
+    }
+}
+
+bool MediaSampleWriter::init(int fd, const std::weak_ptr<CallbackInterface>& callbacks) {
+    return init(DefaultMuxer::create(fd), callbacks);
+}
+
+bool MediaSampleWriter::init(const std::shared_ptr<MediaSampleWriterMuxerInterface>& muxer,
+                             const std::weak_ptr<CallbackInterface>& callbacks) {
+    if (callbacks.lock() == nullptr) {
+        LOG(ERROR) << "Callback object cannot be null";
+        return false;
+    } else if (muxer == nullptr) {
+        LOG(ERROR) << "Muxer cannot be null";
+        return false;
+    }
+
+    std::scoped_lock lock(mStateMutex);
+    if (mState != UNINITIALIZED) {
+        LOG(ERROR) << "Sample writer is already initialized";
+        return false;
+    }
+
+    mState = INITIALIZED;
+    mMuxer = muxer;
+    mCallbacks = callbacks;
+    return true;
+}
+
+bool MediaSampleWriter::addTrack(const std::shared_ptr<MediaSampleQueue>& sampleQueue,
+                                 const std::shared_ptr<AMediaFormat>& trackFormat) {
+    if (sampleQueue == nullptr || trackFormat == nullptr) {
+        LOG(ERROR) << "Sample queue and track format must be non-null";
+        return false;
+    }
+
+    std::scoped_lock lock(mStateMutex);
+    if (mState != INITIALIZED) {
+        LOG(ERROR) << "Muxer needs to be initialized when adding tracks.";
+        return false;
+    }
+    ssize_t trackIndex = mMuxer->addTrack(trackFormat.get());
+    if (trackIndex < 0) {
+        LOG(ERROR) << "Failed to add media track to muxer: " << trackIndex;
+        return false;
+    }
+
+    int64_t durationUs;
+    if (!AMediaFormat_getInt64(trackFormat.get(), AMEDIAFORMAT_KEY_DURATION, &durationUs)) {
+        durationUs = 0;
+    }
+
+    const char* mime = nullptr;
+    const bool isVideo = AMediaFormat_getString(trackFormat.get(), AMEDIAFORMAT_KEY_MIME, &mime) &&
+                         (strncmp(mime, "video/", 6) == 0);
+
+    mTracks.emplace_back(sampleQueue, static_cast<size_t>(trackIndex), durationUs, isVideo);
+    return true;
+}
+
+bool MediaSampleWriter::start() {
+    std::scoped_lock lock(mStateMutex);
+
+    if (mTracks.size() == 0) {
+        LOG(ERROR) << "No tracks to write.";
+        return false;
+    } else if (mState != INITIALIZED) {
+        LOG(ERROR) << "Sample writer is not initialized";
+        return false;
+    }
+
+    mThread = std::thread([this] {
+        media_status_t status = writeSamples();
+        if (auto callbacks = mCallbacks.lock()) {
+            callbacks->onFinished(this, status);
+        }
+    });
+    mState = STARTED;
+    return true;
+}
+
+bool MediaSampleWriter::stop() {
+    std::scoped_lock lock(mStateMutex);
+
+    if (mState != STARTED) {
+        LOG(ERROR) << "Sample writer is not started.";
+        return false;
+    }
+
+    // Stop the sources, and wait for thread to join.
+    for (auto& track : mTracks) {
+        track.mSampleQueue->abort();
+    }
+    mThread.join();
+    mState = STOPPED;
+    return true;
+}
+
+media_status_t MediaSampleWriter::writeSamples() {
+    media_status_t muxerStatus = mMuxer->start();
+    if (muxerStatus != AMEDIA_OK) {
+        LOG(ERROR) << "Error starting muxer: " << muxerStatus;
+        return muxerStatus;
+    }
+
+    media_status_t writeStatus = runWriterLoop();
+    if (writeStatus != AMEDIA_OK) {
+        LOG(ERROR) << "Error writing samples: " << writeStatus;
+    }
+
+    muxerStatus = mMuxer->stop();
+    if (muxerStatus != AMEDIA_OK) {
+        LOG(ERROR) << "Error stopping muxer: " << muxerStatus;
+    }
+
+    return writeStatus != AMEDIA_OK ? writeStatus : muxerStatus;
+}
+
+media_status_t MediaSampleWriter::runWriterLoop() {
+    AMediaCodecBufferInfo bufferInfo;
+    uint32_t segmentEndTimeUs = mTrackSegmentLengthUs;
+    bool samplesLeft = true;
+    int32_t lastProgressUpdate = 0;
+
+    // Set the "primary" track that will be used to determine progress to the track with longest
+    // duration.
+    int primaryTrackIndex = -1;
+    int64_t longestDurationUs = 0;
+    for (int trackIndex = 0; trackIndex < mTracks.size(); ++trackIndex) {
+        if (mTracks[trackIndex].mDurationUs > longestDurationUs) {
+            primaryTrackIndex = trackIndex;
+            longestDurationUs = mTracks[trackIndex].mDurationUs;
+        }
+    }
+
+    while (samplesLeft) {
+        samplesLeft = false;
+        for (auto& track : mTracks) {
+            if (track.mReachedEos) continue;
+
+            std::shared_ptr<MediaSample> sample;
+            do {
+                if (track.mSampleQueue->dequeue(&sample)) {
+                    // Track queue was aborted.
+                    return AMEDIA_ERROR_UNKNOWN;  // TODO(lnilsson): Custom error code.
+                } else if (sample->info.flags & SAMPLE_FLAG_END_OF_STREAM) {
+                    // Track reached end of stream.
+                    track.mReachedEos = true;
+
+                    // Preserve source track duration by setting the appropriate timestamp on the
+                    // empty End-Of-Stream sample.
+                    if (track.mDurationUs > 0 && track.mFirstSampleTimeSet) {
+                        sample->info.presentationTimeUs =
+                                track.mDurationUs + track.mFirstSampleTimeUs;
+                    }
+                } else {
+                    samplesLeft = true;
+                }
+
+                track.mPrevSampleTimeUs = sample->info.presentationTimeUs;
+                if (!track.mFirstSampleTimeSet) {
+                    // Record the first sample's timestamp in order to translate duration to EOS
+                    // time for tracks that does not start at 0.
+                    track.mFirstSampleTimeUs = sample->info.presentationTimeUs;
+                    track.mFirstSampleTimeSet = true;
+                }
+
+                bufferInfo.offset = sample->dataOffset;
+                bufferInfo.size = sample->info.size;
+                bufferInfo.flags = sample->info.flags;
+                bufferInfo.presentationTimeUs = sample->info.presentationTimeUs;
+
+                media_status_t status =
+                        mMuxer->writeSampleData(track.mTrackIndex, sample->buffer, &bufferInfo);
+                if (status != AMEDIA_OK) {
+                    LOG(ERROR) << "writeSampleData returned " << status;
+                    return status;
+                }
+
+            } while (sample->info.presentationTimeUs < segmentEndTimeUs && !track.mReachedEos);
+        }
+
+        // TODO(lnilsson): Add option to toggle progress reporting on/off.
+        if (primaryTrackIndex >= 0) {
+            const TrackRecord& track = mTracks[primaryTrackIndex];
+
+            const int64_t elapsed = track.mPrevSampleTimeUs - track.mFirstSampleTimeUs;
+            int32_t progress = (elapsed * 100) / track.mDurationUs;
+            progress = std::clamp(progress, 0, 100);
+
+            if (progress > lastProgressUpdate) {
+                if (auto callbacks = mCallbacks.lock()) {
+                    callbacks->onProgressUpdate(this, progress);
+                }
+                lastProgressUpdate = progress;
+            }
+        }
+
+        segmentEndTimeUs += mTrackSegmentLengthUs;
+    }
+
+    return AMEDIA_OK;
+}
+}  // namespace android
diff --git a/media/libmediatranscoding/transcoder/MediaTrackTranscoder.cpp b/media/libmediatranscoding/transcoder/MediaTrackTranscoder.cpp
new file mode 100644
index 0000000..e3996b0
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/MediaTrackTranscoder.cpp
@@ -0,0 +1,116 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// #define LOG_NDEBUG 0
+#define LOG_TAG "MediaTrackTranscoder"
+
+#include <android-base/logging.h>
+#include <media/MediaTrackTranscoder.h>
+#include <media/MediaTrackTranscoderCallback.h>
+
+namespace android {
+
+media_status_t MediaTrackTranscoder::configure(
+        const std::shared_ptr<MediaSampleReader>& mediaSampleReader, int trackIndex,
+        const std::shared_ptr<AMediaFormat>& destinationFormat) {
+    std::scoped_lock lock{mStateMutex};
+
+    if (mState != UNINITIALIZED) {
+        LOG(ERROR) << "Configure can only be called once";
+        return AMEDIA_ERROR_UNSUPPORTED;
+    }
+
+    if (mediaSampleReader == nullptr) {
+        LOG(ERROR) << "MediaSampleReader is null";
+        return AMEDIA_ERROR_INVALID_PARAMETER;
+    }
+    if (trackIndex < 0 || trackIndex >= mediaSampleReader->getTrackCount()) {
+        LOG(ERROR) << "TrackIndex is invalid " << trackIndex;
+        return AMEDIA_ERROR_INVALID_PARAMETER;
+    }
+
+    mMediaSampleReader = mediaSampleReader;
+    mTrackIndex = trackIndex;
+
+    mSourceFormat =
+            std::shared_ptr<AMediaFormat>(mMediaSampleReader->getTrackFormat(mTrackIndex),
+                                          std::bind(AMediaFormat_delete, std::placeholders::_1));
+    if (mSourceFormat == nullptr) {
+        LOG(ERROR) << "Unable to get format for track #" << mTrackIndex;
+        return AMEDIA_ERROR_MALFORMED;
+    }
+
+    media_status_t status = configureDestinationFormat(destinationFormat);
+    if (status != AMEDIA_OK) {
+        LOG(ERROR) << "configure failed with error " << status;
+        return status;
+    }
+
+    mState = CONFIGURED;
+    return AMEDIA_OK;
+}
+
+bool MediaTrackTranscoder::start() {
+    std::scoped_lock lock{mStateMutex};
+
+    if (mState != CONFIGURED) {
+        LOG(ERROR) << "TrackTranscoder must be configured before started";
+        return false;
+    }
+
+    mTranscodingThread = std::thread([this] {
+        media_status_t status = runTranscodeLoop();
+
+        // Notify the client.
+        if (auto callbacks = mTranscoderCallback.lock()) {
+            if (status != AMEDIA_OK) {
+                callbacks->onTrackError(this, status);
+            } else {
+                callbacks->onTrackFinished(this);
+            }
+        }
+    });
+
+    mState = STARTED;
+    return true;
+}
+
+bool MediaTrackTranscoder::stop() {
+    std::scoped_lock lock{mStateMutex};
+
+    if (mState == STARTED) {
+        abortTranscodeLoop();
+        mTranscodingThread.join();
+        mOutputQueue->abort();  // Wake up any threads waiting for samples.
+        mState = STOPPED;
+        return true;
+    }
+
+    LOG(ERROR) << "TrackTranscoder must be started before stopped";
+    return false;
+}
+
+void MediaTrackTranscoder::notifyTrackFormatAvailable() {
+    if (auto callbacks = mTranscoderCallback.lock()) {
+        callbacks->onTrackFormatAvailable(this);
+    }
+}
+
+std::shared_ptr<MediaSampleQueue> MediaTrackTranscoder::getOutputQueue() const {
+    return mOutputQueue;
+}
+
+}  // namespace android
diff --git a/media/libmediatranscoding/transcoder/MediaTranscoder.cpp b/media/libmediatranscoding/transcoder/MediaTranscoder.cpp
new file mode 100644
index 0000000..ed702db
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/MediaTranscoder.cpp
@@ -0,0 +1,354 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// #define LOG_NDEBUG 0
+#define LOG_TAG "MediaTranscoder"
+
+#include <android-base/logging.h>
+#include <binder/Parcel.h>
+#include <fcntl.h>
+#include <media/MediaSampleReaderNDK.h>
+#include <media/MediaSampleWriter.h>
+#include <media/MediaTranscoder.h>
+#include <media/PassthroughTrackTranscoder.h>
+#include <media/VideoTrackTranscoder.h>
+#include <unistd.h>
+
+namespace android {
+
+#define DEFINE_FORMAT_VALUE_COPY_FUNC(_type, _typeName)                                  \
+    static void copy##_typeName(const char* key, AMediaFormat* to, AMediaFormat* from) { \
+        _type value;                                                                     \
+        if (AMediaFormat_get##_typeName(from, key, &value)) {                            \
+            AMediaFormat_set##_typeName(to, key, value);                                 \
+        }                                                                                \
+    }
+
+DEFINE_FORMAT_VALUE_COPY_FUNC(const char*, String);
+DEFINE_FORMAT_VALUE_COPY_FUNC(int64_t, Int64);
+DEFINE_FORMAT_VALUE_COPY_FUNC(int32_t, Int32);
+
+static AMediaFormat* mergeMediaFormats(AMediaFormat* base, AMediaFormat* overlay) {
+    if (base == nullptr || overlay == nullptr) {
+        LOG(ERROR) << "Cannot merge null formats";
+        return nullptr;
+    }
+
+    AMediaFormat* format = AMediaFormat_new();
+    if (AMediaFormat_copy(format, base) != AMEDIA_OK) {
+        AMediaFormat_delete(format);
+        return nullptr;
+    }
+
+    // Note: AMediaFormat does not expose a function for appending values from another format or for
+    // iterating over all values and keys in a format. Instead we define a static list of known keys
+    // along with their value types and copy the ones that are present. A better solution would be
+    // to either implement required functions in NDK or to parse the overlay format's string
+    // representation and copy all existing keys.
+    static const struct {
+        const char* key;
+        void (*copyValue)(const char* key, AMediaFormat* to, AMediaFormat* from);
+    } kSupportedConfigs[] = {
+            {AMEDIAFORMAT_KEY_MIME, copyString},
+            {AMEDIAFORMAT_KEY_DURATION, copyInt64},
+            {AMEDIAFORMAT_KEY_WIDTH, copyInt32},
+            {AMEDIAFORMAT_KEY_HEIGHT, copyInt32},
+            {AMEDIAFORMAT_KEY_BIT_RATE, copyInt32},
+            {AMEDIAFORMAT_KEY_PROFILE, copyInt32},
+            {AMEDIAFORMAT_KEY_LEVEL, copyInt32},
+            {AMEDIAFORMAT_KEY_COLOR_FORMAT, copyInt32},
+            {AMEDIAFORMAT_KEY_COLOR_RANGE, copyInt32},
+            {AMEDIAFORMAT_KEY_COLOR_STANDARD, copyInt32},
+            {AMEDIAFORMAT_KEY_COLOR_TRANSFER, copyInt32},
+            {AMEDIAFORMAT_KEY_FRAME_RATE, copyInt32},
+            {AMEDIAFORMAT_KEY_I_FRAME_INTERVAL, copyInt32},
+    };
+
+    for (int i = 0; i < (sizeof(kSupportedConfigs) / sizeof(kSupportedConfigs[0])); ++i) {
+        kSupportedConfigs[i].copyValue(kSupportedConfigs[i].key, format, overlay);
+    }
+
+    return format;
+}
+
+void MediaTranscoder::sendCallback(media_status_t status) {
+    // If the transcoder is already cancelled explicitly, don't send any error callbacks.
+    // Tracks and sample writer will report errors for abort. However, currently we can't
+    // tell it apart from real errors. Ideally we still want to report real errors back
+    // to client, as there is a small chance that explicit abort and the real error come
+    // at around the same time, we should report that if abort has a specific error code.
+    // On the other hand, if the transcoder actually finished (status is AMEDIA_OK) at around
+    // the same time of the abort, we should still report the finish back to the client.
+    if (mCancelled && status != AMEDIA_OK) {
+        return;
+    }
+
+    bool expected = false;
+    if (mCallbackSent.compare_exchange_strong(expected, true)) {
+        if (status == AMEDIA_OK) {
+            mCallbacks->onFinished(this);
+        } else {
+            mCallbacks->onError(this, status);
+        }
+
+        // Transcoding is done and the callback to the client has been sent, so tear down the
+        // pipeline but do it asynchronously to avoid deadlocks. If an error occurred, client
+        // should clean up the file.
+        std::thread asyncCancelThread{[self = shared_from_this()] { self->cancel(); }};
+        asyncCancelThread.detach();
+    }
+}
+
+void MediaTranscoder::onTrackFormatAvailable(const MediaTrackTranscoder* transcoder) {
+    LOG(INFO) << "TrackTranscoder " << transcoder << " format available.";
+
+    std::scoped_lock lock{mTracksAddedMutex};
+
+    // Ignore duplicate format change.
+    if (mTracksAdded.count(transcoder) > 0) {
+        return;
+    }
+
+    // Add track to the writer.
+    const bool ok =
+            mSampleWriter->addTrack(transcoder->getOutputQueue(), transcoder->getOutputFormat());
+    if (!ok) {
+        LOG(ERROR) << "Unable to add track to sample writer.";
+        sendCallback(AMEDIA_ERROR_UNKNOWN);
+        return;
+    }
+
+    mTracksAdded.insert(transcoder);
+    if (mTracksAdded.size() == mTrackTranscoders.size()) {
+        LOG(INFO) << "Starting sample writer.";
+        bool started = mSampleWriter->start();
+        if (!started) {
+            LOG(ERROR) << "Unable to start sample writer.";
+            sendCallback(AMEDIA_ERROR_UNKNOWN);
+        }
+    }
+}
+
+void MediaTranscoder::onTrackFinished(const MediaTrackTranscoder* transcoder) {
+    LOG(DEBUG) << "TrackTranscoder " << transcoder << " finished";
+}
+
+void MediaTranscoder::onTrackError(const MediaTrackTranscoder* transcoder, media_status_t status) {
+    LOG(DEBUG) << "TrackTranscoder " << transcoder << " returned error " << status;
+    sendCallback(status);
+}
+
+void MediaTranscoder::onFinished(const MediaSampleWriter* writer __unused, media_status_t status) {
+    LOG((status != AMEDIA_OK) ? ERROR : DEBUG) << "Sample writer finished with status " << status;
+    sendCallback(status);
+}
+
+void MediaTranscoder::onProgressUpdate(const MediaSampleWriter* writer __unused, int32_t progress) {
+    // Dispatch progress updated to the client.
+    mCallbacks->onProgressUpdate(this, progress);
+}
+
+MediaTranscoder::MediaTranscoder(const std::shared_ptr<CallbackInterface>& callbacks)
+      : mCallbacks(callbacks) {}
+
+std::shared_ptr<MediaTranscoder> MediaTranscoder::create(
+        const std::shared_ptr<CallbackInterface>& callbacks,
+        const std::shared_ptr<const Parcel>& pausedState) {
+    if (pausedState != nullptr) {
+        LOG(INFO) << "Initializing from paused state.";
+    }
+    if (callbacks == nullptr) {
+        LOG(ERROR) << "Callbacks cannot be null";
+        return nullptr;
+    }
+
+    return std::shared_ptr<MediaTranscoder>(new MediaTranscoder(callbacks));
+}
+
+media_status_t MediaTranscoder::configureSource(int fd) {
+    if (fd < 0) {
+        LOG(ERROR) << "Invalid source fd: " << fd;
+        return AMEDIA_ERROR_INVALID_PARAMETER;
+    }
+
+    const size_t fileSize = lseek(fd, 0, SEEK_END);
+    lseek(fd, 0, SEEK_SET);
+
+    mSampleReader = MediaSampleReaderNDK::createFromFd(fd, 0 /* offset */, fileSize);
+
+    if (mSampleReader == nullptr) {
+        LOG(ERROR) << "Unable to parse source fd: " << fd;
+        return AMEDIA_ERROR_UNSUPPORTED;
+    }
+
+    const size_t trackCount = mSampleReader->getTrackCount();
+    for (size_t trackIndex = 0; trackIndex < trackCount; ++trackIndex) {
+        AMediaFormat* trackFormat = mSampleReader->getTrackFormat(static_cast<int>(trackIndex));
+        if (trackFormat == nullptr) {
+            LOG(ERROR) << "Track #" << trackIndex << " has no format";
+            return AMEDIA_ERROR_MALFORMED;
+        }
+
+        mSourceTrackFormats.emplace_back(trackFormat, &AMediaFormat_delete);
+    }
+
+    return AMEDIA_OK;
+}
+
+std::vector<std::shared_ptr<AMediaFormat>> MediaTranscoder::getTrackFormats() const {
+    // Return a deep copy of the formats to avoid the caller modifying our internal formats.
+    std::vector<std::shared_ptr<AMediaFormat>> trackFormats;
+    for (const std::shared_ptr<AMediaFormat>& sourceFormat : mSourceTrackFormats) {
+        AMediaFormat* copy = AMediaFormat_new();
+        AMediaFormat_copy(copy, sourceFormat.get());
+        trackFormats.emplace_back(copy, &AMediaFormat_delete);
+    }
+    return trackFormats;
+}
+
+media_status_t MediaTranscoder::configureTrackFormat(size_t trackIndex, AMediaFormat* trackFormat) {
+    if (mSampleReader == nullptr) {
+        LOG(ERROR) << "Source must be configured before tracks";
+        return AMEDIA_ERROR_INVALID_OPERATION;
+    } else if (trackIndex >= mSourceTrackFormats.size()) {
+        LOG(ERROR) << "Track index " << trackIndex
+                   << " is out of bounds. Track count: " << mSourceTrackFormats.size();
+        return AMEDIA_ERROR_INVALID_PARAMETER;
+    }
+
+    std::shared_ptr<MediaTrackTranscoder> transcoder;
+    std::shared_ptr<AMediaFormat> format;
+
+    if (trackFormat == nullptr) {
+        transcoder = std::make_shared<PassthroughTrackTranscoder>(shared_from_this());
+    } else {
+        const char* srcMime = nullptr;
+        if (!AMediaFormat_getString(mSourceTrackFormats[trackIndex].get(), AMEDIAFORMAT_KEY_MIME,
+                                    &srcMime)) {
+            LOG(ERROR) << "Source track #" << trackIndex << " has no mime type";
+            return AMEDIA_ERROR_MALFORMED;
+        }
+
+        if (strncmp(srcMime, "video/", 6) != 0) {
+            LOG(ERROR) << "Only video tracks are supported for transcoding. Unable to configure "
+                          "track #"
+                       << trackIndex << " with mime " << srcMime;
+            return AMEDIA_ERROR_UNSUPPORTED;
+        }
+
+        const char* dstMime = nullptr;
+        if (AMediaFormat_getString(trackFormat, AMEDIAFORMAT_KEY_MIME, &dstMime)) {
+            if (strncmp(dstMime, "video/", 6) != 0) {
+                LOG(ERROR) << "Unable to convert media types for track #" << trackIndex << ", from "
+                           << srcMime << " to " << dstMime;
+                return AMEDIA_ERROR_UNSUPPORTED;
+            }
+        }
+
+        transcoder = VideoTrackTranscoder::create(shared_from_this());
+
+        AMediaFormat* mergedFormat =
+                mergeMediaFormats(mSourceTrackFormats[trackIndex].get(), trackFormat);
+        if (mergedFormat == nullptr) {
+            LOG(ERROR) << "Unable to merge source and destination formats";
+            return AMEDIA_ERROR_UNKNOWN;
+        }
+
+        format = std::shared_ptr<AMediaFormat>(mergedFormat, &AMediaFormat_delete);
+    }
+
+    media_status_t status = transcoder->configure(mSampleReader, trackIndex, format);
+    if (status != AMEDIA_OK) {
+        LOG(ERROR) << "Configure track transcoder for track #" << trackIndex << " returned error "
+                   << status;
+        return status;
+    }
+
+    mTrackTranscoders.emplace_back(std::move(transcoder));
+    return AMEDIA_OK;
+}
+
+media_status_t MediaTranscoder::configureDestination(int fd) {
+    if (fd < 0) {
+        LOG(ERROR) << "Invalid destination fd: " << fd;
+        return AMEDIA_ERROR_INVALID_PARAMETER;
+    }
+
+    if (mSampleWriter != nullptr) {
+        LOG(ERROR) << "Destination is already configured.";
+        return AMEDIA_ERROR_INVALID_OPERATION;
+    }
+
+    mSampleWriter = std::make_unique<MediaSampleWriter>();
+    const bool initOk = mSampleWriter->init(fd, shared_from_this());
+
+    if (!initOk) {
+        LOG(ERROR) << "Unable to initialize sample writer with destination fd: " << fd;
+        mSampleWriter.reset();
+        return AMEDIA_ERROR_UNKNOWN;
+    }
+
+    return AMEDIA_OK;
+}
+
+media_status_t MediaTranscoder::start() {
+    if (mTrackTranscoders.size() < 1) {
+        LOG(ERROR) << "Unable to start, no tracks are configured.";
+        return AMEDIA_ERROR_INVALID_OPERATION;
+    } else if (mSampleWriter == nullptr) {
+        LOG(ERROR) << "Unable to start, destination is not configured";
+        return AMEDIA_ERROR_INVALID_OPERATION;
+    }
+
+    // Start transcoders
+    for (auto& transcoder : mTrackTranscoders) {
+        bool started = transcoder->start();
+        if (!started) {
+            LOG(ERROR) << "Unable to start track transcoder.";
+            cancel();
+            return AMEDIA_ERROR_UNKNOWN;
+        }
+    }
+    return AMEDIA_OK;
+}
+
+media_status_t MediaTranscoder::pause(std::shared_ptr<const Parcel>* pausedState) {
+    // TODO: write internal states to parcel.
+    *pausedState = std::make_shared<Parcel>();
+    return cancel();
+}
+
+media_status_t MediaTranscoder::resume() {
+    // TODO: restore internal states from parcel.
+    return start();
+}
+
+media_status_t MediaTranscoder::cancel() {
+    bool expected = false;
+    if (!mCancelled.compare_exchange_strong(expected, true)) {
+        // Already cancelled.
+        return AMEDIA_OK;
+    }
+
+    mSampleWriter->stop();
+    for (auto& transcoder : mTrackTranscoders) {
+        transcoder->stop();
+    }
+
+    return AMEDIA_OK;
+}
+
+}  // namespace android
diff --git a/media/libmediatranscoding/transcoder/NdkCommon.cpp b/media/libmediatranscoding/transcoder/NdkCommon.cpp
new file mode 100644
index 0000000..67a8e10
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/NdkCommon.cpp
@@ -0,0 +1,40 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+//#define LOG_NDEBUG 0
+#define LOG_TAG "NdkCommon"
+
+#include <log/log.h>
+#include <media/NdkCommon.h>
+
+#include <cstdio>
+#include <cstring>
+#include <utility>
+
+/* TODO(b/153592281)
+ * Note: constants used by the native media tests but not available in media ndk api
+ */
+const char* AMEDIA_MIMETYPE_VIDEO_VP8 = "video/x-vnd.on2.vp8";
+const char* AMEDIA_MIMETYPE_VIDEO_VP9 = "video/x-vnd.on2.vp9";
+const char* AMEDIA_MIMETYPE_VIDEO_AV1 = "video/av01";
+const char* AMEDIA_MIMETYPE_VIDEO_AVC = "video/avc";
+const char* AMEDIA_MIMETYPE_VIDEO_HEVC = "video/hevc";
+const char* AMEDIA_MIMETYPE_VIDEO_MPEG4 = "video/mp4v-es";
+const char* AMEDIA_MIMETYPE_VIDEO_H263 = "video/3gpp";
+
+/* TODO(b/153592281) */
+const char* TBD_AMEDIACODEC_PARAMETER_KEY_REQUEST_SYNC_FRAME = "request-sync";
+const char* TBD_AMEDIACODEC_PARAMETER_KEY_VIDEO_BITRATE = "video-bitrate";
+const char* TBD_AMEDIACODEC_PARAMETER_KEY_MAX_B_FRAMES = "max-bframes";
diff --git a/media/libmediatranscoding/transcoder/PassthroughTrackTranscoder.cpp b/media/libmediatranscoding/transcoder/PassthroughTrackTranscoder.cpp
new file mode 100644
index 0000000..e2cc6b6
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/PassthroughTrackTranscoder.cpp
@@ -0,0 +1,163 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// #define LOG_NDEBUG 0
+#define LOG_TAG "PassthroughTrackTranscoder"
+
+#include <android-base/logging.h>
+#include <media/PassthroughTrackTranscoder.h>
+
+namespace android {
+
+PassthroughTrackTranscoder::BufferPool::~BufferPool() {
+    for (auto it = mAddressSizeMap.begin(); it != mAddressSizeMap.end(); ++it) {
+        delete[] it->first;
+    }
+}
+
+uint8_t* PassthroughTrackTranscoder::BufferPool::getBufferWithSize(size_t minimumBufferSize)
+        NO_THREAD_SAFETY_ANALYSIS {
+    std::unique_lock lock(mMutex);
+
+    // Wait if maximum number of buffers are allocated but none are free.
+    while (mAddressSizeMap.size() >= mMaxBufferCount && mFreeBufferMap.empty() && !mAborted) {
+        mCondition.wait(lock);
+    }
+
+    if (mAborted) {
+        return nullptr;
+    }
+
+    // Check if the free list contains a large enough buffer.
+    auto it = mFreeBufferMap.lower_bound(minimumBufferSize);
+    if (it != mFreeBufferMap.end()) {
+        uint8_t* buffer = it->second;
+        mFreeBufferMap.erase(it);
+        return buffer;
+    }
+
+    // If the maximum buffer count is reached, remove an existing free buffer.
+    if (mAddressSizeMap.size() >= mMaxBufferCount) {
+        auto it = mFreeBufferMap.begin();
+        mAddressSizeMap.erase(it->second);
+        delete[] it->second;
+        mFreeBufferMap.erase(it);
+    }
+
+    // Allocate a new buffer.
+    uint8_t* buffer = new (std::nothrow) uint8_t[minimumBufferSize];
+    if (buffer == nullptr) {
+        LOG(ERROR) << "Unable to allocate new buffer of size: " << minimumBufferSize;
+        return nullptr;
+    }
+
+    // Add the buffer to the tracking set.
+    mAddressSizeMap.emplace(buffer, minimumBufferSize);
+    return buffer;
+}
+
+void PassthroughTrackTranscoder::BufferPool::returnBuffer(uint8_t* buffer) {
+    std::scoped_lock lock(mMutex);
+
+    if (buffer == nullptr || mAddressSizeMap.find(buffer) == mAddressSizeMap.end()) {
+        LOG(WARNING) << "Ignoring untracked buffer " << buffer;
+        return;
+    }
+
+    mFreeBufferMap.emplace(mAddressSizeMap[buffer], buffer);
+    mCondition.notify_one();
+}
+
+void PassthroughTrackTranscoder::BufferPool::abort() {
+    std::scoped_lock lock(mMutex);
+    mAborted = true;
+    mCondition.notify_all();
+}
+
+media_status_t PassthroughTrackTranscoder::configureDestinationFormat(
+        const std::shared_ptr<AMediaFormat>& destinationFormat __unused) {
+    // Called by MediaTrackTranscoder. Passthrough doesn't care about destination so just return ok.
+    return AMEDIA_OK;
+}
+
+media_status_t PassthroughTrackTranscoder::runTranscodeLoop() {
+    MediaSampleInfo info;
+    std::shared_ptr<MediaSample> sample;
+
+    // Notify the track format as soon as we start. It's same as the source format.
+    notifyTrackFormatAvailable();
+
+    MediaSample::OnSampleReleasedCallback bufferReleaseCallback =
+            [bufferPool = mBufferPool](MediaSample* sample) {
+                bufferPool->returnBuffer(const_cast<uint8_t*>(sample->buffer));
+            };
+
+    // Move samples until EOS is reached or transcoding is stopped.
+    while (!mStopRequested && !mEosFromSource) {
+        media_status_t status = mMediaSampleReader->getSampleInfoForTrack(mTrackIndex, &info);
+
+        if (status == AMEDIA_OK) {
+            uint8_t* buffer = mBufferPool->getBufferWithSize(info.size);
+            if (buffer == nullptr) {
+                if (mStopRequested) {
+                    break;
+                }
+
+                LOG(ERROR) << "Unable to get buffer from pool";
+                return AMEDIA_ERROR_IO;  // TODO: Custom error codes?
+            }
+
+            sample = MediaSample::createWithReleaseCallback(
+                    buffer, 0 /* offset */, 0 /* bufferId */, bufferReleaseCallback);
+
+            status = mMediaSampleReader->readSampleDataForTrack(mTrackIndex, buffer, info.size);
+            if (status != AMEDIA_OK) {
+                LOG(ERROR) << "Unable to read next sample data. Aborting transcode.";
+                return status;
+            }
+
+        } else if (status == AMEDIA_ERROR_END_OF_STREAM) {
+            sample = std::make_shared<MediaSample>();
+            mEosFromSource = true;
+        } else {
+            LOG(ERROR) << "Unable to get next sample info. Aborting transcode.";
+            return status;
+        }
+
+        sample->info = info;
+        if (mOutputQueue->enqueue(sample)) {
+            LOG(ERROR) << "Output queue aborted";
+            return AMEDIA_ERROR_IO;
+        }
+
+        mMediaSampleReader->advanceTrack(mTrackIndex);
+    }
+
+    if (mStopRequested && !mEosFromSource) {
+        return AMEDIA_ERROR_UNKNOWN;  // TODO: Custom error codes?
+    }
+    return AMEDIA_OK;
+}
+
+void PassthroughTrackTranscoder::abortTranscodeLoop() {
+    mStopRequested = true;
+    mBufferPool->abort();
+}
+
+std::shared_ptr<AMediaFormat> PassthroughTrackTranscoder::getOutputFormat() const {
+    return mSourceFormat;
+}
+}  // namespace android
diff --git a/media/libmediatranscoding/transcoder/VideoTrackTranscoder.cpp b/media/libmediatranscoding/transcoder/VideoTrackTranscoder.cpp
new file mode 100644
index 0000000..65dcad3
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/VideoTrackTranscoder.cpp
@@ -0,0 +1,484 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// #define LOG_NDEBUG 0
+#define LOG_TAG "VideoTrackTranscoder"
+
+#include <android-base/logging.h>
+#include <media/VideoTrackTranscoder.h>
+
+namespace android {
+
+// Check that the codec sample flags have the expected NDK meaning.
+static_assert(SAMPLE_FLAG_CODEC_CONFIG == AMEDIACODEC_BUFFER_FLAG_CODEC_CONFIG,
+              "Sample flag mismatch: CODEC_CONFIG");
+static_assert(SAMPLE_FLAG_END_OF_STREAM == AMEDIACODEC_BUFFER_FLAG_END_OF_STREAM,
+              "Sample flag mismatch: END_OF_STREAM");
+static_assert(SAMPLE_FLAG_PARTIAL_FRAME == AMEDIACODEC_BUFFER_FLAG_PARTIAL_FRAME,
+              "Sample flag mismatch: PARTIAL_FRAME");
+
+// Color format defined by surface. (See MediaCodecInfo.CodecCapabilities#COLOR_FormatSurface.)
+static constexpr int32_t kColorFormatSurface = 0x7f000789;
+// Default key frame interval in seconds.
+static constexpr float kDefaultKeyFrameIntervalSeconds = 1.0f;
+
+template <typename T>
+void VideoTrackTranscoder::BlockingQueue<T>::push(T const& value, bool front) {
+    {
+        std::unique_lock<std::mutex> lock(mMutex);
+        if (front) {
+            mQueue.push_front(value);
+        } else {
+            mQueue.push_back(value);
+        }
+    }
+    mCondition.notify_one();
+}
+
+template <typename T>
+T VideoTrackTranscoder::BlockingQueue<T>::pop() {
+    std::unique_lock<std::mutex> lock(mMutex);
+    while (mQueue.empty()) {
+        mCondition.wait(lock);
+    }
+    T value = mQueue.front();
+    mQueue.pop_front();
+    return value;
+}
+
+// The CodecWrapper class is used to let AMediaCodec instances outlive the transcoder object itself
+// by giving the codec a weak pointer to the transcoder. Codecs wrapped in this object are kept
+// alive by the transcoder and the codec's outstanding buffers. Once the transcoder stops and all
+// output buffers have been released by downstream components the codec will also be released.
+class VideoTrackTranscoder::CodecWrapper {
+public:
+    CodecWrapper(AMediaCodec* codec, const std::weak_ptr<VideoTrackTranscoder>& transcoder)
+          : mCodec(codec), mTranscoder(transcoder), mCodecStarted(false) {}
+    ~CodecWrapper() {
+        if (mCodecStarted) {
+            AMediaCodec_stop(mCodec);
+        }
+        AMediaCodec_delete(mCodec);
+    }
+
+    AMediaCodec* getCodec() { return mCodec; }
+    std::shared_ptr<VideoTrackTranscoder> getTranscoder() const { return mTranscoder.lock(); };
+    void setStarted() { mCodecStarted = true; }
+
+private:
+    AMediaCodec* mCodec;
+    std::weak_ptr<VideoTrackTranscoder> mTranscoder;
+    bool mCodecStarted;
+};
+
+// Dispatch responses to codec callbacks onto the message queue.
+struct AsyncCodecCallbackDispatch {
+    static void onAsyncInputAvailable(AMediaCodec* codec, void* userdata, int32_t index) {
+        VideoTrackTranscoder::CodecWrapper* wrapper =
+                static_cast<VideoTrackTranscoder::CodecWrapper*>(userdata);
+        if (auto transcoder = wrapper->getTranscoder()) {
+            if (codec == transcoder->mDecoder) {
+                transcoder->mCodecMessageQueue.push(
+                        [transcoder, index] { transcoder->enqueueInputSample(index); });
+            }
+        }
+    }
+
+    static void onAsyncOutputAvailable(AMediaCodec* codec, void* userdata, int32_t index,
+                                       AMediaCodecBufferInfo* bufferInfoPtr) {
+        VideoTrackTranscoder::CodecWrapper* wrapper =
+                static_cast<VideoTrackTranscoder::CodecWrapper*>(userdata);
+        AMediaCodecBufferInfo bufferInfo = *bufferInfoPtr;
+        if (auto transcoder = wrapper->getTranscoder()) {
+            transcoder->mCodecMessageQueue.push([transcoder, index, codec, bufferInfo] {
+                if (codec == transcoder->mDecoder) {
+                    transcoder->transferBuffer(index, bufferInfo);
+                } else if (codec == transcoder->mEncoder->getCodec()) {
+                    transcoder->dequeueOutputSample(index, bufferInfo);
+                }
+            });
+        }
+    }
+
+    static void onAsyncFormatChanged(AMediaCodec* codec, void* userdata, AMediaFormat* format) {
+        VideoTrackTranscoder::CodecWrapper* wrapper =
+                static_cast<VideoTrackTranscoder::CodecWrapper*>(userdata);
+        if (auto transcoder = wrapper->getTranscoder()) {
+            const char* kCodecName = (codec == transcoder->mDecoder ? "Decoder" : "Encoder");
+            LOG(DEBUG) << kCodecName << " format changed: " << AMediaFormat_toString(format);
+            if (codec == transcoder->mEncoder->getCodec()) {
+                transcoder->mCodecMessageQueue.push(
+                        [transcoder, format] { transcoder->updateTrackFormat(format); });
+            }
+        }
+    }
+
+    static void onAsyncError(AMediaCodec* codec, void* userdata, media_status_t error,
+                             int32_t actionCode, const char* detail) {
+        LOG(ERROR) << "Error from codec " << codec << ", userdata " << userdata << ", error "
+                   << error << ", action " << actionCode << ", detail " << detail;
+        VideoTrackTranscoder::CodecWrapper* wrapper =
+                static_cast<VideoTrackTranscoder::CodecWrapper*>(userdata);
+        if (auto transcoder = wrapper->getTranscoder()) {
+            transcoder->mCodecMessageQueue.push(
+                    [transcoder, error] {
+                        transcoder->mStatus = error;
+                        transcoder->mStopRequested = true;
+                    },
+                    true);
+        }
+    }
+};
+
+// static
+std::shared_ptr<VideoTrackTranscoder> VideoTrackTranscoder::create(
+        const std::weak_ptr<MediaTrackTranscoderCallback>& transcoderCallback) {
+    return std::shared_ptr<VideoTrackTranscoder>(new VideoTrackTranscoder(transcoderCallback));
+}
+
+VideoTrackTranscoder::~VideoTrackTranscoder() {
+    if (mDecoder != nullptr) {
+        AMediaCodec_delete(mDecoder);
+    }
+
+    if (mSurface != nullptr) {
+        ANativeWindow_release(mSurface);
+    }
+}
+
+// Creates and configures the codecs.
+media_status_t VideoTrackTranscoder::configureDestinationFormat(
+        const std::shared_ptr<AMediaFormat>& destinationFormat) {
+    media_status_t status = AMEDIA_OK;
+
+    if (destinationFormat == nullptr) {
+        LOG(ERROR) << "Destination format is null, use passthrough transcoder";
+        return AMEDIA_ERROR_INVALID_PARAMETER;
+    }
+
+    AMediaFormat* encoderFormat = AMediaFormat_new();
+    if (!encoderFormat || AMediaFormat_copy(encoderFormat, destinationFormat.get()) != AMEDIA_OK) {
+        LOG(ERROR) << "Unable to copy destination format";
+        return AMEDIA_ERROR_INVALID_PARAMETER;
+    }
+
+    float tmp;
+    if (!AMediaFormat_getFloat(encoderFormat, AMEDIAFORMAT_KEY_I_FRAME_INTERVAL, &tmp)) {
+        AMediaFormat_setFloat(encoderFormat, AMEDIAFORMAT_KEY_I_FRAME_INTERVAL,
+                              kDefaultKeyFrameIntervalSeconds);
+    }
+    AMediaFormat_setInt32(encoderFormat, AMEDIAFORMAT_KEY_COLOR_FORMAT, kColorFormatSurface);
+
+    // Always encode without rotation. The rotation degree will be transferred directly to
+    // MediaSampleWriter track format, and MediaSampleWriter will call AMediaMuxer_setOrientationHint.
+    AMediaFormat_setInt32(encoderFormat, AMEDIAFORMAT_KEY_ROTATION, 0);
+
+    mDestinationFormat = std::shared_ptr<AMediaFormat>(encoderFormat, &AMediaFormat_delete);
+
+    // Create and configure the encoder.
+    const char* destinationMime = nullptr;
+    bool ok = AMediaFormat_getString(mDestinationFormat.get(), AMEDIAFORMAT_KEY_MIME,
+                                     &destinationMime);
+    if (!ok) {
+        LOG(ERROR) << "Destination MIME type is required for transcoding.";
+        return AMEDIA_ERROR_INVALID_PARAMETER;
+    }
+
+    AMediaCodec* encoder = AMediaCodec_createEncoderByType(destinationMime);
+    if (encoder == nullptr) {
+        LOG(ERROR) << "Unable to create encoder for type " << destinationMime;
+        return AMEDIA_ERROR_UNSUPPORTED;
+    }
+    mEncoder = std::make_shared<CodecWrapper>(encoder, shared_from_this());
+
+    status = AMediaCodec_configure(mEncoder->getCodec(), mDestinationFormat.get(),
+                                   NULL /* surface */, NULL /* crypto */,
+                                   AMEDIACODEC_CONFIGURE_FLAG_ENCODE);
+    if (status != AMEDIA_OK) {
+        LOG(ERROR) << "Unable to configure video encoder: " << status;
+        return status;
+    }
+
+    status = AMediaCodec_createInputSurface(mEncoder->getCodec(), &mSurface);
+    if (status != AMEDIA_OK) {
+        LOG(ERROR) << "Unable to create an encoder input surface: %d" << status;
+        return status;
+    }
+
+    // Create and configure the decoder.
+    const char* sourceMime = nullptr;
+    ok = AMediaFormat_getString(mSourceFormat.get(), AMEDIAFORMAT_KEY_MIME, &sourceMime);
+    if (!ok) {
+        LOG(ERROR) << "Source MIME type is required for transcoding.";
+        return AMEDIA_ERROR_INVALID_PARAMETER;
+    }
+
+    mDecoder = AMediaCodec_createDecoderByType(sourceMime);
+    if (mDecoder == nullptr) {
+        LOG(ERROR) << "Unable to create decoder for type " << sourceMime;
+        return AMEDIA_ERROR_UNSUPPORTED;
+    }
+
+    status = AMediaCodec_configure(mDecoder, mSourceFormat.get(), mSurface, NULL /* crypto */,
+                                   0 /* flags */);
+    if (status != AMEDIA_OK) {
+        LOG(ERROR) << "Unable to configure video decoder: " << status;
+        return status;
+    }
+
+    // Configure codecs to run in async mode.
+    AMediaCodecOnAsyncNotifyCallback asyncCodecCallbacks = {
+            .onAsyncInputAvailable = AsyncCodecCallbackDispatch::onAsyncInputAvailable,
+            .onAsyncOutputAvailable = AsyncCodecCallbackDispatch::onAsyncOutputAvailable,
+            .onAsyncFormatChanged = AsyncCodecCallbackDispatch::onAsyncFormatChanged,
+            .onAsyncError = AsyncCodecCallbackDispatch::onAsyncError};
+
+    // Note: The decoder does not need its own wrapper because its lifetime is tied to the
+    // transcoder. But the same callbacks are reused for decoder and encoder so we pass the encoder
+    // wrapper as userdata here but never read the codec from it in the callback.
+    status = AMediaCodec_setAsyncNotifyCallback(mDecoder, asyncCodecCallbacks, mEncoder.get());
+    if (status != AMEDIA_OK) {
+        LOG(ERROR) << "Unable to set decoder to async mode: " << status;
+        return status;
+    }
+
+    status = AMediaCodec_setAsyncNotifyCallback(mEncoder->getCodec(), asyncCodecCallbacks,
+                                                mEncoder.get());
+    if (status != AMEDIA_OK) {
+        LOG(ERROR) << "Unable to set encoder to async mode: " << status;
+        return status;
+    }
+
+    return AMEDIA_OK;
+}
+
+void VideoTrackTranscoder::enqueueInputSample(int32_t bufferIndex) {
+    media_status_t status = AMEDIA_OK;
+
+    if (mEosFromSource) {
+        return;
+    }
+
+    status = mMediaSampleReader->getSampleInfoForTrack(mTrackIndex, &mSampleInfo);
+    if (status != AMEDIA_OK && status != AMEDIA_ERROR_END_OF_STREAM) {
+        LOG(ERROR) << "Error getting next sample info: " << status;
+        mStatus = status;
+        return;
+    }
+    const bool endOfStream = (status == AMEDIA_ERROR_END_OF_STREAM);
+
+    if (!endOfStream) {
+        size_t bufferSize = 0;
+        uint8_t* sourceBuffer = AMediaCodec_getInputBuffer(mDecoder, bufferIndex, &bufferSize);
+        if (sourceBuffer == nullptr) {
+            LOG(ERROR) << "Decoder returned a NULL input buffer.";
+            mStatus = AMEDIA_ERROR_UNKNOWN;
+            return;
+        } else if (bufferSize < mSampleInfo.size) {
+            LOG(ERROR) << "Decoder returned an input buffer that is smaller than the sample.";
+            mStatus = AMEDIA_ERROR_UNKNOWN;
+            return;
+        }
+
+        status = mMediaSampleReader->readSampleDataForTrack(mTrackIndex, sourceBuffer,
+                                                            mSampleInfo.size);
+        if (status != AMEDIA_OK) {
+            LOG(ERROR) << "Unable to read next sample data. Aborting transcode.";
+            mStatus = status;
+            return;
+        }
+
+        mMediaSampleReader->advanceTrack(mTrackIndex);
+    } else {
+        LOG(DEBUG) << "EOS from source.";
+        mEosFromSource = true;
+    }
+
+    status = AMediaCodec_queueInputBuffer(mDecoder, bufferIndex, 0, mSampleInfo.size,
+                                          mSampleInfo.presentationTimeUs, mSampleInfo.flags);
+    if (status != AMEDIA_OK) {
+        LOG(ERROR) << "Unable to queue input buffer for decode: " << status;
+        mStatus = status;
+        return;
+    }
+}
+
+void VideoTrackTranscoder::transferBuffer(int32_t bufferIndex, AMediaCodecBufferInfo bufferInfo) {
+    if (bufferIndex >= 0) {
+        bool needsRender = bufferInfo.size > 0;
+        AMediaCodec_releaseOutputBuffer(mDecoder, bufferIndex, needsRender);
+    }
+
+    if (bufferInfo.flags & AMEDIACODEC_BUFFER_FLAG_END_OF_STREAM) {
+        LOG(DEBUG) << "EOS from decoder.";
+        media_status_t status = AMediaCodec_signalEndOfInputStream(mEncoder->getCodec());
+        if (status != AMEDIA_OK) {
+            LOG(ERROR) << "SignalEOS on encoder returned error: " << status;
+            mStatus = status;
+        }
+    }
+}
+
+void VideoTrackTranscoder::dequeueOutputSample(int32_t bufferIndex,
+                                               AMediaCodecBufferInfo bufferInfo) {
+    if (bufferIndex >= 0) {
+        size_t sampleSize = 0;
+        uint8_t* buffer =
+                AMediaCodec_getOutputBuffer(mEncoder->getCodec(), bufferIndex, &sampleSize);
+
+        MediaSample::OnSampleReleasedCallback bufferReleaseCallback =
+                [encoder = mEncoder](MediaSample* sample) {
+                    AMediaCodec_releaseOutputBuffer(encoder->getCodec(), sample->bufferId,
+                                                    false /* render */);
+                };
+
+        std::shared_ptr<MediaSample> sample = MediaSample::createWithReleaseCallback(
+                buffer, bufferInfo.offset, bufferIndex, bufferReleaseCallback);
+        sample->info.size = bufferInfo.size;
+        sample->info.flags = bufferInfo.flags;
+        sample->info.presentationTimeUs = bufferInfo.presentationTimeUs;
+
+        const bool aborted = mOutputQueue->enqueue(sample);
+        if (aborted) {
+            LOG(ERROR) << "Output sample queue was aborted. Stopping transcode.";
+            mStatus = AMEDIA_ERROR_IO;  // TODO: Define custom error codes?
+            return;
+        }
+    } else if (bufferIndex == AMEDIACODEC_INFO_OUTPUT_FORMAT_CHANGED) {
+        AMediaFormat* newFormat = AMediaCodec_getOutputFormat(mEncoder->getCodec());
+        LOG(DEBUG) << "Encoder output format changed: " << AMediaFormat_toString(newFormat);
+    }
+
+    if (bufferInfo.flags & AMEDIACODEC_BUFFER_FLAG_END_OF_STREAM) {
+        LOG(DEBUG) << "EOS from encoder.";
+        mEosFromEncoder = true;
+    }
+}
+
+void VideoTrackTranscoder::updateTrackFormat(AMediaFormat* outputFormat) {
+    if (mActualOutputFormat != nullptr) {
+        LOG(WARNING) << "Ignoring duplicate format change.";
+        return;
+    }
+
+    AMediaFormat* formatCopy = AMediaFormat_new();
+    if (!formatCopy || AMediaFormat_copy(formatCopy, outputFormat) != AMEDIA_OK) {
+        LOG(ERROR) << "Unable to copy outputFormat";
+        AMediaFormat_delete(formatCopy);
+        mStatus = AMEDIA_ERROR_INVALID_PARAMETER;
+        return;
+    }
+
+    // Generate the actual track format for muxer based on the encoder output format,
+    // since many vital information comes in the encoder format (eg. CSD).
+    // Transfer necessary fields from the user-configured track format (derived from
+    // source track format and user transcoding request) where needed.
+
+    // Transfer SAR settings:
+    // If mDestinationFormat has SAR set, it means the original source has SAR specified
+    // at container level. This is supposed to override any SAR settings in the bitstream,
+    // thus should always be transferred to the container of the transcoded file.
+    int32_t sarWidth, sarHeight;
+    if (AMediaFormat_getInt32(mSourceFormat.get(), AMEDIAFORMAT_KEY_SAR_WIDTH, &sarWidth) &&
+        (sarWidth > 0) &&
+        AMediaFormat_getInt32(mSourceFormat.get(), AMEDIAFORMAT_KEY_SAR_HEIGHT, &sarHeight) &&
+        (sarHeight > 0)) {
+        AMediaFormat_setInt32(formatCopy, AMEDIAFORMAT_KEY_SAR_WIDTH, sarWidth);
+        AMediaFormat_setInt32(formatCopy, AMEDIAFORMAT_KEY_SAR_HEIGHT, sarHeight);
+    }
+    // Transfer DAR settings.
+    int32_t displayWidth, displayHeight;
+    if (AMediaFormat_getInt32(mSourceFormat.get(), AMEDIAFORMAT_KEY_DISPLAY_WIDTH, &displayWidth) &&
+        (displayWidth > 0) &&
+        AMediaFormat_getInt32(mSourceFormat.get(), AMEDIAFORMAT_KEY_DISPLAY_HEIGHT,
+                              &displayHeight) &&
+        (displayHeight > 0)) {
+        AMediaFormat_setInt32(formatCopy, AMEDIAFORMAT_KEY_DISPLAY_WIDTH, displayWidth);
+        AMediaFormat_setInt32(formatCopy, AMEDIAFORMAT_KEY_DISPLAY_HEIGHT, displayHeight);
+    }
+
+    // Transfer rotation settings.
+    // Note that muxer itself doesn't take rotation from the track format. It requires
+    // AMediaMuxer_setOrientationHint to set the rotation. Here we pass the rotation to
+    // MediaSampleWriter using the track format. MediaSampleWriter will then call
+    // AMediaMuxer_setOrientationHint as needed.
+    int32_t rotation;
+    if (AMediaFormat_getInt32(mSourceFormat.get(), AMEDIAFORMAT_KEY_ROTATION, &rotation) &&
+        (rotation != 0)) {
+        AMediaFormat_setInt32(formatCopy, AMEDIAFORMAT_KEY_ROTATION, rotation);
+    }
+
+    // Transfer track duration.
+    // Preserve the source track duration by sending it to MediaSampleWriter.
+    int64_t durationUs;
+    if (AMediaFormat_getInt64(mSourceFormat.get(), AMEDIAFORMAT_KEY_DURATION, &durationUs) &&
+        durationUs > 0) {
+        AMediaFormat_setInt64(formatCopy, AMEDIAFORMAT_KEY_DURATION, durationUs);
+    }
+
+    // TODO: transfer other fields as required.
+
+    mActualOutputFormat = std::shared_ptr<AMediaFormat>(formatCopy, &AMediaFormat_delete);
+
+    notifyTrackFormatAvailable();
+}
+
+media_status_t VideoTrackTranscoder::runTranscodeLoop() {
+    // Push start decoder and encoder as two messages, so that these are subject to the
+    // stop request as well. If the job is cancelled (or paused) immediately after start,
+    // we don't need to waste time start then stop the codecs.
+    mCodecMessageQueue.push([this] {
+        media_status_t status = AMediaCodec_start(mDecoder);
+        if (status != AMEDIA_OK) {
+            LOG(ERROR) << "Unable to start video decoder: " << status;
+            mStatus = status;
+        }
+    });
+
+    mCodecMessageQueue.push([this] {
+        media_status_t status = AMediaCodec_start(mEncoder->getCodec());
+        if (status != AMEDIA_OK) {
+            LOG(ERROR) << "Unable to start video encoder: " << status;
+            mStatus = status;
+        }
+        mEncoder->setStarted();
+    });
+
+    // Process codec events until EOS is reached, transcoding is stopped or an error occurs.
+    while (!mStopRequested && !mEosFromEncoder && mStatus == AMEDIA_OK) {
+        std::function<void()> message = mCodecMessageQueue.pop();
+        message();
+    }
+
+    // Return error if transcoding was stopped before it finished.
+    if (mStopRequested && !mEosFromEncoder && mStatus == AMEDIA_OK) {
+        mStatus = AMEDIA_ERROR_UNKNOWN;  // TODO: Define custom error codes?
+    }
+
+    AMediaCodec_stop(mDecoder);
+    return mStatus;
+}
+
+void VideoTrackTranscoder::abortTranscodeLoop() {
+    // Push abort message to the front of the codec event queue.
+    mCodecMessageQueue.push([this] { mStopRequested = true; }, true /* front */);
+}
+
+std::shared_ptr<AMediaFormat> VideoTrackTranscoder::getOutputFormat() const {
+    return mActualOutputFormat;
+}
+
+}  // namespace android
diff --git a/media/libmediatranscoding/transcoder/include/media/MediaSample.h b/media/libmediatranscoding/transcoder/include/media/MediaSample.h
new file mode 100644
index 0000000..8a239a6
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/include/media/MediaSample.h
@@ -0,0 +1,133 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_MEDIA_SAMPLE_H
+#define ANDROID_MEDIA_SAMPLE_H
+
+#include <cstdint>
+#include <functional>
+#include <memory>
+
+namespace android {
+
+/**
+ * Media sample flags.
+ * These flags purposely match the media NDK's buffer and extractor flags with one exception. The
+ * NDK extractor's flag for encrypted samples (AMEDIAEXTRACTOR_SAMPLE_FLAG_ENCRYPTED) is equal to 2,
+ * i.e. the same as SAMPLE_FLAG_CODEC_CONFIG below and NDK's AMEDIACODEC_BUFFER_FLAG_CODEC_CONFIG.
+ * Sample producers based on the NDK's extractor is responsible for catching those values.
+ * Note that currently the media transcoder does not support encrypted samples.
+ */
+enum : uint32_t {
+    SAMPLE_FLAG_SYNC_SAMPLE = 1,
+    SAMPLE_FLAG_CODEC_CONFIG = 2,
+    SAMPLE_FLAG_END_OF_STREAM = 4,
+    SAMPLE_FLAG_PARTIAL_FRAME = 8,
+};
+
+/**
+ * MediaSampleInfo is an object that carries information about a compressed media sample without
+ * holding any sample data.
+ */
+struct MediaSampleInfo {
+    /** The sample's presentation timestamp in microseconds. */
+    int64_t presentationTimeUs = 0;
+
+    /** The size of the compressed sample data in bytes. */
+    size_t size = 0;
+
+    /** Sample flags. */
+    uint32_t flags = 0;
+};
+
+/**
+ * MediaSample holds a compressed media sample in memory.
+ */
+struct MediaSample {
+    /**
+     * Callback to notify that a media sample is about to be released, giving the creator a chance
+     * to reclaim the data buffer backing the sample. Once this callback returns, the media sample
+     * instance *will* be released so it cannot be used outside of the callback. To enable the
+     * callback, create the media sample with {@link #createWithReleaseCallback}.
+     * @param sample The sample to be released.
+     */
+    using OnSampleReleasedCallback = std::function<void(MediaSample* sample)>;
+
+    /**
+     * Creates a new media sample instance with a registered release callback. The release callback
+     * will get called right before the media sample is released giving the creator a chance to
+     * reclaim the buffer.
+     * @param buffer Byte buffer containing the sample's compressed data.
+     * @param dataOffset Offset, in bytes, to the sample's compressed data inside the buffer.
+     * @param bufferId Buffer identifier that can be used to identify the buffer on release.
+     * @param releaseCallback The sample release callback.
+     * @return A new media sample instance.
+     */
+    static std::shared_ptr<MediaSample> createWithReleaseCallback(
+            uint8_t* buffer, size_t dataOffset, uint32_t bufferId,
+            OnSampleReleasedCallback releaseCallback) {
+        MediaSample* sample = new MediaSample(buffer, dataOffset, bufferId, releaseCallback);
+        return std::shared_ptr<MediaSample>(
+                sample, std::bind(&MediaSample::releaseSample, std::placeholders::_1));
+    }
+
+    /**
+     * Byte buffer containing the sample's compressed data. The media sample instance does not take
+     * ownership of the buffer and will not automatically release the memory, but the caller can
+     * register a release callback by creating the media sample with
+     * {@link #createWithReleaseCallback}.
+     */
+    const uint8_t* buffer = nullptr;
+
+    /** Offset, in bytes, to the sample's compressed data inside the buffer. */
+    size_t dataOffset = 0;
+
+    /**
+     * Buffer identifier. This identifier is likely only meaningful to the sample data producer and
+     * can be used for reclaiming the buffer once a consumer is done processing it.
+     */
+    uint32_t bufferId = 0xBAADF00D;
+
+    /** Media sample information. */
+    MediaSampleInfo info;
+
+    MediaSample() = default;
+
+private:
+    MediaSample(uint8_t* buffer, size_t dataOffset, uint32_t bufferId,
+                OnSampleReleasedCallback releaseCallback)
+          : buffer(buffer),
+            dataOffset(dataOffset),
+            bufferId(bufferId),
+            mReleaseCallback(releaseCallback){};
+
+    static void releaseSample(MediaSample* sample) {
+        if (sample->mReleaseCallback != nullptr) {
+            sample->mReleaseCallback(sample);
+        }
+        delete sample;
+    }
+
+    // Do not allow copying to prevent dangling pointers in the copied object after the original is
+    // released.
+    MediaSample(const MediaSample&) = delete;
+    MediaSample& operator=(const MediaSample&) = delete;
+
+    const OnSampleReleasedCallback mReleaseCallback = nullptr;
+};
+
+}  // namespace android
+#endif  // ANDROID_MEDIA_SAMPLE_H
diff --git a/media/libmediatranscoding/transcoder/include/media/MediaSampleQueue.h b/media/libmediatranscoding/transcoder/include/media/MediaSampleQueue.h
new file mode 100644
index 0000000..dc22423
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/include/media/MediaSampleQueue.h
@@ -0,0 +1,66 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_MEDIA_SAMPLE_QUEUE_H
+#define ANDROID_MEDIA_SAMPLE_QUEUE_H
+
+#include <media/MediaSample.h>
+#include <utils/Mutex.h>
+
+#include <memory>
+#include <mutex>
+#include <queue>
+
+namespace android {
+
+/**
+ * MediaSampleQueue asynchronously connects a producer and a consumer of media samples.
+ * Media samples flows through the queue in FIFO order. If the queue is empty the consumer will be
+ * blocked until a new media sample is added or until the producer aborts the queue operation.
+ */
+class MediaSampleQueue {
+public:
+    /**
+     * Enqueues a media sample at the end of the queue and notifies potentially waiting consumers.
+     * If the queue has previously been aborted this method does nothing.
+     * @param sample The media sample to enqueue.
+     * @return True if the queue has been aborted.
+     */
+    bool enqueue(const std::shared_ptr<MediaSample>& sample);
+
+    /**
+     * Removes the next media sample from the queue and returns it. If the queue has previously been
+     * aborted this method returns null. Note that this method will block while the queue is empty.
+     * @param[out] sample The next media sample in the queue.
+     * @return True if the queue has been aborted.
+     */
+    bool dequeue(std::shared_ptr<MediaSample>* sample /* nonnull */);
+
+    /**
+     * Aborts the queue operation. This clears the queue and notifies waiting consumers. After the
+     * has been aborted it is not possible to enqueue more samples, and dequeue will return null.
+     */
+    void abort();
+
+private:
+    std::queue<std::shared_ptr<MediaSample>> mSampleQueue GUARDED_BY(mMutex);
+    std::mutex mMutex;
+    std::condition_variable mCondition;
+    bool mAborted GUARDED_BY(mMutex) = false;
+};
+
+}  // namespace android
+#endif  // ANDROID_MEDIA_SAMPLE_QUEUE_H
diff --git a/media/libmediatranscoding/transcoder/include/media/MediaSampleReader.h b/media/libmediatranscoding/transcoder/include/media/MediaSampleReader.h
new file mode 100644
index 0000000..acebac2
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/include/media/MediaSampleReader.h
@@ -0,0 +1,100 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_MEDIA_SAMPLE_READER_H
+#define ANDROID_MEDIA_SAMPLE_READER_H
+
+#include <media/MediaSample.h>
+#include <media/NdkMediaError.h>
+#include <media/NdkMediaFormat.h>
+
+namespace android {
+
+/**
+ * MediaSampleReader is an interface for reading media samples from a container.
+ * MediaSampleReader allows for reading samples from multiple tracks independently of each other
+ * while preserving the order of samples within each individual track.
+ * MediaSampleReader implementations are thread safe and can be used by multiple threads
+ * concurrently. But note that MediaSampleReader only maintains one state per track so concurrent
+ * usage of the same track from multiple threads has no benefit.
+ */
+class MediaSampleReader {
+public:
+    /**
+     * Returns the file format of the media container as a AMediaFormat.
+     * The caller is responsible for releasing the format when finished with it using
+     * AMediaFormat_delete().
+     * @return The file media format.
+     */
+    virtual AMediaFormat* getFileFormat() = 0;
+
+    /**
+     * Returns the number of tracks in the media container.
+     * @return The number of tracks.
+     */
+    virtual size_t getTrackCount() const = 0;
+
+    /**
+     * Returns the media format of a specific track as a AMediaFormat.
+     * The caller is responsible for releasing the format when finished with it using
+     * AMediaFormat_delete().
+     * @param trackIndex The track index (zero-based).
+     * @return The track media format.
+     */
+    virtual AMediaFormat* getTrackFormat(int trackIndex) = 0;
+
+    /**
+     * Returns the sample information for the current sample in the specified track.
+     * @param trackIndex The track index (zero-based).
+     * @param info Pointer to a MediaSampleInfo object where the sample information is written.
+     * @return AMEDIA_OK on success, AMEDIA_ERROR_END_OF_STREAM if there are no more samples to read
+     * from the track and AMEDIA_ERROR_INVALID_PARAMETER if trackIndex is out of bounds or the
+     * info pointer is NULL. Other AMEDIA_ERROR_* return values may not be recoverable.
+     */
+    virtual media_status_t getSampleInfoForTrack(int trackIndex, MediaSampleInfo* info) = 0;
+
+    /**
+     * Reads the current sample's data into the supplied buffer.
+     * @param trackIndex The track index (zero-based).
+     * @param buffer The buffer to write the sample's data to.
+     * @param bufferSize The size of the supplied buffer.
+     * @return AMEDIA_OK on success, AMEDIA_ERROR_END_OF_STREAM if there are no more samples to read
+     * from the track and AMEDIA_ERROR_INVALID_PARAMETER if trackIndex is out of bounds, if the
+     * buffer pointer is NULL or if bufferSize is too small for the sample. Other AMEDIA_ERROR_*
+     * return values may not be recoverable.
+     */
+    virtual media_status_t readSampleDataForTrack(int trackIndex, uint8_t* buffer,
+                                                  size_t bufferSize) = 0;
+
+    /**
+     * Advance the specified track to the next sample.
+     * @param trackIndex The track index (zero-based).
+     */
+    virtual void advanceTrack(int trackIndex) = 0;
+
+    /** Destructor. */
+    virtual ~MediaSampleReader() = default;
+
+    /** Constructor. */
+    MediaSampleReader() = default;
+
+private:
+    MediaSampleReader(const MediaSampleReader&) = delete;
+    MediaSampleReader& operator=(const MediaSampleReader&) = delete;
+};
+
+}  // namespace android
+#endif  // ANDROID_MEDIA_SAMPLE_READER_H
diff --git a/media/libmediatranscoding/transcoder/include/media/MediaSampleReaderNDK.h b/media/libmediatranscoding/transcoder/include/media/MediaSampleReaderNDK.h
new file mode 100644
index 0000000..2dc9029
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/include/media/MediaSampleReaderNDK.h
@@ -0,0 +1,112 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_MEDIA_SAMPLE_READER_NDK_H
+#define ANDROID_MEDIA_SAMPLE_READER_NDK_H
+
+#include <media/MediaSampleReader.h>
+#include <media/NdkMediaExtractor.h>
+
+#include <memory>
+#include <mutex>
+#include <vector>
+
+namespace android {
+
+/**
+ * MediaSampleReaderNDK is a concrete implementation of the MediaSampleReader interface based on the
+ * media NDK extractor.
+ */
+class MediaSampleReaderNDK : public MediaSampleReader {
+public:
+    /**
+     * Creates a new MediaSampleReaderNDK instance wrapped in a shared pointer.
+     * @param fd Source file descriptor. The caller is responsible for closing the fd and it is safe
+     *           to do so when this method returns.
+     * @param offset Source data offset.
+     * @param size Source data size.
+     * @return A shared pointer referencing the new MediaSampleReaderNDK instance on success, or an
+     *         empty shared pointer if an error occurred.
+     */
+    static std::shared_ptr<MediaSampleReader> createFromFd(int fd, size_t offset, size_t size);
+
+    AMediaFormat* getFileFormat() override;
+    size_t getTrackCount() const override;
+    AMediaFormat* getTrackFormat(int trackIndex) override;
+    media_status_t getSampleInfoForTrack(int trackIndex, MediaSampleInfo* info) override;
+    media_status_t readSampleDataForTrack(int trackIndex, uint8_t* buffer,
+                                          size_t bufferSize) override;
+    void advanceTrack(int trackIndex) override;
+
+    virtual ~MediaSampleReaderNDK() override;
+
+private:
+    /**
+     * Creates a new MediaSampleReaderNDK object from an AMediaExtractor. The extractor needs to be
+     * initialized with a valid data source before attempting to create a MediaSampleReaderNDK.
+     * @param extractor The initialized media extractor.
+     */
+    MediaSampleReaderNDK(AMediaExtractor* extractor);
+    media_status_t init();
+
+    AMediaExtractor* mExtractor = nullptr;
+    std::mutex mExtractorMutex;
+    const size_t mTrackCount;
+
+    int mExtractorTrackIndex = -1;
+    uint64_t mExtractorSampleIndex = 0;
+
+    /**
+     * SamplePosition describes the position of a single sample in the media file using its
+     * timestamp and index in the file.
+     */
+    struct SamplePosition {
+        uint64_t index = 0;
+        int64_t timeStampUs = 0;
+        bool isSet = false;
+
+        void set(uint64_t sampleIndex, int64_t sampleTimeUs) {
+            index = sampleIndex;
+            timeStampUs = sampleTimeUs;
+            isSet = true;
+        }
+
+        void reset() { isSet = false; }
+    };
+
+    /**
+     * SampleCursor keeps track of the sample position for a specific track. When the track is
+     * advanced, previous is set to current, current to next and next is reset. As the extractor
+     * advances over the combined timeline of tracks, it updates current and next for the track it
+     * points to if they are not already set.
+     */
+    struct SampleCursor {
+        SamplePosition previous;
+        SamplePosition current;
+        SamplePosition next;
+    };
+
+    /** Samples cursor for each track in the file. */
+    std::vector<SampleCursor> mTrackCursors;
+
+    bool advanceExtractor_l();
+    media_status_t positionExtractorForTrack_l(int trackIndex);
+    media_status_t seekExtractorBackwards_l(int64_t targetTimeUs, int targetTrackIndex,
+                                            uint64_t targetSampleIndex);
+};
+
+}  // namespace android
+#endif  // ANDROID_MEDIA_SAMPLE_READER_NDK_H
diff --git a/media/libmediatranscoding/transcoder/include/media/MediaSampleWriter.h b/media/libmediatranscoding/transcoder/include/media/MediaSampleWriter.h
new file mode 100644
index 0000000..92ddc2f
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/include/media/MediaSampleWriter.h
@@ -0,0 +1,191 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_MEDIA_SAMPLE_WRITER_H
+#define ANDROID_MEDIA_SAMPLE_WRITER_H
+
+#include <media/MediaSampleQueue.h>
+#include <media/NdkMediaCodec.h>
+#include <media/NdkMediaError.h>
+#include <media/NdkMediaFormat.h>
+#include <utils/Mutex.h>
+
+#include <functional>
+#include <memory>
+#include <mutex>
+#include <thread>
+
+namespace android {
+
+/**
+ * Muxer interface used by MediaSampleWriter.
+ * Methods in this interface are guaranteed to be called sequentially by MediaSampleWriter.
+ */
+class MediaSampleWriterMuxerInterface {
+public:
+    /**
+     * Adds a new track to the muxer.
+     * @param trackFormat Format of the new track.
+     * @return A non-negative track index on success, or a negative number on failure.
+     */
+    virtual ssize_t addTrack(AMediaFormat* trackFormat) = 0;
+
+    /** Starts the muxer. */
+    virtual media_status_t start() = 0;
+    /**
+     * Writes sample data to a previously added track.
+     * @param trackIndex Index of the track the sample data belongs to.
+     * @param data The sample data.
+     * @param info The sample information.
+     * @return The number of bytes written.
+     */
+    virtual media_status_t writeSampleData(size_t trackIndex, const uint8_t* data,
+                                           const AMediaCodecBufferInfo* info) = 0;
+
+    /** Stops the muxer. */
+    virtual media_status_t stop() = 0;
+    virtual ~MediaSampleWriterMuxerInterface() = default;
+};
+
+/**
+ * MediaSampleWriter writes samples in interleaved segments of a configurable duration.
+ * Each track have its own MediaSampleQueue from which samples are dequeued by the sample writer in
+ * output order. The dequeued samples are written to an instance of the writer's muxer interface.
+ * The default muxer interface implementation is based directly on AMediaMuxer.
+ */
+class MediaSampleWriter {
+public:
+    /** The default segment length. */
+    static constexpr uint32_t kDefaultTrackSegmentLengthUs = 1 * 1000 * 1000;  // 1 sec.
+
+    /** Callback interface. */
+    class CallbackInterface {
+    public:
+        /**
+         * Sample writer finished. The finished callback is only called after the sample writer has
+         * been successfully started.
+         */
+        virtual void onFinished(const MediaSampleWriter* writer, media_status_t status) = 0;
+
+        /** Sample writer progress update in percent. */
+        virtual void onProgressUpdate(const MediaSampleWriter* writer, int32_t progress) = 0;
+
+        virtual ~CallbackInterface() = default;
+    };
+
+    /**
+     * Constructor with custom segment length.
+     * @param trackSegmentLengthUs The segment length to use for this MediaSampleWriter.
+     */
+    MediaSampleWriter(uint32_t trackSegmentLengthUs)
+          : mTrackSegmentLengthUs(trackSegmentLengthUs), mMuxer(nullptr), mState(UNINITIALIZED){};
+
+    /** Constructor using the default segment length. */
+    MediaSampleWriter() : MediaSampleWriter(kDefaultTrackSegmentLengthUs){};
+
+    /** Destructor. */
+    ~MediaSampleWriter();
+
+    /**
+     * Initializes the sample writer with its default muxer implementation. MediaSampleWriter needs
+     * to be initialized before tracks are added and can only be initialized once.
+     * @param fd An open file descriptor to write to. The caller is responsible for closing this
+     *        file descriptor and it is safe to do so once this method returns.
+     * @param callbacks Client callback object that gets called by the sample writer.
+     * @return True if the writer was successfully initialized.
+     */
+    bool init(int fd, const std::weak_ptr<CallbackInterface>& callbacks /* nonnull */);
+
+    /**
+     * Initializes the sample writer with a custom muxer interface implementation.
+     * @param muxer The custom muxer interface implementation.
+     * @param @param callbacks Client callback object that gets called by the sample writer.
+     * @return True if the writer was successfully initialized.
+     */
+    bool init(const std::shared_ptr<MediaSampleWriterMuxerInterface>& muxer /* nonnull */,
+              const std::weak_ptr<CallbackInterface>& callbacks /* nonnull */);
+
+    /**
+     * Adds a new track to the sample writer. Tracks must be added after the sample writer has been
+     * initialized and before it is started.
+     * @param sampleQueue The MediaSampleQueue to pull samples from.
+     * @param trackFormat The format of the track to add.
+     * @return True if the track was successfully added.
+     */
+    bool addTrack(const std::shared_ptr<MediaSampleQueue>& sampleQueue /* nonnull */,
+                  const std::shared_ptr<AMediaFormat>& trackFormat /* nonnull */);
+
+    /**
+     * Starts the sample writer. The sample writer will start processing samples and writing them to
+     * its muxer on an internal thread. MediaSampleWriter can only be started once.
+     * @return True if the sample writer was successfully started.
+     */
+    bool start();
+
+    /**
+     * Stops the sample writer. If the sample writer is not yet finished its operation will be
+     * aborted and an error value will be returned to the client in the callback supplied to
+     * {@link #start}. If the sample writer has already finished and the client callback has fired
+     * the writer has already automatically stopped and there is no need to call stop manually. Once
+     * the sample writer has been stopped it cannot be restarted.
+     * @return True if the sample writer was successfully stopped on this call. False if the sample
+     *         writer was already stopped or was never started.
+     */
+    bool stop();
+
+private:
+    media_status_t writeSamples();
+    media_status_t runWriterLoop();
+
+    struct TrackRecord {
+        TrackRecord(const std::shared_ptr<MediaSampleQueue>& sampleQueue, size_t trackIndex,
+                    int64_t durationUs, bool isVideo)
+              : mSampleQueue(sampleQueue),
+                mTrackIndex(trackIndex),
+                mDurationUs(durationUs),
+                mFirstSampleTimeUs(0),
+                mPrevSampleTimeUs(0),
+                mFirstSampleTimeSet(false),
+                mReachedEos(false),
+                mIsVideo(isVideo) {}
+
+        std::shared_ptr<MediaSampleQueue> mSampleQueue;
+        const size_t mTrackIndex;
+        int64_t mDurationUs;
+        int64_t mFirstSampleTimeUs;
+        int64_t mPrevSampleTimeUs;
+        bool mFirstSampleTimeSet;
+        bool mReachedEos;
+        bool mIsVideo;
+    };
+
+    const uint32_t mTrackSegmentLengthUs;
+    std::weak_ptr<CallbackInterface> mCallbacks;
+    std::shared_ptr<MediaSampleWriterMuxerInterface> mMuxer;
+    std::vector<TrackRecord> mTracks;
+    std::thread mThread;
+
+    std::mutex mStateMutex;
+    enum : int {
+        UNINITIALIZED,
+        INITIALIZED,
+        STARTED,
+        STOPPED,
+    } mState GUARDED_BY(mStateMutex);
+};
+
+}  // namespace android
+#endif  // ANDROID_MEDIA_SAMPLE_WRITER_H
diff --git a/media/libmediatranscoding/transcoder/include/media/MediaTrackTranscoder.h b/media/libmediatranscoding/transcoder/include/media/MediaTrackTranscoder.h
new file mode 100644
index 0000000..60a9139
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/include/media/MediaTrackTranscoder.h
@@ -0,0 +1,131 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_MEDIA_TRACK_TRANSCODER_H
+#define ANDROID_MEDIA_TRACK_TRANSCODER_H
+
+#include <media/MediaSampleQueue.h>
+#include <media/MediaSampleReader.h>
+#include <media/NdkMediaError.h>
+#include <media/NdkMediaFormat.h>
+#include <utils/Mutex.h>
+
+#include <functional>
+#include <memory>
+#include <mutex>
+#include <thread>
+
+namespace android {
+
+class MediaTrackTranscoderCallback;
+
+/**
+ * Base class for all track transcoders. MediaTrackTranscoder operates asynchronously on an internal
+ * thread and communicates through a MediaTrackTranscoderCallback instance. Transcoded samples are
+ * enqueued on the MediaTrackTranscoder's output queue. Samples need to be dequeued from the output
+ * queue or the transcoder will run out of buffers and stall. Once the consumer is done with a
+ * transcoded sample it is the consumer's responsibility to as soon as possible release all
+ * references to that sample in order to return the buffer to the transcoder. MediaTrackTranscoder
+ * is an abstract class and instances are created through one of the concrete subclasses.
+ *
+ * The base class MediaTrackTranscoder is responsible for thread and state management and guarantees
+ * that operations {configure, start, stop} are sent to the derived class in correct order.
+ * MediaTrackTranscoder is also responsible for delivering callback notifications once the
+ * transcoder has been successfully started.
+ */
+class MediaTrackTranscoder {
+public:
+    /**
+     * Configures the track transcoder with an input MediaSampleReader and a destination format.
+     * A track transcoder have to be configured before it is started.
+     * @param mediaSampleReader The MediaSampleReader to read input samples from.
+     * @param trackIndex The index of the track to transcode in mediaSampleReader.
+     * @param destinationFormat The destination format.
+     * @return AMEDIA_OK if the track transcoder was successfully configured.
+     */
+    media_status_t configure(const std::shared_ptr<MediaSampleReader>& mediaSampleReader,
+                             int trackIndex,
+                             const std::shared_ptr<AMediaFormat>& destinationFormat);
+
+    /**
+     * Starts the track transcoder. Once started the track transcoder have to be stopped by calling
+     * {@link #stop}, even after completing successfully. Start should only be called once.
+     * @return True if the track transcoder started, or false if it had already been started.
+     */
+    bool start();
+
+    /**
+     * Stops the track transcoder. Once the transcoding has been stopped it cannot be restarted
+     * again. It is safe to call stop multiple times.
+     * @return True if the track transcoder stopped, or false if it was already stopped.
+     */
+    bool stop();
+
+    /**
+     * Retrieves the track transcoder's output sample queue.
+     * @return The output sample queue.
+     */
+    std::shared_ptr<MediaSampleQueue> getOutputQueue() const;
+
+    /**
+      * Retrieves the track transcoder's final output format. The output is available after the
+      * track transcoder has been successfully configured.
+      * @return The track output format.
+      */
+    virtual std::shared_ptr<AMediaFormat> getOutputFormat() const = 0;
+
+    virtual ~MediaTrackTranscoder() = default;
+
+protected:
+    MediaTrackTranscoder(const std::weak_ptr<MediaTrackTranscoderCallback>& transcoderCallback)
+          : mOutputQueue(std::make_shared<MediaSampleQueue>()),
+            mTranscoderCallback(transcoderCallback){};
+
+    // Called by subclasses when the actual track format becomes available.
+    void notifyTrackFormatAvailable();
+
+    // configureDestinationFormat needs to be implemented by subclasses, and gets called on an
+    // external thread before start.
+    virtual media_status_t configureDestinationFormat(
+            const std::shared_ptr<AMediaFormat>& destinationFormat) = 0;
+
+    // runTranscodeLoop needs to be implemented by subclasses, and gets called on
+    // MediaTrackTranscoder's internal thread when the track transcoder is started.
+    virtual media_status_t runTranscodeLoop() = 0;
+
+    // abortTranscodeLoop needs to be implemented by subclasses, and should request transcoding to
+    // be aborted as soon as possible. It should be safe to call abortTranscodeLoop multiple times.
+    virtual void abortTranscodeLoop() = 0;
+
+    std::shared_ptr<MediaSampleQueue> mOutputQueue;
+    std::shared_ptr<MediaSampleReader> mMediaSampleReader;
+    int mTrackIndex;
+    std::shared_ptr<AMediaFormat> mSourceFormat;
+
+private:
+    const std::weak_ptr<MediaTrackTranscoderCallback> mTranscoderCallback;
+    std::mutex mStateMutex;
+    std::thread mTranscodingThread GUARDED_BY(mStateMutex);
+    enum {
+        UNINITIALIZED,
+        CONFIGURED,
+        STARTED,
+        STOPPED,
+    } mState GUARDED_BY(mStateMutex) = UNINITIALIZED;
+};
+
+}  // namespace android
+#endif  // ANDROID_MEDIA_TRACK_TRANSCODER_H
diff --git a/media/libmediatranscoding/transcoder/include/media/MediaTrackTranscoderCallback.h b/media/libmediatranscoding/transcoder/include/media/MediaTrackTranscoderCallback.h
new file mode 100644
index 0000000..654171e
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/include/media/MediaTrackTranscoderCallback.h
@@ -0,0 +1,53 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_MEDIA_TRACK_TRANSCODER_CALLBACK_H
+#define ANDROID_MEDIA_TRACK_TRANSCODER_CALLBACK_H
+
+#include <media/NdkMediaError.h>
+
+namespace android {
+
+class MediaTrackTranscoder;
+
+/** Callback interface for MediaTrackTranscoder. */
+class MediaTrackTranscoderCallback {
+public:
+    /**
+     * Called when the MediaTrackTranscoder's actual track format becomes available.
+     * @param transcoder The MediaTrackTranscoder whose track format becomes available.
+     */
+    virtual void onTrackFormatAvailable(const MediaTrackTranscoder* transcoder);
+    /**
+     * Called when the MediaTrackTranscoder instance have finished transcoding all media samples
+     * successfully.
+     * @param transcoder The MediaTrackTranscoder that finished the transcoding.
+     */
+    virtual void onTrackFinished(const MediaTrackTranscoder* transcoder);
+
+    /**
+     * Called when the MediaTrackTranscoder instance encountered an error it could not recover from.
+     * @param transcoder The MediaTrackTranscoder that encountered the error.
+     * @param status The non-zero error code describing the encountered error.
+     */
+    virtual void onTrackError(const MediaTrackTranscoder* transcoder, media_status_t status);
+
+protected:
+    virtual ~MediaTrackTranscoderCallback() = default;
+};
+
+}  // namespace android
+#endif  // ANDROID_MEDIA_TRACK_TRANSCODER_CALLBACK_H
diff --git a/media/libmediatranscoding/transcoder/include/media/MediaTranscoder.h b/media/libmediatranscoding/transcoder/include/media/MediaTranscoder.h
new file mode 100644
index 0000000..031d01e
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/include/media/MediaTranscoder.h
@@ -0,0 +1,154 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_MEDIA_TRANSCODER_H
+#define ANDROID_MEDIA_TRANSCODER_H
+
+#include <binder/Parcel.h>
+#include <binder/Parcelable.h>
+#include <media/MediaSampleWriter.h>
+#include <media/MediaTrackTranscoderCallback.h>
+#include <media/NdkMediaError.h>
+#include <media/NdkMediaFormat.h>
+#include <utils/Mutex.h>
+
+#include <atomic>
+#include <memory>
+#include <mutex>
+#include <unordered_set>
+
+namespace android {
+
+class MediaSampleReader;
+class Parcel;
+
+class MediaTranscoder : public std::enable_shared_from_this<MediaTranscoder>,
+                        public MediaTrackTranscoderCallback,
+                        public MediaSampleWriter::CallbackInterface {
+public:
+    /** Callbacks from transcoder to client. */
+    class CallbackInterface {
+    public:
+        /** Transcoder finished successfully. */
+        virtual void onFinished(const MediaTranscoder* transcoder) = 0;
+
+        /** Transcoder encountered an unrecoverable error. */
+        virtual void onError(const MediaTranscoder* transcoder, media_status_t error) = 0;
+
+        /** Transcoder progress update reported in percent from 0 to 100. */
+        virtual void onProgressUpdate(const MediaTranscoder* transcoder, int32_t progress) = 0;
+
+        /**
+         * Transcoder lost codec resources and paused operations. The client can resume transcoding
+         * again when resources are available by either:
+         *   1) Calling resume on the same MediaTranscoder instance.
+         *   2) Creating a new MediaTranscoding instance with the paused state and then calling
+         *      resume.
+         */
+        virtual void onCodecResourceLost(const MediaTranscoder* transcoder,
+                                         const std::shared_ptr<const Parcel>& pausedState) = 0;
+
+        virtual ~CallbackInterface() = default;
+    };
+
+    /**
+     * Creates a new MediaTranscoder instance. If the supplied paused state is valid, the transcoder
+     * will be initialized with the paused state and be ready to be resumed right away. It is not
+     * possible to change any configurations on a paused transcoder.
+     */
+    static std::shared_ptr<MediaTranscoder> create(
+            const std::shared_ptr<CallbackInterface>& callbacks,
+            const std::shared_ptr<const Parcel>& pausedState = nullptr);
+
+    /** Configures source from path fd. */
+    media_status_t configureSource(int fd);
+
+    /** Gets the media formats of all tracks in the file. */
+    std::vector<std::shared_ptr<AMediaFormat>> getTrackFormats() const;
+
+    /**
+     * Configures transcoding of a track. Tracks that are not configured will not present in the
+     * final transcoded file, i.e. tracks will be dropped by default. Passing nullptr for
+     * trackFormat means the track will be copied unchanged ("passthrough") to the destination.
+     * Track configurations must be done after the source has been configured.
+     * Note: trackFormat is not modified but cannot be const.
+     */
+    media_status_t configureTrackFormat(size_t trackIndex, AMediaFormat* trackFormat);
+
+    /** Configures destination from fd. */
+    media_status_t configureDestination(int fd);
+
+    /** Starts transcoding. No configurations can be made once the transcoder has started. */
+    media_status_t start();
+
+    /**
+     * Pauses transcoding. The transcoder's paused state is returned through pausedState. The
+     * paused state is only needed for resuming transcoding with a new MediaTranscoder instance. The
+     * caller can resume transcoding with the current MediaTranscoder instance at any time by
+     * calling resume(). It is not required to cancel a paused transcoder. The paused state is
+     * independent and the caller can always initialize a new transcoder instance with the same
+     * paused state. If the caller wishes to abandon a paused transcoder's operation they can
+     * release the transcoder instance, clear the paused state and delete the partial destination
+     * file. The caller can optionally call cancel to let the transcoder clean up the partial
+     * destination file.
+     *
+     * TODO: use NDK AParcel instead
+     * libbinder shouldn't be used by mainline modules. When transcoding goes mainline
+     * it needs to be replaced by stable AParcel.
+     */
+    media_status_t pause(std::shared_ptr<const Parcel>* pausedState);
+
+    /** Resumes a paused transcoding. */
+    media_status_t resume();
+
+    /** Cancels the transcoding. Once canceled the transcoding can not be restarted. Client
+     * will be responsible for cleaning up the abandoned file. */
+    media_status_t cancel();
+
+    virtual ~MediaTranscoder() = default;
+
+private:
+    MediaTranscoder(const std::shared_ptr<CallbackInterface>& callbacks);
+
+    // MediaTrackTranscoderCallback
+    virtual void onTrackFormatAvailable(const MediaTrackTranscoder* transcoder) override;
+    virtual void onTrackFinished(const MediaTrackTranscoder* transcoder) override;
+    virtual void onTrackError(const MediaTrackTranscoder* transcoder,
+                              media_status_t status) override;
+    // ~MediaTrackTranscoderCallback
+
+    // MediaSampleWriter::CallbackInterface
+    virtual void onFinished(const MediaSampleWriter* writer, media_status_t status) override;
+    virtual void onProgressUpdate(const MediaSampleWriter* writer, int32_t progress) override;
+    // ~MediaSampleWriter::CallbackInterface
+
+    void onSampleWriterFinished(media_status_t status);
+    void sendCallback(media_status_t status);
+
+    std::shared_ptr<CallbackInterface> mCallbacks;
+    std::shared_ptr<MediaSampleReader> mSampleReader;
+    std::unique_ptr<MediaSampleWriter> mSampleWriter;
+    std::vector<std::shared_ptr<AMediaFormat>> mSourceTrackFormats;
+    std::vector<std::shared_ptr<MediaTrackTranscoder>> mTrackTranscoders;
+    std::mutex mTracksAddedMutex;
+    std::unordered_set<const MediaTrackTranscoder*> mTracksAdded GUARDED_BY(mTracksAddedMutex);
+
+    std::atomic_bool mCallbackSent = false;
+    std::atomic_bool mCancelled = false;
+};
+
+}  // namespace android
+#endif  // ANDROID_MEDIA_TRANSCODER_H
diff --git a/media/libmediatranscoding/transcoder/include/media/NdkCommon.h b/media/libmediatranscoding/transcoder/include/media/NdkCommon.h
new file mode 100644
index 0000000..dcba812
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/include/media/NdkCommon.h
@@ -0,0 +1,56 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_MEDIA_TRANSCODING_NDK_COMMON_H
+#define ANDROID_MEDIA_TRANSCODING_NDK_COMMON_H
+
+#include <media/NdkMediaFormat.h>
+
+extern const char* AMEDIA_MIMETYPE_VIDEO_VP8;
+extern const char* AMEDIA_MIMETYPE_VIDEO_VP9;
+extern const char* AMEDIA_MIMETYPE_VIDEO_AV1;
+extern const char* AMEDIA_MIMETYPE_VIDEO_AVC;
+extern const char* AMEDIA_MIMETYPE_VIDEO_HEVC;
+extern const char* AMEDIA_MIMETYPE_VIDEO_MPEG4;
+extern const char* AMEDIA_MIMETYPE_VIDEO_H263;
+
+// TODO(b/146420990)
+// TODO: make MediaTranscoder use the consts from this header.
+typedef enum {
+    OUTPUT_FORMAT_START = 0,
+    OUTPUT_FORMAT_MPEG_4 = OUTPUT_FORMAT_START,
+    OUTPUT_FORMAT_WEBM = OUTPUT_FORMAT_START + 1,
+    OUTPUT_FORMAT_THREE_GPP = OUTPUT_FORMAT_START + 2,
+    OUTPUT_FORMAT_HEIF = OUTPUT_FORMAT_START + 3,
+    OUTPUT_FORMAT_OGG = OUTPUT_FORMAT_START + 4,
+    OUTPUT_FORMAT_LIST_END = OUTPUT_FORMAT_START + 4,
+} MuxerFormat;
+
+// Color formats supported by encoder - should mirror supportedColorList
+// from MediaCodecConstants.h (are these going to be deprecated)
+static constexpr int COLOR_FormatYUV420SemiPlanar = 21;
+static constexpr int COLOR_FormatYUV420Flexible = 0x7F420888;
+static constexpr int COLOR_FormatSurface = 0x7f000789;
+
+// constants not defined in NDK
+extern const char* TBD_AMEDIACODEC_PARAMETER_KEY_REQUEST_SYNC_FRAME;
+extern const char* TBD_AMEDIACODEC_PARAMETER_KEY_VIDEO_BITRATE;
+extern const char* TBD_AMEDIACODEC_PARAMETER_KEY_MAX_B_FRAMES;
+static constexpr int TBD_AMEDIACODEC_BUFFER_FLAG_KEY_FRAME = 0x1;
+
+static constexpr int kBitrateModeConstant = 2;
+
+#endif  // ANDROID_MEDIA_TRANSCODING_NDK_COMMON_H
diff --git a/media/libmediatranscoding/transcoder/include/media/PassthroughTrackTranscoder.h b/media/libmediatranscoding/transcoder/include/media/PassthroughTrackTranscoder.h
new file mode 100644
index 0000000..b9491ed
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/include/media/PassthroughTrackTranscoder.h
@@ -0,0 +1,102 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_PASSTHROUGH_TRACK_TRANSCODER_H
+#define ANDROID_PASSTHROUGH_TRACK_TRANSCODER_H
+
+#include <media/MediaTrackTranscoder.h>
+#include <media/NdkMediaFormat.h>
+
+#include <condition_variable>
+#include <map>
+#include <mutex>
+#include <unordered_map>
+
+namespace android {
+
+/**
+ * Track transcoder for passthrough mode. Passthrough mode copies sample data from a track unchanged
+ * from source file to destination file. This track transcoder uses an internal pool of buffers.
+ * When the maximum number of buffers are allocated and all of them are waiting on the output queue
+ * the transcoder will stall until samples are dequeued from the output queue and released.
+ */
+class PassthroughTrackTranscoder : public MediaTrackTranscoder {
+public:
+    /** Maximum number of buffers to be allocated at a given time. */
+    static constexpr int kMaxBufferCountDefault = 16;
+
+    PassthroughTrackTranscoder(
+            const std::weak_ptr<MediaTrackTranscoderCallback>& transcoderCallback)
+          : MediaTrackTranscoder(transcoderCallback),
+            mBufferPool(std::make_shared<BufferPool>(kMaxBufferCountDefault)){};
+    virtual ~PassthroughTrackTranscoder() override = default;
+
+private:
+    friend class BufferPoolTests;
+
+    /** Class to pool and reuse buffers. */
+    class BufferPool {
+    public:
+        explicit BufferPool(int maxBufferCount) : mMaxBufferCount(maxBufferCount){};
+        ~BufferPool();
+
+        /**
+         * Retrieve a buffer from the pool. Buffers are allocated on demand. This method will block
+         * if the maximum number of buffers is reached and there are no free buffers available.
+         * @param minimumBufferSize The minimum size of the buffer.
+         * @return The buffer or nullptr if allocation failed or the pool was aborted.
+         */
+        uint8_t* getBufferWithSize(size_t minimumBufferSize);
+
+        /**
+         * Return a buffer to the pool.
+         * @param buffer The buffer to return.
+         */
+        void returnBuffer(uint8_t* buffer);
+
+        /** Wakes up threads waiting on buffers and prevents new buffers from being returned. */
+        void abort();
+
+    private:
+        // Maximum number of active buffers at a time.
+        const int mMaxBufferCount;
+
+        // Map containing all tracked buffers.
+        std::unordered_map<uint8_t*, size_t> mAddressSizeMap GUARDED_BY(mMutex);
+
+        // Map containing the currently free buffers.
+        std::multimap<size_t, uint8_t*> mFreeBufferMap GUARDED_BY(mMutex);
+
+        std::mutex mMutex;
+        std::condition_variable mCondition;
+        bool mAborted GUARDED_BY(mMutex) = false;
+    };
+
+    // MediaTrackTranscoder
+    media_status_t runTranscodeLoop() override;
+    void abortTranscodeLoop() override;
+    media_status_t configureDestinationFormat(
+            const std::shared_ptr<AMediaFormat>& destinationFormat) override;
+    std::shared_ptr<AMediaFormat> getOutputFormat() const override;
+    // ~MediaTrackTranscoder
+
+    std::shared_ptr<BufferPool> mBufferPool;
+    bool mEosFromSource = false;
+    std::atomic_bool mStopRequested = false;
+};
+
+}  // namespace android
+#endif  // ANDROID_PASSTHROUGH_TRACK_TRANSCODER_H
diff --git a/media/libmediatranscoding/transcoder/include/media/VideoTrackTranscoder.h b/media/libmediatranscoding/transcoder/include/media/VideoTrackTranscoder.h
new file mode 100644
index 0000000..0a7bf33
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/include/media/VideoTrackTranscoder.h
@@ -0,0 +1,99 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_VIDEO_TRACK_TRANSCODER_H
+#define ANDROID_VIDEO_TRACK_TRANSCODER_H
+
+#include <android/native_window.h>
+#include <media/MediaTrackTranscoder.h>
+#include <media/NdkMediaCodec.h>
+#include <media/NdkMediaFormat.h>
+
+#include <condition_variable>
+#include <deque>
+#include <mutex>
+
+namespace android {
+
+/**
+ * Track transcoder for video tracks. VideoTrackTranscoder uses AMediaCodec from the Media NDK
+ * internally. The two media codecs are run in asynchronous mode and shares uncompressed buffers
+ * using a native surface (ANativeWindow). Codec callback events are placed on a message queue and
+ * serviced in order on the transcoding thread managed by MediaTrackTranscoder.
+ */
+class VideoTrackTranscoder : public std::enable_shared_from_this<VideoTrackTranscoder>,
+                             public MediaTrackTranscoder {
+public:
+    static std::shared_ptr<VideoTrackTranscoder> create(
+            const std::weak_ptr<MediaTrackTranscoderCallback>& transcoderCallback);
+
+    virtual ~VideoTrackTranscoder() override;
+
+private:
+    friend struct AsyncCodecCallbackDispatch;
+
+    // Minimal blocking queue used as a message queue by VideoTrackTranscoder.
+    template <typename T>
+    class BlockingQueue {
+    public:
+        void push(T const& value, bool front = false);
+        T pop();
+
+    private:
+        std::mutex mMutex;
+        std::condition_variable mCondition;
+        std::deque<T> mQueue;
+    };
+    class CodecWrapper;
+
+    VideoTrackTranscoder(const std::weak_ptr<MediaTrackTranscoderCallback>& transcoderCallback)
+          : MediaTrackTranscoder(transcoderCallback){};
+
+    // MediaTrackTranscoder
+    media_status_t runTranscodeLoop() override;
+    void abortTranscodeLoop() override;
+    media_status_t configureDestinationFormat(
+            const std::shared_ptr<AMediaFormat>& destinationFormat) override;
+    std::shared_ptr<AMediaFormat> getOutputFormat() const override;
+    // ~MediaTrackTranscoder
+
+    // Enqueues an input sample with the decoder.
+    void enqueueInputSample(int32_t bufferIndex);
+
+    // Moves a decoded buffer from the decoder's output to the encoder's input.
+    void transferBuffer(int32_t bufferIndex, AMediaCodecBufferInfo bufferInfo);
+
+    // Dequeues an encoded buffer from the encoder and adds it to the output queue.
+    void dequeueOutputSample(int32_t bufferIndex, AMediaCodecBufferInfo bufferInfo);
+
+    // Updates the video track's actual format based on encoder output format.
+    void updateTrackFormat(AMediaFormat* outputFormat);
+
+    AMediaCodec* mDecoder = nullptr;
+    std::shared_ptr<CodecWrapper> mEncoder;
+    ANativeWindow* mSurface = nullptr;
+    bool mEosFromSource = false;
+    bool mEosFromEncoder = false;
+    bool mStopRequested = false;
+    media_status_t mStatus = AMEDIA_OK;
+    MediaSampleInfo mSampleInfo;
+    BlockingQueue<std::function<void()>> mCodecMessageQueue;
+    std::shared_ptr<AMediaFormat> mDestinationFormat;
+    std::shared_ptr<AMediaFormat> mActualOutputFormat;
+};
+
+}  // namespace android
+#endif  // ANDROID_VIDEO_TRACK_TRANSCODER_H
diff --git a/media/libmediatranscoding/transcoder/tests/Android.bp b/media/libmediatranscoding/transcoder/tests/Android.bp
new file mode 100644
index 0000000..4160c30
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/tests/Android.bp
@@ -0,0 +1,83 @@
+// Unit tests for libmediatranscoder.
+
+filegroup {
+    name: "test_assets",
+    srcs: ["assets/*"],
+}
+
+cc_defaults {
+    name: "testdefaults",
+
+    header_libs: [
+        "libbase_headers",
+        "libmedia_headers",
+    ],
+
+    shared_libs: [
+        "libbase",
+        "libcutils",
+        "libmediandk",
+        "libmediatranscoder",
+        "libutils",
+    ],
+
+    cflags: [
+        "-Werror",
+        "-Wall",
+    ],
+
+    data: [":test_assets"],
+    test_config_template: "AndroidTestTemplate.xml",
+    test_suites: ["device-tests", "TranscoderTests"],
+}
+
+// MediaSampleReaderNDK unit test
+cc_test {
+    name: "MediaSampleReaderNDKTests",
+    defaults: ["testdefaults"],
+    srcs: ["MediaSampleReaderNDKTests.cpp"],
+}
+
+// MediaSampleQueue unit test
+cc_test {
+    name: "MediaSampleQueueTests",
+    defaults: ["testdefaults"],
+    srcs: ["MediaSampleQueueTests.cpp"],
+}
+
+// MediaTrackTranscoder unit test
+cc_test {
+    name: "MediaTrackTranscoderTests",
+    defaults: ["testdefaults"],
+    srcs: ["MediaTrackTranscoderTests.cpp"],
+    shared_libs: ["libbinder_ndk"],
+}
+
+// VideoTrackTranscoder unit test
+cc_test {
+    name: "VideoTrackTranscoderTests",
+    defaults: ["testdefaults"],
+    srcs: ["VideoTrackTranscoderTests.cpp"],
+}
+
+// PassthroughTrackTranscoder unit test
+cc_test {
+    name: "PassthroughTrackTranscoderTests",
+    defaults: ["testdefaults"],
+    srcs: ["PassthroughTrackTranscoderTests.cpp"],
+    shared_libs: ["libcrypto"],
+}
+
+// MediaSampleWriter unit test
+cc_test {
+    name: "MediaSampleWriterTests",
+    defaults: ["testdefaults"],
+    srcs: ["MediaSampleWriterTests.cpp"],
+}
+
+// MediaTranscoder unit test
+cc_test {
+    name: "MediaTranscoderTests",
+    defaults: ["testdefaults"],
+    srcs: ["MediaTranscoderTests.cpp"],
+}
diff --git a/media/libmediatranscoding/transcoder/tests/AndroidTestTemplate.xml b/media/libmediatranscoding/transcoder/tests/AndroidTestTemplate.xml
new file mode 100644
index 0000000..a9a7e2e
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/tests/AndroidTestTemplate.xml
@@ -0,0 +1,29 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2020 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+<configuration description="Unit test configuration for {MODULE}">
+    <option name="test-suite-tag" value="TranscoderTests" />
+    <target_preparer class="com.android.tradefed.targetprep.PushFilePreparer">
+        <option name="cleanup" value="false" />
+        <option name="push-file"
+            key="assets"
+            value="/data/local/tmp/TranscodingTestAssets" />
+    </target_preparer>
+
+    <test class="com.android.tradefed.testtype.GTest" >
+        <option name="module-name" value="{MODULE}" />
+    </test>
+</configuration>
+
diff --git a/media/libmediatranscoding/transcoder/tests/MediaSampleQueueTests.cpp b/media/libmediatranscoding/transcoder/tests/MediaSampleQueueTests.cpp
new file mode 100644
index 0000000..2046ca0
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/tests/MediaSampleQueueTests.cpp
@@ -0,0 +1,229 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// Unit Test for MediaSampleQueue
+
+// #define LOG_NDEBUG 0
+#define LOG_TAG "MediaSampleQueueTests"
+
+#include <android-base/logging.h>
+#include <gtest/gtest.h>
+#include <media/MediaSampleQueue.h>
+
+#include <thread>
+
+namespace android {
+
+/** Duration to use when delaying threads to order operations. */
+static constexpr int64_t kThreadDelayDurationMs = 100;
+
+class MediaSampleQueueTests : public ::testing::Test {
+public:
+    MediaSampleQueueTests() { LOG(DEBUG) << "MediaSampleQueueTests created"; }
+    ~MediaSampleQueueTests() { LOG(DEBUG) << "MediaSampleQueueTests destroyed"; }
+};
+
+static std::shared_ptr<MediaSample> newSample(uint32_t id) {
+    return MediaSample::createWithReleaseCallback(nullptr /* buffer */, 0 /* offset */, id,
+                                                  nullptr /* callback */);
+}
+
+TEST_F(MediaSampleQueueTests, TestSequentialDequeueOrder) {
+    LOG(DEBUG) << "TestSequentialDequeueOrder Starts";
+
+    static constexpr int kNumSamples = 4;
+    MediaSampleQueue sampleQueue;
+
+    // Enqueue loop.
+    for (int i = 0; i < kNumSamples; ++i) {
+        sampleQueue.enqueue(newSample(i));
+    }
+
+    // Dequeue loop.
+    for (int i = 0; i < kNumSamples; ++i) {
+        std::shared_ptr<MediaSample> sample;
+        bool aborted = sampleQueue.dequeue(&sample);
+        EXPECT_NE(sample, nullptr);
+        EXPECT_EQ(sample->bufferId, i);
+        EXPECT_FALSE(aborted);
+    }
+}
+
+TEST_F(MediaSampleQueueTests, TestInterleavedDequeueOrder) {
+    LOG(DEBUG) << "TestInterleavedDequeueOrder Starts";
+
+    static constexpr int kNumSamples = 4;
+    MediaSampleQueue sampleQueue;
+
+    // Enqueue and dequeue.
+    for (int i = 0; i < kNumSamples; ++i) {
+        sampleQueue.enqueue(newSample(i));
+
+        std::shared_ptr<MediaSample> sample;
+        bool aborted = sampleQueue.dequeue(&sample);
+        EXPECT_NE(sample, nullptr);
+        EXPECT_EQ(sample->bufferId, i);
+        EXPECT_FALSE(aborted);
+    }
+}
+
+TEST_F(MediaSampleQueueTests, TestBlockingDequeue) {
+    LOG(DEBUG) << "TestBlockingDequeue Starts";
+
+    MediaSampleQueue sampleQueue;
+
+    std::thread enqueueThread([&sampleQueue] {
+        // Note: This implementation is a bit racy. Any amount of sleep will not guarantee that the
+        // main thread will be blocked on the sample queue by the time this thread calls enqueue.
+        // But we can say with high confidence that it will and the test will not fail regardless.
+        std::this_thread::sleep_for(std::chrono::milliseconds(kThreadDelayDurationMs));
+        sampleQueue.enqueue(newSample(1));
+    });
+
+    std::shared_ptr<MediaSample> sample;
+    bool aborted = sampleQueue.dequeue(&sample);
+    EXPECT_NE(sample, nullptr);
+    EXPECT_EQ(sample->bufferId, 1);
+    EXPECT_FALSE(aborted);
+
+    enqueueThread.join();
+}
+
+TEST_F(MediaSampleQueueTests, TestDequeueBufferRelease) {
+    LOG(DEBUG) << "TestDequeueBufferRelease Starts";
+
+    static constexpr int kNumSamples = 4;
+    std::vector<bool> bufferReleased(kNumSamples, false);
+
+    MediaSample::OnSampleReleasedCallback callback = [&bufferReleased](MediaSample* sample) {
+        bufferReleased[sample->bufferId] = true;
+    };
+
+    MediaSampleQueue sampleQueue;
+    for (int i = 0; i < kNumSamples; ++i) {
+        bool aborted = sampleQueue.enqueue(
+                MediaSample::createWithReleaseCallback(nullptr, 0, i, callback));
+        EXPECT_FALSE(aborted);
+    }
+
+    for (int i = 0; i < kNumSamples; ++i) {
+        EXPECT_FALSE(bufferReleased[i]);
+    }
+
+    for (int i = 0; i < kNumSamples; ++i) {
+        {
+            std::shared_ptr<MediaSample> sample;
+            bool aborted = sampleQueue.dequeue(&sample);
+            EXPECT_NE(sample, nullptr);
+            EXPECT_EQ(sample->bufferId, i);
+            EXPECT_FALSE(bufferReleased[i]);
+            EXPECT_FALSE(aborted);
+        }
+
+        for (int j = 0; j < kNumSamples; ++j) {
+            EXPECT_EQ(bufferReleased[j], j <= i);
+        }
+    }
+}
+
+TEST_F(MediaSampleQueueTests, TestAbortBufferRelease) {
+    LOG(DEBUG) << "TestAbortBufferRelease Starts";
+
+    static constexpr int kNumSamples = 4;
+    std::vector<bool> bufferReleased(kNumSamples, false);
+
+    MediaSample::OnSampleReleasedCallback callback = [&bufferReleased](MediaSample* sample) {
+        bufferReleased[sample->bufferId] = true;
+    };
+
+    MediaSampleQueue sampleQueue;
+    for (int i = 0; i < kNumSamples; ++i) {
+        bool aborted = sampleQueue.enqueue(
+                MediaSample::createWithReleaseCallback(nullptr, 0, i, callback));
+        EXPECT_FALSE(aborted);
+    }
+
+    for (int i = 0; i < kNumSamples; ++i) {
+        EXPECT_FALSE(bufferReleased[i]);
+    }
+
+    sampleQueue.abort();
+
+    for (int i = 0; i < kNumSamples; ++i) {
+        EXPECT_TRUE(bufferReleased[i]);
+    }
+}
+
+TEST_F(MediaSampleQueueTests, TestNonEmptyAbort) {
+    LOG(DEBUG) << "TestNonEmptyAbort Starts";
+
+    MediaSampleQueue sampleQueue;
+    bool aborted = sampleQueue.enqueue(newSample(1));
+    EXPECT_FALSE(aborted);
+
+    sampleQueue.abort();
+
+    std::shared_ptr<MediaSample> sample;
+    aborted = sampleQueue.dequeue(&sample);
+    EXPECT_TRUE(aborted);
+    EXPECT_EQ(sample, nullptr);
+
+    aborted = sampleQueue.enqueue(sample);
+    EXPECT_TRUE(aborted);
+}
+
+TEST_F(MediaSampleQueueTests, TestEmptyAbort) {
+    LOG(DEBUG) << "TestEmptyAbort Starts";
+
+    MediaSampleQueue sampleQueue;
+    sampleQueue.abort();
+
+    std::shared_ptr<MediaSample> sample;
+    bool aborted = sampleQueue.dequeue(&sample);
+    EXPECT_TRUE(aborted);
+    EXPECT_EQ(sample, nullptr);
+
+    aborted = sampleQueue.enqueue(sample);
+    EXPECT_TRUE(aborted);
+}
+
+TEST_F(MediaSampleQueueTests, TestBlockingAbort) {
+    LOG(DEBUG) << "TestBlockingAbort Starts";
+
+    MediaSampleQueue sampleQueue;
+
+    std::thread abortingThread([&sampleQueue] {
+        // Note: This implementation is a bit racy. Any amount of sleep will not guarantee that the
+        // main thread will be blocked on the sample queue by the time this thread calls abort.
+        // But we can say with high confidence that it will and the test will not fail regardless.
+        std::this_thread::sleep_for(std::chrono::milliseconds(kThreadDelayDurationMs));
+        sampleQueue.abort();
+    });
+
+    std::shared_ptr<MediaSample> sample;
+    bool aborted = sampleQueue.dequeue(&sample);
+    EXPECT_TRUE(aborted);
+    EXPECT_EQ(sample, nullptr);
+
+    abortingThread.join();
+}
+
+}  // namespace android
+
+int main(int argc, char** argv) {
+    ::testing::InitGoogleTest(&argc, argv);
+    return RUN_ALL_TESTS();
+}
diff --git a/media/libmediatranscoding/transcoder/tests/MediaSampleReaderNDKTests.cpp b/media/libmediatranscoding/transcoder/tests/MediaSampleReaderNDKTests.cpp
new file mode 100644
index 0000000..805095e
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/tests/MediaSampleReaderNDKTests.cpp
@@ -0,0 +1,170 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// Unit Test for MediaSampleReaderNDK
+
+// #define LOG_NDEBUG 0
+#define LOG_TAG "MediaSampleReaderNDKTests"
+
+#include <android-base/logging.h>
+#include <android/binder_manager.h>
+#include <android/binder_process.h>
+#include <fcntl.h>
+#include <gtest/gtest.h>
+#include <media/MediaSampleReaderNDK.h>
+#include <utils/Timers.h>
+
+// TODO(b/153453392): Test more asset types and validate sample data from readSampleDataForTrack.
+
+namespace android {
+
+#define SEC_TO_USEC(s) ((s)*1000 * 1000)
+
+class MediaSampleReaderNDKTests : public ::testing::Test {
+public:
+    MediaSampleReaderNDKTests() { LOG(DEBUG) << "MediaSampleReaderNDKTests created"; }
+
+    void SetUp() override {
+        LOG(DEBUG) << "MediaSampleReaderNDKTests set up";
+        const char* sourcePath =
+                "/data/local/tmp/TranscodingTestAssets/cubicle_avc_480x240_aac_24KHz.mp4";
+
+        mExtractor = AMediaExtractor_new();
+        ASSERT_NE(mExtractor, nullptr);
+
+        mSourceFd = open(sourcePath, O_RDONLY);
+        ASSERT_GT(mSourceFd, 0);
+
+        mFileSize = lseek(mSourceFd, 0, SEEK_END);
+        lseek(mSourceFd, 0, SEEK_SET);
+
+        media_status_t status =
+                AMediaExtractor_setDataSourceFd(mExtractor, mSourceFd, 0, mFileSize);
+        ASSERT_EQ(status, AMEDIA_OK);
+
+        mTrackCount = AMediaExtractor_getTrackCount(mExtractor);
+        for (size_t trackIndex = 0; trackIndex < mTrackCount; trackIndex++) {
+            AMediaExtractor_selectTrack(mExtractor, trackIndex);
+        }
+    }
+
+    void initExtractorTimestamps() {
+        // Save all sample timestamps, per track, as reported by the extractor.
+        mExtractorTimestamps.resize(mTrackCount);
+        do {
+            const int trackIndex = AMediaExtractor_getSampleTrackIndex(mExtractor);
+            const int64_t sampleTime = AMediaExtractor_getSampleTime(mExtractor);
+
+            mExtractorTimestamps[trackIndex].push_back(sampleTime);
+        } while (AMediaExtractor_advance(mExtractor));
+    }
+
+    void TearDown() override {
+        LOG(DEBUG) << "MediaSampleReaderNDKTests tear down";
+        AMediaExtractor_delete(mExtractor);
+        close(mSourceFd);
+    }
+
+    ~MediaSampleReaderNDKTests() { LOG(DEBUG) << "MediaSampleReaderNDKTests destroyed"; }
+
+    AMediaExtractor* mExtractor = nullptr;
+    size_t mTrackCount;
+    int mSourceFd;
+    size_t mFileSize;
+    std::vector<std::vector<int64_t>> mExtractorTimestamps;
+};
+
+TEST_F(MediaSampleReaderNDKTests, TestSampleTimes) {
+    LOG(DEBUG) << "TestSampleTimes Starts";
+
+    std::shared_ptr<MediaSampleReader> sampleReader =
+            MediaSampleReaderNDK::createFromFd(mSourceFd, 0, mFileSize);
+    ASSERT_TRUE(sampleReader);
+
+    MediaSampleInfo info;
+    int trackEosCount = 0;
+    std::vector<bool> trackReachedEos(mTrackCount, false);
+    std::vector<std::vector<int64_t>> readerTimestamps(mTrackCount);
+
+    // Initialize the extractor timestamps.
+    initExtractorTimestamps();
+
+    // Read 5s of each track at a time.
+    const int64_t chunkDurationUs = SEC_TO_USEC(5);
+    int64_t chunkEndTimeUs = chunkDurationUs;
+
+    // Loop until all tracks have reached End Of Stream.
+    while (trackEosCount < mTrackCount) {
+        for (int trackIndex = 0; trackIndex < mTrackCount; trackIndex++) {
+            if (trackReachedEos[trackIndex]) continue;
+
+            // Advance current track to next chunk end time.
+            do {
+                media_status_t status = sampleReader->getSampleInfoForTrack(trackIndex, &info);
+                if (status != AMEDIA_OK) {
+                    ASSERT_EQ(status, AMEDIA_ERROR_END_OF_STREAM);
+                    ASSERT_TRUE((info.flags & SAMPLE_FLAG_END_OF_STREAM) != 0);
+                    trackReachedEos[trackIndex] = true;
+                    trackEosCount++;
+                    break;
+                }
+                ASSERT_TRUE((info.flags & SAMPLE_FLAG_END_OF_STREAM) == 0);
+                readerTimestamps[trackIndex].push_back(info.presentationTimeUs);
+                sampleReader->advanceTrack(trackIndex);
+            } while (info.presentationTimeUs < chunkEndTimeUs);
+        }
+        chunkEndTimeUs += chunkDurationUs;
+    }
+
+    for (int trackIndex = 0; trackIndex < mTrackCount; trackIndex++) {
+        LOG(DEBUG) << "Track " << trackIndex << ", comparing "
+                   << readerTimestamps[trackIndex].size() << " samples.";
+        ASSERT_EQ(readerTimestamps[trackIndex].size(), mExtractorTimestamps[trackIndex].size());
+        for (size_t sampleIndex = 0; sampleIndex < readerTimestamps[trackIndex].size();
+             sampleIndex++) {
+            ASSERT_EQ(readerTimestamps[trackIndex][sampleIndex],
+                      mExtractorTimestamps[trackIndex][sampleIndex]);
+        }
+    }
+}
+
+TEST_F(MediaSampleReaderNDKTests, TestInvalidFd) {
+    std::shared_ptr<MediaSampleReader> sampleReader =
+            MediaSampleReaderNDK::createFromFd(0, 0, mFileSize);
+    ASSERT_TRUE(sampleReader == nullptr);
+
+    sampleReader = MediaSampleReaderNDK::createFromFd(-1, 0, mFileSize);
+    ASSERT_TRUE(sampleReader == nullptr);
+}
+
+TEST_F(MediaSampleReaderNDKTests, TestZeroSize) {
+    std::shared_ptr<MediaSampleReader> sampleReader =
+            MediaSampleReaderNDK::createFromFd(mSourceFd, 0, 0);
+    ASSERT_TRUE(sampleReader == nullptr);
+}
+
+TEST_F(MediaSampleReaderNDKTests, TestInvalidOffset) {
+    std::shared_ptr<MediaSampleReader> sampleReader =
+            MediaSampleReaderNDK::createFromFd(mSourceFd, mFileSize, mFileSize);
+    ASSERT_TRUE(sampleReader == nullptr);
+}
+
+}  // namespace android
+
+int main(int argc, char** argv) {
+    ::testing::InitGoogleTest(&argc, argv);
+    return RUN_ALL_TESTS();
+}
diff --git a/media/libmediatranscoding/transcoder/tests/MediaSampleWriterTests.cpp b/media/libmediatranscoding/transcoder/tests/MediaSampleWriterTests.cpp
new file mode 100644
index 0000000..c82ec28
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/tests/MediaSampleWriterTests.cpp
@@ -0,0 +1,612 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// Unit Test for MediaSampleWriter
+
+// #define LOG_NDEBUG 0
+#define LOG_TAG "MediaSampleWriterTests"
+
+#include <android-base/logging.h>
+#include <fcntl.h>
+#include <gtest/gtest.h>
+#include <media/MediaSampleQueue.h>
+#include <media/MediaSampleWriter.h>
+#include <media/NdkMediaExtractor.h>
+
+#include <condition_variable>
+#include <list>
+#include <mutex>
+
+namespace android {
+
+/** Muxer interface to enable MediaSampleWriter testing. */
+class TestMuxer : public MediaSampleWriterMuxerInterface {
+public:
+    // MuxerInterface
+    ssize_t addTrack(AMediaFormat* trackFormat) override {
+        mEventQueue.push_back(AddTrack(trackFormat));
+        return mTrackCount++;
+    }
+    media_status_t start() override {
+        mEventQueue.push_back(Start());
+        return AMEDIA_OK;
+    }
+
+    media_status_t writeSampleData(size_t trackIndex, const uint8_t* data,
+                                   const AMediaCodecBufferInfo* info) override {
+        mEventQueue.push_back(WriteSample(trackIndex, data, info));
+        return AMEDIA_OK;
+    }
+    media_status_t stop() override {
+        mEventQueue.push_back(Stop());
+        return AMEDIA_OK;
+    }
+    // ~MuxerInterface
+
+    struct Event {
+        enum { NoEvent, AddTrack, Start, WriteSample, Stop } type = NoEvent;
+        const AMediaFormat* format = nullptr;
+        size_t trackIndex = 0;
+        const uint8_t* data = nullptr;
+        AMediaCodecBufferInfo info{};
+    };
+
+    static constexpr Event NoEvent = {Event::NoEvent, nullptr, 0, nullptr, {}};
+
+    static Event AddTrack(const AMediaFormat* format) {
+        return {.type = Event::AddTrack, .format = format};
+    }
+
+    static Event Start() { return {.type = Event::Start}; }
+    static Event Stop() { return {.type = Event::Stop}; }
+
+    static Event WriteSample(size_t trackIndex, const uint8_t* data,
+                             const AMediaCodecBufferInfo* info) {
+        return {.type = Event::WriteSample, .trackIndex = trackIndex, .data = data, .info = *info};
+    }
+
+    const Event& popEvent() {
+        if (mEventQueue.empty()) {
+            mPoppedEvent = NoEvent;
+        } else {
+            mPoppedEvent = *mEventQueue.begin();
+            mEventQueue.pop_front();
+        }
+        return mPoppedEvent;
+    }
+
+private:
+    Event mPoppedEvent;
+    std::list<Event> mEventQueue;
+    ssize_t mTrackCount = 0;
+};
+
+bool operator==(const AMediaCodecBufferInfo& lhs, const AMediaCodecBufferInfo& rhs) {
+    return lhs.offset == rhs.offset && lhs.size == rhs.size &&
+           lhs.presentationTimeUs == rhs.presentationTimeUs && lhs.flags == rhs.flags;
+}
+
+bool operator==(const TestMuxer::Event& lhs, const TestMuxer::Event& rhs) {
+    return lhs.type == rhs.type && lhs.format == rhs.format && lhs.trackIndex == rhs.trackIndex &&
+           lhs.data == rhs.data && lhs.info == rhs.info;
+}
+
+/** Represents a media source file. */
+class TestMediaSource {
+public:
+    void init() {
+        static const char* sourcePath =
+                "/data/local/tmp/TranscodingTestAssets/cubicle_avc_480x240_aac_24KHz.mp4";
+
+        mExtractor = AMediaExtractor_new();
+        ASSERT_NE(mExtractor, nullptr);
+
+        int sourceFd = open(sourcePath, O_RDONLY);
+        ASSERT_GT(sourceFd, 0);
+
+        off_t fileSize = lseek(sourceFd, 0, SEEK_END);
+        lseek(sourceFd, 0, SEEK_SET);
+
+        media_status_t status = AMediaExtractor_setDataSourceFd(mExtractor, sourceFd, 0, fileSize);
+        ASSERT_EQ(status, AMEDIA_OK);
+        close(sourceFd);
+
+        mTrackCount = AMediaExtractor_getTrackCount(mExtractor);
+        ASSERT_GT(mTrackCount, 1);
+        for (size_t trackIndex = 0; trackIndex < mTrackCount; trackIndex++) {
+            AMediaFormat* trackFormat = AMediaExtractor_getTrackFormat(mExtractor, trackIndex);
+            ASSERT_NE(trackFormat, nullptr);
+
+            const char* mime = nullptr;
+            AMediaFormat_getString(trackFormat, AMEDIAFORMAT_KEY_MIME, &mime);
+            if (strncmp(mime, "video/", 6) == 0) {
+                mVideoTrackIndex = trackIndex;
+            } else if (strncmp(mime, "audio/", 6) == 0) {
+                mAudioTrackIndex = trackIndex;
+            }
+
+            mTrackFormats.push_back(
+                    std::shared_ptr<AMediaFormat>(trackFormat, &AMediaFormat_delete));
+
+            AMediaExtractor_selectTrack(mExtractor, trackIndex);
+        }
+        EXPECT_GE(mVideoTrackIndex, 0);
+        EXPECT_GE(mAudioTrackIndex, 0);
+    }
+
+    void reset() const {
+        media_status_t status = AMediaExtractor_seekTo(mExtractor, 0 /* seekPosUs */,
+                                                       AMEDIAEXTRACTOR_SEEK_PREVIOUS_SYNC);
+        ASSERT_EQ(status, AMEDIA_OK);
+    }
+
+    AMediaExtractor* mExtractor = nullptr;
+    size_t mTrackCount = 0;
+    std::vector<std::shared_ptr<AMediaFormat>> mTrackFormats;
+    int mVideoTrackIndex = -1;
+    int mAudioTrackIndex = -1;
+};
+
+class TestCallbacks : public MediaSampleWriter::CallbackInterface {
+public:
+    TestCallbacks(bool expectSuccess = true) : mExpectSuccess(expectSuccess) {}
+
+    bool hasFinished() {
+        std::unique_lock<std::mutex> lock(mMutex);
+        return mFinished;
+    }
+
+    // MediaSampleWriter::CallbackInterface
+    virtual void onFinished(const MediaSampleWriter* writer __unused,
+                            media_status_t status) override {
+        std::unique_lock<std::mutex> lock(mMutex);
+        EXPECT_FALSE(mFinished);
+        if (mExpectSuccess) {
+            EXPECT_EQ(status, AMEDIA_OK);
+        } else {
+            EXPECT_NE(status, AMEDIA_OK);
+        }
+        mFinished = true;
+        mCondition.notify_all();
+    }
+
+    virtual void onProgressUpdate(const MediaSampleWriter* writer __unused,
+                                  int32_t progress) override {
+        EXPECT_GT(progress, mLastProgress);
+        EXPECT_GE(progress, 0);
+        EXPECT_LE(progress, 100);
+
+        mLastProgress = progress;
+        mProgressUpdateCount++;
+    }
+    // ~MediaSampleWriter::CallbackInterface
+
+    void waitForWritingFinished() {
+        std::unique_lock<std::mutex> lock(mMutex);
+        while (!mFinished) {
+            mCondition.wait(lock);
+        }
+    }
+
+    uint32_t getProgressUpdateCount() const { return mProgressUpdateCount; }
+
+private:
+    std::mutex mMutex;
+    std::condition_variable mCondition;
+    bool mFinished = false;
+    bool mExpectSuccess;
+    int32_t mLastProgress = -1;
+    uint32_t mProgressUpdateCount = 0;
+};
+
+class MediaSampleWriterTests : public ::testing::Test {
+public:
+    MediaSampleWriterTests() { LOG(DEBUG) << "MediaSampleWriterTests created"; }
+    ~MediaSampleWriterTests() { LOG(DEBUG) << "MediaSampleWriterTests destroyed"; }
+
+    static const TestMediaSource& getMediaSource() {
+        static TestMediaSource sMediaSource;
+        static std::once_flag sOnceToken;
+
+        std::call_once(sOnceToken, [] { sMediaSource.init(); });
+
+        sMediaSource.reset();
+        return sMediaSource;
+    }
+
+    static std::shared_ptr<MediaSample> newSample(int64_t ptsUs, uint32_t flags, size_t size,
+                                                  size_t offset, const uint8_t* buffer) {
+        auto sample = std::make_shared<MediaSample>();
+        sample->info.presentationTimeUs = ptsUs;
+        sample->info.flags = flags;
+        sample->info.size = size;
+        sample->dataOffset = offset;
+        sample->buffer = buffer;
+        return sample;
+    }
+
+    static std::shared_ptr<MediaSample> newSampleEos() {
+        return newSample(0, SAMPLE_FLAG_END_OF_STREAM, 0, 0, nullptr);
+    }
+
+    static std::shared_ptr<MediaSample> newSampleWithPts(int64_t ptsUs) {
+        static uint32_t sampleCount = 0;
+
+        // Use sampleCount to get a unique dummy sample.
+        uint32_t sampleId = ++sampleCount;
+        return newSample(ptsUs, 0, sampleId, sampleId, reinterpret_cast<const uint8_t*>(sampleId));
+    }
+
+    void SetUp() override {
+        LOG(DEBUG) << "MediaSampleWriterTests set up";
+        mTestMuxer = std::make_shared<TestMuxer>();
+        mSampleQueue = std::make_shared<MediaSampleQueue>();
+    }
+
+    void TearDown() override {
+        LOG(DEBUG) << "MediaSampleWriterTests tear down";
+        mTestMuxer.reset();
+        mSampleQueue.reset();
+    }
+
+protected:
+    std::shared_ptr<TestMuxer> mTestMuxer;
+    std::shared_ptr<MediaSampleQueue> mSampleQueue;
+    std::shared_ptr<TestCallbacks> mTestCallbacks = std::make_shared<TestCallbacks>();
+};
+
+TEST_F(MediaSampleWriterTests, TestAddTrackWithoutInit) {
+    const TestMediaSource& mediaSource = getMediaSource();
+
+    MediaSampleWriter writer{};
+    EXPECT_FALSE(writer.addTrack(mSampleQueue, mediaSource.mTrackFormats[0]));
+}
+
+TEST_F(MediaSampleWriterTests, TestStartWithoutInit) {
+    MediaSampleWriter writer{};
+    EXPECT_FALSE(writer.start());
+}
+
+TEST_F(MediaSampleWriterTests, TestStartWithoutTracks) {
+    MediaSampleWriter writer{};
+    EXPECT_TRUE(writer.init(mTestMuxer, mTestCallbacks));
+    EXPECT_FALSE(writer.start());
+    EXPECT_EQ(mTestMuxer->popEvent(), TestMuxer::NoEvent);
+}
+
+TEST_F(MediaSampleWriterTests, TestAddInvalidTrack) {
+    MediaSampleWriter writer{};
+    EXPECT_TRUE(writer.init(mTestMuxer, mTestCallbacks));
+
+    EXPECT_FALSE(writer.addTrack(mSampleQueue, nullptr));
+    EXPECT_EQ(mTestMuxer->popEvent(), TestMuxer::NoEvent);
+
+    const TestMediaSource& mediaSource = getMediaSource();
+    EXPECT_FALSE(writer.addTrack(nullptr, mediaSource.mTrackFormats[0]));
+    EXPECT_EQ(mTestMuxer->popEvent(), TestMuxer::NoEvent);
+}
+
+TEST_F(MediaSampleWriterTests, TestDoubleStartStop) {
+    MediaSampleWriter writer{};
+
+    std::shared_ptr<TestCallbacks> callbacks =
+            std::make_shared<TestCallbacks>(false /* expectSuccess */);
+    EXPECT_TRUE(writer.init(mTestMuxer, callbacks));
+
+    const TestMediaSource& mediaSource = getMediaSource();
+    EXPECT_TRUE(writer.addTrack(mSampleQueue, mediaSource.mTrackFormats[0]));
+    EXPECT_EQ(mTestMuxer->popEvent(), TestMuxer::AddTrack(mediaSource.mTrackFormats[0].get()));
+
+    ASSERT_TRUE(writer.start());
+    EXPECT_FALSE(writer.start());
+
+    EXPECT_TRUE(writer.stop());
+    EXPECT_TRUE(callbacks->hasFinished());
+    EXPECT_FALSE(writer.stop());
+}
+
+TEST_F(MediaSampleWriterTests, TestStopWithoutStart) {
+    MediaSampleWriter writer{};
+    EXPECT_TRUE(writer.init(mTestMuxer, mTestCallbacks));
+
+    const TestMediaSource& mediaSource = getMediaSource();
+    EXPECT_TRUE(writer.addTrack(mSampleQueue, mediaSource.mTrackFormats[0]));
+    EXPECT_EQ(mTestMuxer->popEvent(), TestMuxer::AddTrack(mediaSource.mTrackFormats[0].get()));
+
+    EXPECT_FALSE(writer.stop());
+    EXPECT_EQ(mTestMuxer->popEvent(), TestMuxer::NoEvent);
+}
+
+TEST_F(MediaSampleWriterTests, TestStartWithoutCallback) {
+    MediaSampleWriter writer{};
+
+    std::weak_ptr<MediaSampleWriter::CallbackInterface> unassignedWp;
+    EXPECT_FALSE(writer.init(mTestMuxer, unassignedWp));
+
+    std::shared_ptr<MediaSampleWriter::CallbackInterface> unassignedSp;
+    EXPECT_FALSE(writer.init(mTestMuxer, unassignedSp));
+
+    const TestMediaSource& mediaSource = getMediaSource();
+    EXPECT_FALSE(writer.addTrack(mSampleQueue, mediaSource.mTrackFormats[0]));
+    ASSERT_FALSE(writer.start());
+}
+
+TEST_F(MediaSampleWriterTests, TestProgressUpdate) {
+    static constexpr uint32_t kSegmentLengthUs = 1;
+    const TestMediaSource& mediaSource = getMediaSource();
+
+    MediaSampleWriter writer{kSegmentLengthUs};
+    EXPECT_TRUE(writer.init(mTestMuxer, mTestCallbacks));
+
+    std::shared_ptr<AMediaFormat> videoFormat =
+            std::shared_ptr<AMediaFormat>(AMediaFormat_new(), &AMediaFormat_delete);
+    AMediaFormat_copy(videoFormat.get(),
+                      mediaSource.mTrackFormats[mediaSource.mVideoTrackIndex].get());
+
+    AMediaFormat_setInt64(videoFormat.get(), AMEDIAFORMAT_KEY_DURATION, 100);
+    EXPECT_TRUE(writer.addTrack(mSampleQueue, videoFormat));
+    ASSERT_TRUE(writer.start());
+
+    for (int64_t pts = 0; pts < 100; ++pts) {
+        mSampleQueue->enqueue(newSampleWithPts(pts));
+    }
+    mSampleQueue->enqueue(newSampleEos());
+    mTestCallbacks->waitForWritingFinished();
+
+    EXPECT_EQ(mTestCallbacks->getProgressUpdateCount(), 100);
+}
+
+TEST_F(MediaSampleWriterTests, TestInterleaving) {
+    static constexpr uint32_t kSegmentLength = MediaSampleWriter::kDefaultTrackSegmentLengthUs;
+
+    MediaSampleWriter writer{kSegmentLength};
+    EXPECT_TRUE(writer.init(mTestMuxer, mTestCallbacks));
+
+    // Use two tracks for this test.
+    static constexpr int kNumTracks = 2;
+    std::shared_ptr<MediaSampleQueue> sampleQueues[kNumTracks];
+    std::vector<std::pair<std::shared_ptr<MediaSample>, size_t>> interleavedSamples;
+    const TestMediaSource& mediaSource = getMediaSource();
+
+    for (int trackIdx = 0; trackIdx < kNumTracks; ++trackIdx) {
+        sampleQueues[trackIdx] = std::make_shared<MediaSampleQueue>();
+
+        auto trackFormat = mediaSource.mTrackFormats[trackIdx % mediaSource.mTrackCount];
+        EXPECT_TRUE(writer.addTrack(sampleQueues[trackIdx], trackFormat));
+        EXPECT_EQ(mTestMuxer->popEvent(), TestMuxer::AddTrack(trackFormat.get()));
+    }
+
+    // Create samples in the expected interleaved order for easy verification.
+    auto addSampleToTrackWithPts = [&interleavedSamples, &sampleQueues](int trackIndex,
+                                                                        int64_t pts) {
+        auto sample = newSampleWithPts(pts);
+        sampleQueues[trackIndex]->enqueue(sample);
+        interleavedSamples.emplace_back(sample, trackIndex);
+    };
+
+    addSampleToTrackWithPts(0, 0);
+    addSampleToTrackWithPts(0, kSegmentLength / 2);
+    addSampleToTrackWithPts(0, kSegmentLength);  // Track 0 reached 1st segment end
+
+    addSampleToTrackWithPts(1, 0);
+    addSampleToTrackWithPts(1, kSegmentLength);  // Track 1 reached 1st segment end
+
+    addSampleToTrackWithPts(0, kSegmentLength * 2);  // Track 0 reached 2nd segment end
+
+    addSampleToTrackWithPts(1, kSegmentLength + 1);
+    addSampleToTrackWithPts(1, kSegmentLength * 2);  // Track 1 reached 2nd segment end
+
+    addSampleToTrackWithPts(0, kSegmentLength * 2 + 1);
+
+    for (int trackIndex = 0; trackIndex < kNumTracks; ++trackIndex) {
+        sampleQueues[trackIndex]->enqueue(newSampleEos());
+    }
+
+    // Start the writer.
+    ASSERT_TRUE(writer.start());
+
+    // Wait for writer to complete.
+    mTestCallbacks->waitForWritingFinished();
+    EXPECT_EQ(mTestMuxer->popEvent(), TestMuxer::Start());
+
+    // Verify sample order.
+    for (auto entry : interleavedSamples) {
+        auto sample = entry.first;
+        auto trackIndex = entry.second;
+
+        const TestMuxer::Event& event = mTestMuxer->popEvent();
+        EXPECT_EQ(event.type, TestMuxer::Event::WriteSample);
+        EXPECT_EQ(event.trackIndex, trackIndex);
+        EXPECT_EQ(event.data, sample->buffer);
+        EXPECT_EQ(event.info.offset, sample->dataOffset);
+        EXPECT_EQ(event.info.size, sample->info.size);
+        EXPECT_EQ(event.info.presentationTimeUs, sample->info.presentationTimeUs);
+        EXPECT_EQ(event.info.flags, sample->info.flags);
+    }
+
+    // Verify EOS samples.
+    for (int trackIndex = 0; trackIndex < kNumTracks; ++trackIndex) {
+        auto trackFormat = mediaSource.mTrackFormats[trackIndex % mediaSource.mTrackCount];
+        int64_t duration = 0;
+        AMediaFormat_getInt64(trackFormat.get(), AMEDIAFORMAT_KEY_DURATION, &duration);
+
+        const AMediaCodecBufferInfo info = {0, 0, duration, AMEDIACODEC_BUFFER_FLAG_END_OF_STREAM};
+        EXPECT_EQ(mTestMuxer->popEvent(), TestMuxer::WriteSample(trackIndex, nullptr, &info));
+    }
+
+    EXPECT_EQ(mTestMuxer->popEvent(), TestMuxer::Stop());
+    EXPECT_TRUE(writer.stop());
+    EXPECT_TRUE(mTestCallbacks->hasFinished());
+}
+
+TEST_F(MediaSampleWriterTests, TestAbortInputQueue) {
+    MediaSampleWriter writer{};
+    std::shared_ptr<TestCallbacks> callbacks =
+            std::make_shared<TestCallbacks>(false /* expectSuccess */);
+    EXPECT_TRUE(writer.init(mTestMuxer, callbacks));
+
+    // Use two tracks for this test.
+    static constexpr int kNumTracks = 2;
+    std::shared_ptr<MediaSampleQueue> sampleQueues[kNumTracks];
+    const TestMediaSource& mediaSource = getMediaSource();
+
+    for (int trackIdx = 0; trackIdx < kNumTracks; ++trackIdx) {
+        sampleQueues[trackIdx] = std::make_shared<MediaSampleQueue>();
+
+        auto trackFormat = mediaSource.mTrackFormats[trackIdx % mediaSource.mTrackCount];
+        EXPECT_TRUE(writer.addTrack(sampleQueues[trackIdx], trackFormat));
+        EXPECT_EQ(mTestMuxer->popEvent(), TestMuxer::AddTrack(trackFormat.get()));
+    }
+
+    // Start the writer.
+    ASSERT_TRUE(writer.start());
+
+    // Abort the input queues and wait for the writer to complete.
+    for (int trackIdx = 0; trackIdx < kNumTracks; ++trackIdx) {
+        sampleQueues[trackIdx]->abort();
+    }
+
+    callbacks->waitForWritingFinished();
+
+    EXPECT_EQ(mTestMuxer->popEvent(), TestMuxer::Start());
+    EXPECT_EQ(mTestMuxer->popEvent(), TestMuxer::Stop());
+    EXPECT_TRUE(writer.stop());
+}
+
+// Convenience function for reading a sample from an AMediaExtractor represented as a MediaSample.
+static std::shared_ptr<MediaSample> readSampleAndAdvance(AMediaExtractor* extractor,
+                                                         size_t* trackIndexOut) {
+    int trackIndex = AMediaExtractor_getSampleTrackIndex(extractor);
+    if (trackIndex < 0) {
+        return nullptr;
+    }
+
+    if (trackIndexOut != nullptr) {
+        *trackIndexOut = trackIndex;
+    }
+
+    ssize_t sampleSize = AMediaExtractor_getSampleSize(extractor);
+    int64_t sampleTimeUs = AMediaExtractor_getSampleTime(extractor);
+    uint32_t flags = AMediaExtractor_getSampleFlags(extractor);
+
+    size_t bufferSize = static_cast<size_t>(sampleSize);
+    uint8_t* buffer = new uint8_t[bufferSize];
+
+    ssize_t dataRead = AMediaExtractor_readSampleData(extractor, buffer, bufferSize);
+    EXPECT_EQ(dataRead, sampleSize);
+
+    auto sample = MediaSample::createWithReleaseCallback(
+            buffer, 0 /* offset */, 0 /* id */, [buffer](MediaSample*) { delete[] buffer; });
+    sample->info.size = bufferSize;
+    sample->info.presentationTimeUs = sampleTimeUs;
+    sample->info.flags = flags;
+
+    (void)AMediaExtractor_advance(extractor);
+    return sample;
+}
+
+TEST_F(MediaSampleWriterTests, TestDefaultMuxer) {
+    // Write samples straight from an extractor and validate output file.
+    static const char* destinationPath =
+            "/data/local/tmp/MediaSampleWriterTests_TestDefaultMuxer_output.MP4";
+    const int destinationFd =
+            open(destinationPath, O_WRONLY | O_CREAT, S_IRUSR | S_IWUSR | S_IROTH);
+    ASSERT_GT(destinationFd, 0);
+
+    // Initialize writer.
+    MediaSampleWriter writer{};
+    EXPECT_TRUE(writer.init(destinationFd, mTestCallbacks));
+    close(destinationFd);
+
+    // Add tracks.
+    const TestMediaSource& mediaSource = getMediaSource();
+    std::vector<std::shared_ptr<MediaSampleQueue>> inputQueues;
+
+    for (size_t trackIndex = 0; trackIndex < mediaSource.mTrackCount; trackIndex++) {
+        inputQueues.push_back(std::make_shared<MediaSampleQueue>());
+        EXPECT_TRUE(
+                writer.addTrack(inputQueues[trackIndex], mediaSource.mTrackFormats[trackIndex]));
+    }
+
+    // Start the writer.
+    ASSERT_TRUE(writer.start());
+
+    // Enqueue samples and finally End Of Stream.
+    std::shared_ptr<MediaSample> sample;
+    size_t trackIndex;
+    while ((sample = readSampleAndAdvance(mediaSource.mExtractor, &trackIndex)) != nullptr) {
+        inputQueues[trackIndex]->enqueue(sample);
+    }
+    for (trackIndex = 0; trackIndex < mediaSource.mTrackCount; trackIndex++) {
+        inputQueues[trackIndex]->enqueue(newSampleEos());
+    }
+
+    // Wait for writer.
+    mTestCallbacks->waitForWritingFinished();
+    EXPECT_TRUE(writer.stop());
+
+    // Compare output file with source.
+    mediaSource.reset();
+
+    AMediaExtractor* extractor = AMediaExtractor_new();
+    ASSERT_NE(extractor, nullptr);
+
+    int sourceFd = open(destinationPath, O_RDONLY);
+    ASSERT_GT(sourceFd, 0);
+
+    off_t fileSize = lseek(sourceFd, 0, SEEK_END);
+    lseek(sourceFd, 0, SEEK_SET);
+
+    media_status_t status = AMediaExtractor_setDataSourceFd(extractor, sourceFd, 0, fileSize);
+    ASSERT_EQ(status, AMEDIA_OK);
+    close(sourceFd);
+
+    size_t trackCount = AMediaExtractor_getTrackCount(extractor);
+    EXPECT_EQ(trackCount, mediaSource.mTrackCount);
+
+    for (size_t trackIndex = 0; trackIndex < trackCount; trackIndex++) {
+        AMediaFormat* trackFormat = AMediaExtractor_getTrackFormat(extractor, trackIndex);
+        ASSERT_NE(trackFormat, nullptr);
+
+        AMediaExtractor_selectTrack(extractor, trackIndex);
+    }
+
+    // Compare samples.
+    std::shared_ptr<MediaSample> sample1 = readSampleAndAdvance(mediaSource.mExtractor, nullptr);
+    std::shared_ptr<MediaSample> sample2 = readSampleAndAdvance(extractor, nullptr);
+
+    while (sample1 != nullptr && sample2 != nullptr) {
+        EXPECT_EQ(sample1->info.presentationTimeUs, sample2->info.presentationTimeUs);
+        EXPECT_EQ(sample1->info.size, sample2->info.size);
+        EXPECT_EQ(sample1->info.flags, sample2->info.flags);
+
+        EXPECT_EQ(memcmp(sample1->buffer, sample2->buffer, sample1->info.size), 0);
+
+        sample1 = readSampleAndAdvance(mediaSource.mExtractor, nullptr);
+        sample2 = readSampleAndAdvance(extractor, nullptr);
+    }
+    EXPECT_EQ(sample1, nullptr);
+    EXPECT_EQ(sample2, nullptr);
+
+    AMediaExtractor_delete(extractor);
+}
+
+}  // namespace android
+
+int main(int argc, char** argv) {
+    ::testing::InitGoogleTest(&argc, argv);
+    return RUN_ALL_TESTS();
+}
diff --git a/media/libmediatranscoding/transcoder/tests/MediaTrackTranscoderTests.cpp b/media/libmediatranscoding/transcoder/tests/MediaTrackTranscoderTests.cpp
new file mode 100644
index 0000000..502d5aa
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/tests/MediaTrackTranscoderTests.cpp
@@ -0,0 +1,317 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// Unit Test for MediaTrackTranscoder
+
+// #define LOG_NDEBUG 0
+#define LOG_TAG "MediaTrackTranscoderTests"
+
+#include <android-base/logging.h>
+#include <android/binder_process.h>
+#include <fcntl.h>
+#include <gtest/gtest.h>
+#include <media/MediaSampleReaderNDK.h>
+#include <media/MediaTrackTranscoder.h>
+#include <media/PassthroughTrackTranscoder.h>
+#include <media/VideoTrackTranscoder.h>
+
+#include "TrackTranscoderTestUtils.h"
+
+namespace android {
+
+/** TrackTranscoder types to test. */
+enum TrackTranscoderType {
+    VIDEO,
+    PASSTHROUGH,
+};
+
+class MediaTrackTranscoderTests : public ::testing::TestWithParam<TrackTranscoderType> {
+public:
+    MediaTrackTranscoderTests() { LOG(DEBUG) << "MediaTrackTranscoderTests created"; }
+
+    void SetUp() override {
+        LOG(DEBUG) << "MediaTrackTranscoderTests set up";
+
+        // Need to start a thread pool to prevent AMediaExtractor binder calls from starving
+        // (b/155663561).
+        ABinderProcess_startThreadPool();
+
+        mCallback = std::make_shared<TestCallback>();
+
+        switch (GetParam()) {
+        case VIDEO:
+            mTranscoder = VideoTrackTranscoder::create(mCallback);
+            break;
+        case PASSTHROUGH:
+            mTranscoder = std::make_shared<PassthroughTrackTranscoder>(mCallback);
+            break;
+        }
+        ASSERT_NE(mTranscoder, nullptr);
+        mTranscoderOutputQueue = mTranscoder->getOutputQueue();
+
+        initSampleReader();
+    }
+
+    void initSampleReader() {
+        const char* sourcePath =
+                "/data/local/tmp/TranscodingTestAssets/cubicle_avc_480x240_aac_24KHz.mp4";
+
+        const int sourceFd = open(sourcePath, O_RDONLY);
+        ASSERT_GT(sourceFd, 0);
+
+        const size_t fileSize = lseek(sourceFd, 0, SEEK_END);
+        lseek(sourceFd, 0, SEEK_SET);
+
+        mMediaSampleReader = MediaSampleReaderNDK::createFromFd(sourceFd, 0 /* offset */, fileSize);
+        ASSERT_NE(mMediaSampleReader, nullptr);
+        close(sourceFd);
+
+        for (size_t trackIndex = 0; trackIndex < mMediaSampleReader->getTrackCount();
+             ++trackIndex) {
+            AMediaFormat* trackFormat = mMediaSampleReader->getTrackFormat(trackIndex);
+            ASSERT_NE(trackFormat, nullptr);
+
+            const char* mime = nullptr;
+            AMediaFormat_getString(trackFormat, AMEDIAFORMAT_KEY_MIME, &mime);
+            ASSERT_NE(mime, nullptr);
+
+            if (GetParam() == VIDEO && strncmp(mime, "video/", 6) == 0) {
+                mTrackIndex = trackIndex;
+
+                mSourceFormat = std::shared_ptr<AMediaFormat>(
+                        trackFormat, std::bind(AMediaFormat_delete, std::placeholders::_1));
+                ASSERT_NE(mSourceFormat, nullptr);
+
+                mDestinationFormat =
+                        TrackTranscoderTestUtils::getDefaultVideoDestinationFormat(trackFormat);
+                ASSERT_NE(mDestinationFormat, nullptr);
+                break;
+            } else if (GetParam() == PASSTHROUGH && strncmp(mime, "audio/", 6) == 0) {
+                // TODO(lnilsson): Test metadata track passthrough after hkuang@ provides sample.
+                mTrackIndex = trackIndex;
+
+                mSourceFormat = std::shared_ptr<AMediaFormat>(
+                        trackFormat, std::bind(AMediaFormat_delete, std::placeholders::_1));
+                ASSERT_NE(mSourceFormat, nullptr);
+                break;
+            }
+
+            AMediaFormat_delete(trackFormat);
+        }
+
+        ASSERT_NE(mSourceFormat, nullptr);
+    }
+
+    // Drains the transcoder's output queue in a loop.
+    void drainOutputSampleQueue() {
+        mSampleQueueDrainThread = std::thread{[this] {
+            std::shared_ptr<MediaSample> sample;
+            bool aborted = false;
+            do {
+                aborted = mTranscoderOutputQueue->dequeue(&sample);
+            } while (!aborted && !(sample->info.flags & SAMPLE_FLAG_END_OF_STREAM));
+            mQueueWasAborted = aborted;
+            mGotEndOfStream =
+                    sample != nullptr && (sample->info.flags & SAMPLE_FLAG_END_OF_STREAM) != 0;
+        }};
+    }
+
+    void joinDrainThread() {
+        if (mSampleQueueDrainThread.joinable()) {
+            mSampleQueueDrainThread.join();
+        }
+    }
+    void TearDown() override {
+        LOG(DEBUG) << "MediaTrackTranscoderTests tear down";
+        joinDrainThread();
+    }
+
+    ~MediaTrackTranscoderTests() { LOG(DEBUG) << "MediaTrackTranscoderTests destroyed"; }
+
+protected:
+    std::shared_ptr<MediaTrackTranscoder> mTranscoder;
+    std::shared_ptr<MediaSampleQueue> mTranscoderOutputQueue;
+    std::shared_ptr<TestCallback> mCallback;
+
+    std::shared_ptr<MediaSampleReader> mMediaSampleReader;
+    int mTrackIndex;
+
+    std::shared_ptr<AMediaFormat> mSourceFormat;
+    std::shared_ptr<AMediaFormat> mDestinationFormat;
+
+    std::thread mSampleQueueDrainThread;
+    bool mQueueWasAborted = false;
+    bool mGotEndOfStream = false;
+};
+
+TEST_P(MediaTrackTranscoderTests, WaitNormalOperation) {
+    LOG(DEBUG) << "Testing WaitNormalOperation";
+    EXPECT_EQ(mTranscoder->configure(mMediaSampleReader, mTrackIndex, mDestinationFormat),
+              AMEDIA_OK);
+    ASSERT_TRUE(mTranscoder->start());
+    drainOutputSampleQueue();
+    EXPECT_EQ(mCallback->waitUntilFinished(), AMEDIA_OK);
+    joinDrainThread();
+    EXPECT_TRUE(mTranscoder->stop());
+    EXPECT_FALSE(mQueueWasAborted);
+    EXPECT_TRUE(mGotEndOfStream);
+}
+
+TEST_P(MediaTrackTranscoderTests, StopNormalOperation) {
+    LOG(DEBUG) << "Testing StopNormalOperation";
+    EXPECT_EQ(mTranscoder->configure(mMediaSampleReader, mTrackIndex, mDestinationFormat),
+              AMEDIA_OK);
+    EXPECT_TRUE(mTranscoder->start());
+    EXPECT_TRUE(mTranscoder->stop());
+}
+
+TEST_P(MediaTrackTranscoderTests, StartWithoutConfigure) {
+    LOG(DEBUG) << "Testing StartWithoutConfigure";
+    EXPECT_FALSE(mTranscoder->start());
+}
+
+TEST_P(MediaTrackTranscoderTests, StopWithoutStart) {
+    LOG(DEBUG) << "Testing StopWithoutStart";
+    EXPECT_EQ(mTranscoder->configure(mMediaSampleReader, mTrackIndex, mDestinationFormat),
+              AMEDIA_OK);
+    EXPECT_FALSE(mTranscoder->stop());
+}
+
+TEST_P(MediaTrackTranscoderTests, DoubleStartStop) {
+    LOG(DEBUG) << "Testing DoubleStartStop";
+    EXPECT_EQ(mTranscoder->configure(mMediaSampleReader, mTrackIndex, mDestinationFormat),
+              AMEDIA_OK);
+    EXPECT_TRUE(mTranscoder->start());
+    EXPECT_FALSE(mTranscoder->start());
+    EXPECT_TRUE(mTranscoder->stop());
+    EXPECT_FALSE(mTranscoder->stop());
+}
+
+TEST_P(MediaTrackTranscoderTests, DoubleConfigure) {
+    LOG(DEBUG) << "Testing DoubleConfigure";
+    EXPECT_EQ(mTranscoder->configure(mMediaSampleReader, mTrackIndex, mDestinationFormat),
+              AMEDIA_OK);
+    EXPECT_EQ(mTranscoder->configure(mMediaSampleReader, mTrackIndex, mDestinationFormat),
+              AMEDIA_ERROR_UNSUPPORTED);
+}
+
+TEST_P(MediaTrackTranscoderTests, ConfigureAfterFail) {
+    LOG(DEBUG) << "Testing ConfigureAfterFail";
+    EXPECT_EQ(mTranscoder->configure(mMediaSampleReader, -1, mDestinationFormat),
+              AMEDIA_ERROR_INVALID_PARAMETER);
+    EXPECT_EQ(mTranscoder->configure(mMediaSampleReader, mTrackIndex, mDestinationFormat),
+              AMEDIA_OK);
+}
+
+TEST_P(MediaTrackTranscoderTests, RestartAfterStop) {
+    LOG(DEBUG) << "Testing RestartAfterStop";
+    EXPECT_EQ(mTranscoder->configure(mMediaSampleReader, mTrackIndex, mDestinationFormat),
+              AMEDIA_OK);
+    EXPECT_TRUE(mTranscoder->start());
+    EXPECT_TRUE(mTranscoder->stop());
+    EXPECT_FALSE(mTranscoder->start());
+}
+
+TEST_P(MediaTrackTranscoderTests, RestartAfterFinish) {
+    LOG(DEBUG) << "Testing RestartAfterFinish";
+    EXPECT_EQ(mTranscoder->configure(mMediaSampleReader, mTrackIndex, mDestinationFormat),
+              AMEDIA_OK);
+    ASSERT_TRUE(mTranscoder->start());
+    drainOutputSampleQueue();
+    EXPECT_EQ(mCallback->waitUntilFinished(), AMEDIA_OK);
+    joinDrainThread();
+    EXPECT_TRUE(mTranscoder->stop());
+    EXPECT_FALSE(mTranscoder->start());
+    EXPECT_FALSE(mQueueWasAborted);
+    EXPECT_TRUE(mGotEndOfStream);
+}
+
+TEST_P(MediaTrackTranscoderTests, AbortOutputQueue) {
+    LOG(DEBUG) << "Testing AbortOutputQueue";
+    EXPECT_EQ(mTranscoder->configure(mMediaSampleReader, mTrackIndex, mDestinationFormat),
+              AMEDIA_OK);
+    ASSERT_TRUE(mTranscoder->start());
+    mTranscoderOutputQueue->abort();
+    drainOutputSampleQueue();
+    EXPECT_EQ(mCallback->waitUntilFinished(), AMEDIA_ERROR_IO);
+    joinDrainThread();
+    EXPECT_TRUE(mTranscoder->stop());
+    EXPECT_TRUE(mQueueWasAborted);
+    EXPECT_FALSE(mGotEndOfStream);
+}
+
+TEST_P(MediaTrackTranscoderTests, HoldSampleAfterTranscoderRelease) {
+    LOG(DEBUG) << "Testing HoldSampleAfterTranscoderRelease";
+    EXPECT_EQ(mTranscoder->configure(mMediaSampleReader, mTrackIndex, mDestinationFormat),
+              AMEDIA_OK);
+    ASSERT_TRUE(mTranscoder->start());
+
+    std::shared_ptr<MediaSample> sample;
+    EXPECT_FALSE(mTranscoderOutputQueue->dequeue(&sample));
+
+    drainOutputSampleQueue();
+    EXPECT_EQ(mCallback->waitUntilFinished(), AMEDIA_OK);
+    joinDrainThread();
+    EXPECT_TRUE(mTranscoder->stop());
+    EXPECT_FALSE(mQueueWasAborted);
+    EXPECT_TRUE(mGotEndOfStream);
+
+    mTranscoder.reset();
+    mTranscoderOutputQueue.reset();
+    std::this_thread::sleep_for(std::chrono::milliseconds(20));
+    sample.reset();
+}
+
+TEST_P(MediaTrackTranscoderTests, HoldSampleAfterTranscoderStop) {
+    LOG(DEBUG) << "Testing HoldSampleAfterTranscoderStop";
+    EXPECT_EQ(mTranscoder->configure(mMediaSampleReader, mTrackIndex, mDestinationFormat),
+              AMEDIA_OK);
+    ASSERT_TRUE(mTranscoder->start());
+
+    std::shared_ptr<MediaSample> sample;
+    EXPECT_FALSE(mTranscoderOutputQueue->dequeue(&sample));
+    EXPECT_TRUE(mTranscoder->stop());
+
+    std::this_thread::sleep_for(std::chrono::milliseconds(20));
+    sample.reset();
+}
+
+TEST_P(MediaTrackTranscoderTests, NullSampleReader) {
+    LOG(DEBUG) << "Testing NullSampleReader";
+    std::shared_ptr<MediaSampleReader> nullSampleReader;
+    EXPECT_NE(mTranscoder->configure(nullSampleReader, mTrackIndex, mDestinationFormat), AMEDIA_OK);
+    ASSERT_FALSE(mTranscoder->start());
+}
+
+TEST_P(MediaTrackTranscoderTests, InvalidTrackIndex) {
+    LOG(DEBUG) << "Testing InvalidTrackIndex";
+    EXPECT_NE(mTranscoder->configure(mMediaSampleReader, -1, mDestinationFormat), AMEDIA_OK);
+    EXPECT_NE(mTranscoder->configure(mMediaSampleReader, mMediaSampleReader->getTrackCount(),
+                                     mDestinationFormat),
+              AMEDIA_OK);
+}
+
+};  // namespace android
+
+using namespace android;
+
+INSTANTIATE_TEST_SUITE_P(MediaTrackTranscoderTestsAll, MediaTrackTranscoderTests,
+                         ::testing::Values(VIDEO, PASSTHROUGH));
+
+int main(int argc, char** argv) {
+    ::testing::InitGoogleTest(&argc, argv);
+    return RUN_ALL_TESTS();
+}
diff --git a/media/libmediatranscoding/transcoder/tests/MediaTranscoderTests.cpp b/media/libmediatranscoding/transcoder/tests/MediaTranscoderTests.cpp
new file mode 100644
index 0000000..e68eaac
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/tests/MediaTranscoderTests.cpp
@@ -0,0 +1,284 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// Unit Test for MediaTranscoder
+
+// #define LOG_NDEBUG 0
+#define LOG_TAG "MediaTranscoderTests"
+
+#include <android-base/logging.h>
+#include <fcntl.h>
+#include <gtest/gtest.h>
+#include <media/MediaSampleReaderNDK.h>
+#include <media/MediaTranscoder.h>
+#include <media/NdkCommon.h>
+
+namespace android {
+
+#define DEFINE_FORMAT_VALUE_EQUAL_FUNC(_type, _typeName)                                  \
+    static bool equal##_typeName(const char* key, AMediaFormat* src, AMediaFormat* dst) { \
+        _type srcVal, dstVal;                                                             \
+        bool srcPresent = AMediaFormat_get##_typeName(src, key, &srcVal);                 \
+        bool dstPresent = AMediaFormat_get##_typeName(dst, key, &dstVal);                 \
+        return (srcPresent == dstPresent) && (!srcPresent || (srcVal == dstVal));         \
+    }
+
+DEFINE_FORMAT_VALUE_EQUAL_FUNC(int64_t, Int64);
+DEFINE_FORMAT_VALUE_EQUAL_FUNC(int32_t, Int32);
+
+struct FormatVerifierEntry {
+    const char* key;
+    std::function<bool(const char*, AMediaFormat*, AMediaFormat*)> equal;
+};
+
+static const FormatVerifierEntry kFieldsToPreserve[] = {
+        {AMEDIAFORMAT_KEY_DURATION, equalInt64},       {AMEDIAFORMAT_KEY_WIDTH, equalInt32},
+        {AMEDIAFORMAT_KEY_HEIGHT, equalInt32},         {AMEDIAFORMAT_KEY_FRAME_RATE, equalInt32},
+        {AMEDIAFORMAT_KEY_FRAME_COUNT, equalInt32},    {AMEDIAFORMAT_KEY_DISPLAY_WIDTH, equalInt32},
+        {AMEDIAFORMAT_KEY_DISPLAY_HEIGHT, equalInt32}, {AMEDIAFORMAT_KEY_SAR_WIDTH, equalInt32},
+        {AMEDIAFORMAT_KEY_SAR_HEIGHT, equalInt32},     {AMEDIAFORMAT_KEY_ROTATION, equalInt32},
+};
+
+class TestCallbacks : public MediaTranscoder::CallbackInterface {
+public:
+    virtual void onFinished(const MediaTranscoder* transcoder __unused) override {
+        std::unique_lock<std::mutex> lock(mMutex);
+        EXPECT_FALSE(mFinished);
+        mFinished = true;
+        mCondition.notify_all();
+    }
+
+    virtual void onError(const MediaTranscoder* transcoder __unused,
+                         media_status_t error) override {
+        std::unique_lock<std::mutex> lock(mMutex);
+        EXPECT_NE(error, AMEDIA_OK);
+        EXPECT_FALSE(mFinished);
+        mFinished = true;
+        mStatus = error;
+        mCondition.notify_all();
+    }
+
+    virtual void onProgressUpdate(const MediaTranscoder* transcoder __unused,
+                                  int32_t progress __unused) override {}
+
+    virtual void onCodecResourceLost(const MediaTranscoder* transcoder __unused,
+                                     const std::shared_ptr<const Parcel>& pausedState
+                                             __unused) override {}
+
+    void waitForTranscodingFinished() {
+        std::unique_lock<std::mutex> lock(mMutex);
+        while (!mFinished) {
+            mCondition.wait(lock);
+        }
+    }
+
+    media_status_t mStatus = AMEDIA_OK;
+
+private:
+    std::mutex mMutex;
+    std::condition_variable mCondition;
+    bool mFinished = false;
+};
+
+// Write-only, create file if non-existent, don't overwrite existing file.
+static constexpr int kOpenFlags = O_WRONLY | O_CREAT | O_EXCL;
+// User R+W permission.
+static constexpr int kFileMode = S_IRUSR | S_IWUSR;
+
+class MediaTranscoderTests : public ::testing::Test {
+public:
+    MediaTranscoderTests() { LOG(DEBUG) << "MediaTranscoderTests created"; }
+    ~MediaTranscoderTests() { LOG(DEBUG) << "MediaTranscoderTests destroyed"; }
+
+    void SetUp() override {
+        LOG(DEBUG) << "MediaTranscoderTests set up";
+        mCallbacks = std::make_shared<TestCallbacks>();
+    }
+
+    void TearDown() override {
+        LOG(DEBUG) << "MediaTranscoderTests tear down";
+        mCallbacks.reset();
+    }
+
+    void deleteFile(const char* path) { unlink(path); }
+
+    using FormatConfigurationCallback = std::function<AMediaFormat*(AMediaFormat*)>;
+    media_status_t transcodeHelper(const char* srcPath, const char* destPath,
+                                   FormatConfigurationCallback formatCallback) {
+        auto transcoder = MediaTranscoder::create(mCallbacks, nullptr);
+        EXPECT_NE(transcoder, nullptr);
+
+        const int srcFd = open(srcPath, O_RDONLY);
+        EXPECT_EQ(transcoder->configureSource(srcFd), AMEDIA_OK);
+
+        std::vector<std::shared_ptr<AMediaFormat>> trackFormats = transcoder->getTrackFormats();
+        EXPECT_GT(trackFormats.size(), 0);
+
+        for (int i = 0; i < trackFormats.size(); ++i) {
+            AMediaFormat* format = formatCallback(trackFormats[i].get());
+            EXPECT_EQ(transcoder->configureTrackFormat(i, format), AMEDIA_OK);
+
+            // Save original video track format for verification.
+            const char* mime = nullptr;
+            AMediaFormat_getString(trackFormats[i].get(), AMEDIAFORMAT_KEY_MIME, &mime);
+            if (strncmp(mime, "video/", 6) == 0) {
+                mSourceVideoFormat = trackFormats[i];
+            }
+
+            if (format != nullptr) {
+                AMediaFormat_delete(format);
+            }
+        }
+        deleteFile(destPath);
+        const int dstFd = open(destPath, kOpenFlags, kFileMode);
+        EXPECT_EQ(transcoder->configureDestination(dstFd), AMEDIA_OK);
+
+        media_status_t startStatus = transcoder->start();
+        EXPECT_EQ(startStatus, AMEDIA_OK);
+        if (startStatus == AMEDIA_OK) {
+            mCallbacks->waitForTranscodingFinished();
+        }
+        close(srcFd);
+        close(dstFd);
+
+        return mCallbacks->mStatus;
+    }
+
+    void testTranscodeVideo(const char* srcPath, const char* destPath, const char* dstMime) {
+        const int32_t kBitRate = 8 * 1000 * 1000;  // 8Mbs
+
+        EXPECT_EQ(
+                transcodeHelper(
+                        srcPath, destPath,
+                        [dstMime](AMediaFormat* sourceFormat) {
+                            AMediaFormat* format = nullptr;
+                            const char* mime = nullptr;
+                            AMediaFormat_getString(sourceFormat, AMEDIAFORMAT_KEY_MIME, &mime);
+
+                            if (strncmp(mime, "video/", 6) == 0) {
+                                format = AMediaFormat_new();
+                                AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY_BIT_RATE, kBitRate);
+
+                                if (dstMime != nullptr) {
+                                    AMediaFormat_setString(format, AMEDIAFORMAT_KEY_MIME, dstMime);
+                                }
+                            }
+                            return format;
+                        }),
+                AMEDIA_OK);
+
+        if (dstMime != nullptr) {
+            std::vector<FormatVerifierEntry> extraVerifiers = {
+                    {AMEDIAFORMAT_KEY_MIME,
+                     [dstMime](const char* key, AMediaFormat* src __unused, AMediaFormat* dst) {
+                         const char* mime = nullptr;
+                         AMediaFormat_getString(dst, key, &mime);
+                         return !strcmp(mime, dstMime);
+                     }},
+            };
+            verifyOutputFormat(destPath, &extraVerifiers);
+        } else {
+            verifyOutputFormat(destPath);
+        }
+    }
+
+    void verifyOutputFormat(const char* destPath,
+                            const std::vector<FormatVerifierEntry>* extraVerifiers = nullptr) {
+        int dstFd = open(destPath, O_RDONLY);
+        EXPECT_GT(dstFd, 0);
+        ssize_t fileSize = lseek(dstFd, 0, SEEK_END);
+        lseek(dstFd, 0, SEEK_SET);
+
+        std::shared_ptr<MediaSampleReader> sampleReader =
+                MediaSampleReaderNDK::createFromFd(dstFd, 0, fileSize);
+        ASSERT_NE(sampleReader, nullptr);
+
+        std::shared_ptr<AMediaFormat> videoFormat;
+        const size_t trackCount = sampleReader->getTrackCount();
+        for (size_t trackIndex = 0; trackIndex < trackCount; ++trackIndex) {
+            AMediaFormat* trackFormat = sampleReader->getTrackFormat(static_cast<int>(trackIndex));
+            if (trackFormat != nullptr) {
+                const char* mime = nullptr;
+                AMediaFormat_getString(trackFormat, AMEDIAFORMAT_KEY_MIME, &mime);
+
+                if (strncmp(mime, "video/", 6) == 0) {
+                    LOG(INFO) << "Track # " << trackIndex << ": "
+                              << AMediaFormat_toString(trackFormat);
+                    videoFormat = std::shared_ptr<AMediaFormat>(trackFormat, &AMediaFormat_delete);
+                    break;
+                }
+            }
+        }
+
+        EXPECT_NE(videoFormat, nullptr);
+
+        LOG(INFO) << "source video format: " << AMediaFormat_toString(mSourceVideoFormat.get());
+        LOG(INFO) << "transcoded video format: " << AMediaFormat_toString(videoFormat.get());
+
+        for (int i = 0; i < (sizeof(kFieldsToPreserve) / sizeof(kFieldsToPreserve[0])); ++i) {
+            EXPECT_TRUE(kFieldsToPreserve[i].equal(kFieldsToPreserve[i].key,
+                                                   mSourceVideoFormat.get(), videoFormat.get()))
+                    << "Failed at key " << kFieldsToPreserve[i].key;
+        }
+
+        if (extraVerifiers != nullptr) {
+            for (int i = 0; i < extraVerifiers->size(); ++i) {
+                const FormatVerifierEntry& entry = (*extraVerifiers)[i];
+                EXPECT_TRUE(entry.equal(entry.key, mSourceVideoFormat.get(), videoFormat.get()));
+            }
+        }
+
+        close(dstFd);
+    }
+
+    std::shared_ptr<TestCallbacks> mCallbacks;
+    std::shared_ptr<AMediaFormat> mSourceVideoFormat;
+};
+
+TEST_F(MediaTranscoderTests, TestPassthrough) {
+    const char* srcPath = "/data/local/tmp/TranscodingTestAssets/cubicle_avc_480x240_aac_24KHz.mp4";
+    const char* destPath = "/data/local/tmp/MediaTranscoder_Passthrough.MP4";
+
+    EXPECT_EQ(transcodeHelper(srcPath, destPath, [](AMediaFormat*) { return nullptr; }), AMEDIA_OK);
+
+    verifyOutputFormat(destPath);
+}
+
+TEST_F(MediaTranscoderTests, TestVideoTranscode_AvcToAvc_Basic) {
+    const char* srcPath = "/data/local/tmp/TranscodingTestAssets/cubicle_avc_480x240_aac_24KHz.mp4";
+    const char* destPath = "/data/local/tmp/MediaTranscoder_VideoTranscode_AvcToAvc_Basic.MP4";
+    testTranscodeVideo(srcPath, destPath, nullptr /*dstMime*/);
+}
+
+TEST_F(MediaTranscoderTests, TestVideoTranscode_HevcToAvc_Basic) {
+    const char* srcPath = "/data/local/tmp/TranscodingTestAssets/jets_hevc_1280x720_20Mbps.mp4";
+    const char* destPath = "/data/local/tmp/MediaTranscoder_VideoTranscode_HevcToAvc_Basic.MP4";
+    testTranscodeVideo(srcPath, destPath, AMEDIA_MIMETYPE_VIDEO_AVC);
+}
+
+TEST_F(MediaTranscoderTests, TestVideoTranscode_HevcToAvc_Rotation) {
+    const char* srcPath =
+            "/data/local/tmp/TranscodingTestAssets/desk_hevc_1920x1080_aac_48KHz_rot90.mp4";
+    const char* destPath = "/data/local/tmp/MediaTranscoder_VideoTranscode_HevcToAvc_Rotation.MP4";
+    testTranscodeVideo(srcPath, destPath, AMEDIA_MIMETYPE_VIDEO_AVC);
+}
+
+}  // namespace android
+
+int main(int argc, char** argv) {
+    ::testing::InitGoogleTest(&argc, argv);
+    return RUN_ALL_TESTS();
+}
diff --git a/media/libmediatranscoding/transcoder/tests/PassthroughTrackTranscoderTests.cpp b/media/libmediatranscoding/transcoder/tests/PassthroughTrackTranscoderTests.cpp
new file mode 100644
index 0000000..b79f58c
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/tests/PassthroughTrackTranscoderTests.cpp
@@ -0,0 +1,306 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// Unit Test for PassthroughTrackTranscoder
+
+// #define LOG_NDEBUG 0
+#define LOG_TAG "PassthroughTrackTranscoderTests"
+
+#include <android-base/logging.h>
+#include <fcntl.h>
+#include <gtest/gtest.h>
+#include <media/MediaSampleReaderNDK.h>
+#include <media/NdkMediaExtractor.h>
+#include <media/PassthroughTrackTranscoder.h>
+#include <openssl/md5.h>
+
+#include <vector>
+
+#include "TrackTranscoderTestUtils.h"
+
+namespace android {
+
+class PassthroughTrackTranscoderTests : public ::testing::Test {
+public:
+    PassthroughTrackTranscoderTests() { LOG(DEBUG) << "PassthroughTrackTranscoderTests created"; }
+
+    void SetUp() override { LOG(DEBUG) << "PassthroughTrackTranscoderTests set up"; }
+
+    void initSourceAndExtractor() {
+        const char* sourcePath =
+                "/data/local/tmp/TranscodingTestAssets/cubicle_avc_480x240_aac_24KHz.mp4";
+
+        mExtractor = AMediaExtractor_new();
+        ASSERT_NE(mExtractor, nullptr);
+
+        mSourceFd = open(sourcePath, O_RDONLY);
+        ASSERT_GT(mSourceFd, 0);
+
+        mSourceFileSize = lseek(mSourceFd, 0, SEEK_END);
+        lseek(mSourceFd, 0, SEEK_SET);
+
+        media_status_t status =
+                AMediaExtractor_setDataSourceFd(mExtractor, mSourceFd, 0, mSourceFileSize);
+        ASSERT_EQ(status, AMEDIA_OK);
+
+        const size_t trackCount = AMediaExtractor_getTrackCount(mExtractor);
+        for (size_t trackIndex = 0; trackIndex < trackCount; trackIndex++) {
+            AMediaFormat* trackFormat = AMediaExtractor_getTrackFormat(mExtractor, trackIndex);
+            ASSERT_NE(trackFormat, nullptr);
+
+            const char* mime = nullptr;
+            AMediaFormat_getString(trackFormat, AMEDIAFORMAT_KEY_MIME, &mime);
+            ASSERT_NE(mime, nullptr);
+
+            if (strncmp(mime, "audio/", 6) == 0) {
+                mTrackIndex = trackIndex;
+                AMediaExtractor_selectTrack(mExtractor, trackIndex);
+                break;
+            }
+
+            AMediaFormat_delete(trackFormat);
+        }
+    }
+
+    void TearDown() override {
+        LOG(DEBUG) << "PassthroughTrackTranscoderTests tear down";
+        if (mExtractor != nullptr) {
+            AMediaExtractor_delete(mExtractor);
+            mExtractor = nullptr;
+        }
+        if (mSourceFd > 0) {
+            close(mSourceFd);
+            mSourceFd = -1;
+        }
+    }
+
+    ~PassthroughTrackTranscoderTests() {
+        LOG(DEBUG) << "PassthroughTrackTranscoderTests destroyed";
+    }
+
+    int mSourceFd = -1;
+    size_t mSourceFileSize;
+    int mTrackIndex;
+    AMediaExtractor* mExtractor = nullptr;
+};
+
+/** Helper class for comparing sample data using checksums. */
+class SampleID {
+public:
+    SampleID(const uint8_t* sampleData, ssize_t sampleSize) : mSize{sampleSize} {
+        MD5_CTX md5Ctx;
+        MD5_Init(&md5Ctx);
+        MD5_Update(&md5Ctx, sampleData, sampleSize);
+        MD5_Final(mChecksum, &md5Ctx);
+    }
+
+    bool operator==(const SampleID& rhs) const {
+        return mSize == rhs.mSize && memcmp(mChecksum, rhs.mChecksum, MD5_DIGEST_LENGTH) == 0;
+    }
+
+    uint8_t mChecksum[MD5_DIGEST_LENGTH];
+    ssize_t mSize;
+};
+
+/**
+ * Tests that the output samples of PassthroughTrackTranscoder are identical to the source samples
+ * and in correct order.
+ */
+TEST_F(PassthroughTrackTranscoderTests, SampleEquality) {
+    LOG(DEBUG) << "Testing SampleEquality";
+
+    ssize_t bufferSize = 1024;
+    auto buffer = std::make_unique<uint8_t[]>(bufferSize);
+
+    initSourceAndExtractor();
+
+    // Loop through all samples of a track and store size and checksums.
+    std::vector<SampleID> sampleChecksums;
+
+    int64_t sampleTime = AMediaExtractor_getSampleTime(mExtractor);
+    while (sampleTime != -1) {
+        if (AMediaExtractor_getSampleTrackIndex(mExtractor) == mTrackIndex) {
+            ssize_t sampleSize = AMediaExtractor_getSampleSize(mExtractor);
+            if (bufferSize < sampleSize) {
+                bufferSize = sampleSize;
+                buffer = std::make_unique<uint8_t[]>(bufferSize);
+            }
+
+            ssize_t bytesRead =
+                    AMediaExtractor_readSampleData(mExtractor, buffer.get(), bufferSize);
+            ASSERT_EQ(bytesRead, sampleSize);
+
+            SampleID sampleId{buffer.get(), sampleSize};
+            sampleChecksums.push_back(sampleId);
+        }
+
+        AMediaExtractor_advance(mExtractor);
+        sampleTime = AMediaExtractor_getSampleTime(mExtractor);
+    }
+
+    // Create and start the transcoder.
+    std::shared_ptr<TestCallback> callback = std::make_shared<TestCallback>();
+    PassthroughTrackTranscoder transcoder{callback};
+
+    std::shared_ptr<MediaSampleReader> mediaSampleReader =
+            MediaSampleReaderNDK::createFromFd(mSourceFd, 0, mSourceFileSize);
+    EXPECT_NE(mediaSampleReader, nullptr);
+
+    EXPECT_EQ(transcoder.configure(mediaSampleReader, mTrackIndex, nullptr /* destinationFormat */),
+              AMEDIA_OK);
+    ASSERT_TRUE(transcoder.start());
+
+    // Pull transcoder's output samples and compare against input checksums.
+    uint64_t sampleCount = 0;
+    std::shared_ptr<MediaSample> sample;
+    std::shared_ptr<MediaSampleQueue> outputQueue = transcoder.getOutputQueue();
+    while (!outputQueue->dequeue(&sample)) {
+        ASSERT_NE(sample, nullptr);
+
+        if (sample->info.flags & SAMPLE_FLAG_END_OF_STREAM) {
+            break;
+        }
+
+        SampleID sampleId{sample->buffer, static_cast<ssize_t>(sample->info.size)};
+        EXPECT_TRUE(sampleId == sampleChecksums[sampleCount]);
+        ++sampleCount;
+    }
+
+    EXPECT_EQ(sampleCount, sampleChecksums.size());
+    EXPECT_TRUE(transcoder.stop());
+}
+
+/** Class for testing PassthroughTrackTranscoder's built in buffer pool. */
+class BufferPoolTests : public ::testing::Test {
+public:
+    static constexpr int kMaxBuffers = 5;
+
+    void SetUp() override {
+        LOG(DEBUG) << "BufferPoolTests set up";
+        mBufferPool = std::make_shared<PassthroughTrackTranscoder::BufferPool>(kMaxBuffers);
+    }
+
+    void TearDown() override {
+        LOG(DEBUG) << "BufferPoolTests tear down";
+        mBufferPool.reset();
+    }
+
+    std::shared_ptr<PassthroughTrackTranscoder::BufferPool> mBufferPool;
+};
+
+TEST_F(BufferPoolTests, BufferReuse) {
+    LOG(DEBUG) << "Testing BufferReuse";
+
+    uint8_t* buffer1 = mBufferPool->getBufferWithSize(10);
+    EXPECT_NE(buffer1, nullptr);
+
+    uint8_t* buffer2 = mBufferPool->getBufferWithSize(10);
+    EXPECT_NE(buffer2, nullptr);
+    EXPECT_NE(buffer2, buffer1);
+
+    mBufferPool->returnBuffer(buffer1);
+
+    uint8_t* buffer3 = mBufferPool->getBufferWithSize(10);
+    EXPECT_NE(buffer3, nullptr);
+    EXPECT_NE(buffer3, buffer2);
+    EXPECT_EQ(buffer3, buffer1);
+
+    mBufferPool->returnBuffer(buffer2);
+
+    uint8_t* buffer4 = mBufferPool->getBufferWithSize(10);
+    EXPECT_NE(buffer4, nullptr);
+    EXPECT_NE(buffer4, buffer1);
+    EXPECT_EQ(buffer4, buffer2);
+}
+
+TEST_F(BufferPoolTests, SmallestAvailableBuffer) {
+    LOG(DEBUG) << "Testing SmallestAvailableBuffer";
+
+    uint8_t* buffer1 = mBufferPool->getBufferWithSize(10);
+    EXPECT_NE(buffer1, nullptr);
+
+    uint8_t* buffer2 = mBufferPool->getBufferWithSize(15);
+    EXPECT_NE(buffer2, nullptr);
+    EXPECT_NE(buffer2, buffer1);
+
+    uint8_t* buffer3 = mBufferPool->getBufferWithSize(20);
+    EXPECT_NE(buffer3, nullptr);
+    EXPECT_NE(buffer3, buffer1);
+    EXPECT_NE(buffer3, buffer2);
+
+    mBufferPool->returnBuffer(buffer1);
+    mBufferPool->returnBuffer(buffer2);
+    mBufferPool->returnBuffer(buffer3);
+
+    uint8_t* buffer4 = mBufferPool->getBufferWithSize(11);
+    EXPECT_NE(buffer4, nullptr);
+    EXPECT_EQ(buffer4, buffer2);
+
+    uint8_t* buffer5 = mBufferPool->getBufferWithSize(11);
+    EXPECT_NE(buffer5, nullptr);
+    EXPECT_EQ(buffer5, buffer3);
+}
+
+TEST_F(BufferPoolTests, AddAfterAbort) {
+    LOG(DEBUG) << "Testing AddAfterAbort";
+
+    uint8_t* buffer1 = mBufferPool->getBufferWithSize(10);
+    EXPECT_NE(buffer1, nullptr);
+    mBufferPool->returnBuffer(buffer1);
+
+    mBufferPool->abort();
+    uint8_t* buffer2 = mBufferPool->getBufferWithSize(10);
+    EXPECT_EQ(buffer2, nullptr);
+}
+
+TEST_F(BufferPoolTests, MaximumBuffers) {
+    LOG(DEBUG) << "Testing MaximumBuffers";
+
+    static constexpr size_t kBufferBaseSize = 10;
+    std::unordered_map<uint8_t*, size_t> addressSizeMap;
+
+    // Get kMaxBuffers * 2 new buffers with increasing size.
+    // (Note: Once kMaxBuffers have been allocated, the pool will delete old buffers to accommodate
+    // new ones making the deleted buffers free to be reused by the system's heap memory allocator.
+    // So we cannot test that each new pointer is unique here.)
+    for (int i = 0; i < kMaxBuffers * 2; i++) {
+        size_t size = kBufferBaseSize + i;
+        uint8_t* buffer = mBufferPool->getBufferWithSize(size);
+        EXPECT_NE(buffer, nullptr);
+        addressSizeMap[buffer] = size;
+        mBufferPool->returnBuffer(buffer);
+    }
+
+    // Verify that the pool now contains the kMaxBuffers largest buffers allocated above and that
+    // the buffer of matching size is returned.
+    for (int i = kMaxBuffers; i < kMaxBuffers * 2; i++) {
+        size_t size = kBufferBaseSize + i;
+        uint8_t* buffer = mBufferPool->getBufferWithSize(size);
+        EXPECT_NE(buffer, nullptr);
+
+        auto it = addressSizeMap.find(buffer);
+        ASSERT_NE(it, addressSizeMap.end());
+        EXPECT_EQ(it->second, size);
+        mBufferPool->returnBuffer(buffer);
+    }
+}
+
+}  // namespace android
+
+int main(int argc, char** argv) {
+    ::testing::InitGoogleTest(&argc, argv);
+    return RUN_ALL_TESTS();
+}
diff --git a/media/libmediatranscoding/transcoder/tests/README.md b/media/libmediatranscoding/transcoder/tests/README.md
new file mode 100644
index 0000000..59417b0
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/tests/README.md
@@ -0,0 +1,14 @@
+## Transcoder Testing ##
+---
+#### Transcoder unit tests :
+To run all transcoder unit tests, run the supplied script from this folder:
+
+```
+./build_and_run_all_unit_tests.sh
+```
+
+To run individual unit test modules, use atest:
+
+```
+atest MediaSampleReaderNDK
+```
diff --git a/media/libmediatranscoding/transcoder/tests/TrackTranscoderTestUtils.h b/media/libmediatranscoding/transcoder/tests/TrackTranscoderTestUtils.h
new file mode 100644
index 0000000..54b42fe
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/tests/TrackTranscoderTestUtils.h
@@ -0,0 +1,91 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <media/MediaTrackTranscoder.h>
+#include <media/MediaTrackTranscoderCallback.h>
+
+#include <condition_variable>
+#include <memory>
+#include <mutex>
+
+namespace android {
+
+//
+// This file contains test utilities used by more than one track transcoder test.
+//
+
+class TrackTranscoderTestUtils {
+public:
+    static std::shared_ptr<AMediaFormat> getDefaultVideoDestinationFormat(
+            AMediaFormat* sourceFormat) {
+        // Default video destination format setup.
+        static constexpr float kFrameRate = 30.0f;
+        static constexpr float kIFrameInterval = 30.0f;
+        static constexpr int32_t kBitRate = 2 * 1000 * 1000;
+        static constexpr int32_t kColorFormatSurface = 0x7f000789;
+
+        AMediaFormat* destinationFormat = AMediaFormat_new();
+        AMediaFormat_copy(destinationFormat, sourceFormat);
+        AMediaFormat_setFloat(destinationFormat, AMEDIAFORMAT_KEY_FRAME_RATE, kFrameRate);
+        AMediaFormat_setFloat(destinationFormat, AMEDIAFORMAT_KEY_I_FRAME_INTERVAL,
+                              kIFrameInterval);
+        AMediaFormat_setInt32(destinationFormat, AMEDIAFORMAT_KEY_BIT_RATE, kBitRate);
+        AMediaFormat_setInt32(destinationFormat, AMEDIAFORMAT_KEY_COLOR_FORMAT,
+                              kColorFormatSurface);
+
+        return std::shared_ptr<AMediaFormat>(destinationFormat,
+                                             std::bind(AMediaFormat_delete, std::placeholders::_1));
+    }
+};
+
+class TestCallback : public MediaTrackTranscoderCallback {
+public:
+    TestCallback() = default;
+    ~TestCallback() = default;
+
+    // MediaTrackTranscoderCallback
+    void onTrackFormatAvailable(const MediaTrackTranscoder* transcoder __unused) {}
+
+    void onTrackFinished(const MediaTrackTranscoder* transcoder __unused) {
+        std::unique_lock<std::mutex> lock(mMutex);
+        mTranscodingFinished = true;
+        mCv.notify_all();
+    }
+
+    void onTrackError(const MediaTrackTranscoder* transcoder __unused, media_status_t status) {
+        std::unique_lock<std::mutex> lock(mMutex);
+        mTranscodingFinished = true;
+        mStatus = status;
+        mCv.notify_all();
+    }
+    // ~MediaTrackTranscoderCallback
+
+    media_status_t waitUntilFinished() {
+        std::unique_lock<std::mutex> lock(mMutex);
+        while (!mTranscodingFinished) {
+            mCv.wait(lock);
+        }
+        return mStatus;
+    }
+
+private:
+    media_status_t mStatus = AMEDIA_OK;
+    std::mutex mMutex;
+    std::condition_variable mCv;
+    bool mTranscodingFinished = false;
+};
+
+};  // namespace android
diff --git a/media/libmediatranscoding/transcoder/tests/VideoTrackTranscoderTests.cpp b/media/libmediatranscoding/transcoder/tests/VideoTrackTranscoderTests.cpp
new file mode 100644
index 0000000..5f2cd12
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/tests/VideoTrackTranscoderTests.cpp
@@ -0,0 +1,221 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// Unit Test for VideoTrackTranscoder
+
+// #define LOG_NDEBUG 0
+#define LOG_TAG "VideoTrackTranscoderTests"
+
+#include <android-base/logging.h>
+#include <fcntl.h>
+#include <gtest/gtest.h>
+#include <media/MediaSampleReaderNDK.h>
+#include <media/VideoTrackTranscoder.h>
+#include <utils/Timers.h>
+
+#include "TrackTranscoderTestUtils.h"
+
+namespace android {
+
+// TODO(b/155304421): Implement more advanced video specific tests:
+//  - Codec conversions (HEVC -> AVC).
+//  - Bitrate validation.
+//  - Output frame validation through PSNR.
+
+class VideoTrackTranscoderTests : public ::testing::Test {
+public:
+    VideoTrackTranscoderTests() { LOG(DEBUG) << "VideoTrackTranscoderTests created"; }
+
+    void SetUp() override {
+        LOG(DEBUG) << "VideoTrackTranscoderTests set up";
+        const char* sourcePath =
+                "/data/local/tmp/TranscodingTestAssets/cubicle_avc_480x240_aac_24KHz.mp4";
+
+        const int sourceFd = open(sourcePath, O_RDONLY);
+        ASSERT_GT(sourceFd, 0);
+
+        const off_t fileSize = lseek(sourceFd, 0, SEEK_END);
+        lseek(sourceFd, 0, SEEK_SET);
+
+        mMediaSampleReader = MediaSampleReaderNDK::createFromFd(sourceFd, 0, fileSize);
+        ASSERT_NE(mMediaSampleReader, nullptr);
+        close(sourceFd);
+
+        for (size_t trackIndex = 0; trackIndex < mMediaSampleReader->getTrackCount();
+             ++trackIndex) {
+            AMediaFormat* trackFormat = mMediaSampleReader->getTrackFormat(trackIndex);
+            ASSERT_NE(trackFormat, nullptr);
+
+            const char* mime = nullptr;
+            AMediaFormat_getString(trackFormat, AMEDIAFORMAT_KEY_MIME, &mime);
+            ASSERT_NE(mime, nullptr);
+
+            if (strncmp(mime, "video/", 6) == 0) {
+                mTrackIndex = trackIndex;
+
+                mSourceFormat = std::shared_ptr<AMediaFormat>(
+                        trackFormat, std::bind(AMediaFormat_delete, std::placeholders::_1));
+                ASSERT_NE(mSourceFormat, nullptr);
+
+                mDestinationFormat =
+                        TrackTranscoderTestUtils::getDefaultVideoDestinationFormat(trackFormat);
+                ASSERT_NE(mDestinationFormat, nullptr);
+                break;
+            }
+
+            AMediaFormat_delete(trackFormat);
+        }
+
+        ASSERT_NE(mSourceFormat, nullptr);
+    }
+
+    void TearDown() override { LOG(DEBUG) << "VideoTrackTranscoderTests tear down"; }
+
+    ~VideoTrackTranscoderTests() { LOG(DEBUG) << "VideoTrackTranscoderTests destroyed"; }
+
+    std::shared_ptr<MediaSampleReader> mMediaSampleReader;
+    int mTrackIndex;
+    std::shared_ptr<AMediaFormat> mSourceFormat;
+    std::shared_ptr<AMediaFormat> mDestinationFormat;
+};
+
+TEST_F(VideoTrackTranscoderTests, SampleSanity) {
+    LOG(DEBUG) << "Testing SampleSanity";
+    std::shared_ptr<TestCallback> callback = std::make_shared<TestCallback>();
+    auto transcoder = VideoTrackTranscoder::create(callback);
+
+    EXPECT_EQ(transcoder->configure(mMediaSampleReader, mTrackIndex, mDestinationFormat),
+              AMEDIA_OK);
+    ASSERT_TRUE(transcoder->start());
+
+    std::shared_ptr<MediaSampleQueue> outputQueue = transcoder->getOutputQueue();
+    std::thread sampleConsumerThread{[&outputQueue] {
+        uint64_t sampleCount = 0;
+        std::shared_ptr<MediaSample> sample;
+        while (!outputQueue->dequeue(&sample)) {
+            ASSERT_NE(sample, nullptr);
+            const uint32_t flags = sample->info.flags;
+
+            if (sampleCount == 0) {
+                // Expect first sample to be a codec config.
+                EXPECT_TRUE((flags & SAMPLE_FLAG_CODEC_CONFIG) != 0);
+                EXPECT_TRUE((flags & SAMPLE_FLAG_SYNC_SAMPLE) == 0);
+                EXPECT_TRUE((flags & SAMPLE_FLAG_END_OF_STREAM) == 0);
+                EXPECT_TRUE((flags & SAMPLE_FLAG_PARTIAL_FRAME) == 0);
+            } else if (sampleCount == 1) {
+                // Expect second sample to be a sync sample.
+                EXPECT_TRUE((flags & SAMPLE_FLAG_CODEC_CONFIG) == 0);
+                EXPECT_TRUE((flags & SAMPLE_FLAG_SYNC_SAMPLE) != 0);
+                EXPECT_TRUE((flags & SAMPLE_FLAG_END_OF_STREAM) == 0);
+            }
+
+            if (!(flags & SAMPLE_FLAG_END_OF_STREAM)) {
+                // Expect a valid buffer unless it is EOS.
+                EXPECT_NE(sample->buffer, nullptr);
+                EXPECT_NE(sample->bufferId, 0xBAADF00D);
+                EXPECT_GT(sample->info.size, 0);
+            }
+
+            ++sampleCount;
+            if (sample->info.flags & SAMPLE_FLAG_END_OF_STREAM) {
+                break;
+            }
+            sample.reset();
+        }
+    }};
+
+    EXPECT_EQ(callback->waitUntilFinished(), AMEDIA_OK);
+    EXPECT_TRUE(transcoder->stop());
+
+    sampleConsumerThread.join();
+}
+
+// VideoTrackTranscoder needs a valid destination format.
+TEST_F(VideoTrackTranscoderTests, NullDestinationFormat) {
+    LOG(DEBUG) << "Testing NullDestinationFormat";
+    std::shared_ptr<TestCallback> callback = std::make_shared<TestCallback>();
+    std::shared_ptr<AMediaFormat> nullFormat;
+
+    auto transcoder = VideoTrackTranscoder::create(callback);
+    EXPECT_EQ(transcoder->configure(mMediaSampleReader, 0 /* trackIndex */, nullFormat),
+              AMEDIA_ERROR_INVALID_PARAMETER);
+}
+
+TEST_F(VideoTrackTranscoderTests, LingeringEncoder) {
+    struct {
+        void wait() {
+            std::unique_lock<std::mutex> lock(mMutex);
+            while (!mSignaled) {
+                mCondition.wait(lock);
+            }
+        }
+
+        void signal() {
+            std::unique_lock<std::mutex> lock(mMutex);
+            mSignaled = true;
+            mCondition.notify_all();
+        }
+
+        std::mutex mMutex;
+        std::condition_variable mCondition;
+        bool mSignaled = false;
+    } semaphore;
+
+    auto callback = std::make_shared<TestCallback>();
+    auto transcoder = VideoTrackTranscoder::create(callback);
+
+    EXPECT_EQ(transcoder->configure(mMediaSampleReader, mTrackIndex, mDestinationFormat),
+              AMEDIA_OK);
+    ASSERT_TRUE(transcoder->start());
+
+    std::shared_ptr<MediaSampleQueue> outputQueue = transcoder->getOutputQueue();
+    std::vector<std::shared_ptr<MediaSample>> samples;
+    std::thread sampleConsumerThread([&outputQueue, &samples, &semaphore] {
+        std::shared_ptr<MediaSample> sample;
+        while (samples.size() < 10 && !outputQueue->dequeue(&sample)) {
+            ASSERT_NE(sample, nullptr);
+            samples.push_back(sample);
+
+            if (sample->info.flags & SAMPLE_FLAG_END_OF_STREAM) {
+                break;
+            }
+            sample.reset();
+        }
+
+        semaphore.signal();
+    });
+
+    // Wait for the encoder to output samples before stopping and releasing the transcoder.
+    semaphore.wait();
+
+    EXPECT_TRUE(transcoder->stop());
+    transcoder.reset();
+    sampleConsumerThread.join();
+
+    // Return buffers to the codec so that it can resume processing, but keep one buffer to avoid
+    // the codec being released.
+    samples.resize(1);
+
+    // Wait for async codec events.
+    std::this_thread::sleep_for(std::chrono::seconds(1));
+}
+
+}  // namespace android
+
+int main(int argc, char** argv) {
+    ::testing::InitGoogleTest(&argc, argv);
+    return RUN_ALL_TESTS();
+}
diff --git a/media/libmediatranscoding/transcoder/tests/build_and_run_all_unit_tests.sh b/media/libmediatranscoding/transcoder/tests/build_and_run_all_unit_tests.sh
new file mode 100755
index 0000000..241178d
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/tests/build_and_run_all_unit_tests.sh
@@ -0,0 +1,46 @@
+#!/bin/bash
+#
+# Run tests in this directory.
+#
+
+if [ "$SYNC_FINISHED" != true ]; then
+  if [ -z "$ANDROID_BUILD_TOP" ]; then
+      echo "Android build environment not set"
+      exit -1
+  fi
+
+  # ensure we have mm
+  . $ANDROID_BUILD_TOP/build/envsetup.sh
+
+  mm
+
+  echo "waiting for device"
+
+  adb root && adb wait-for-device remount && adb sync
+fi
+
+# Push the files onto the device.
+. $ANDROID_BUILD_TOP/frameworks/av/media/libmediatranscoding/tests/assets/push_assets.sh
+
+echo "========================================"
+
+echo "testing MediaSampleReaderNDK"
+adb shell /data/nativetest64/MediaSampleReaderNDKTests/MediaSampleReaderNDKTests
+
+echo "testing MediaSampleQueue"
+adb shell /data/nativetest64/MediaSampleQueueTests/MediaSampleQueueTests
+
+echo "testing MediaTrackTranscoder"
+adb shell /data/nativetest64/MediaTrackTranscoderTests/MediaTrackTranscoderTests
+
+echo "testing VideoTrackTranscoder"
+adb shell /data/nativetest64/VideoTrackTranscoderTests/VideoTrackTranscoderTests
+
+echo "testing PassthroughTrackTranscoder"
+adb shell /data/nativetest64/PassthroughTrackTranscoderTests/PassthroughTrackTranscoderTests
+
+echo "testing MediaSampleWriter"
+adb shell /data/nativetest64/MediaSampleWriterTests/MediaSampleWriterTests
+
+echo "testing MediaTranscoder"
+adb shell /data/nativetest64/MediaTranscoderTests/MediaTranscoderTests
diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp
index 63ab654..6941198 100644
--- a/media/libstagefright/ACodec.cpp
+++ b/media/libstagefright/ACodec.cpp
@@ -555,6 +555,7 @@
       mShutdownInProgress(false),
       mExplicitShutdown(false),
       mIsLegacyVP9Decoder(false),
+      mIsStreamCorruptFree(false),
       mEncoderDelay(0),
       mEncoderPadding(0),
       mRotationDegrees(0),
@@ -2323,6 +2324,12 @@
         mChannelMaskPresent = false;
     }
 
+    int32_t isCorruptFree = 0;
+    if (msg->findInt32("corrupt-free", &isCorruptFree)) {
+        mIsStreamCorruptFree = isCorruptFree == 1 ? true : false;
+        ALOGV("corrupt-free=[%d]", mIsStreamCorruptFree);
+    }
+
     int32_t maxInputSize;
     if (msg->findInt32("max-input-size", &maxInputSize)) {
         err = setMinBufferSize(kPortIndexInput, (size_t)maxInputSize);
@@ -4127,6 +4134,29 @@
         ALOGI("setupVideoEncoder succeeded");
     }
 
+    // Video should be encoded as stand straight because RTP protocol
+    // can provide rotation information only if CVO is supported.
+    // This needs to be added to support non-CVO case for video streaming scenario.
+    int32_t rotation = 0;
+    if (msg->findInt32("rotation-degrees", &rotation)) {
+        OMX_CONFIG_ROTATIONTYPE config;
+        InitOMXParams(&config);
+        config.nPortIndex = kPortIndexOutput;
+        status_t err = mOMXNode->getConfig(
+                (OMX_INDEXTYPE)OMX_IndexConfigCommonRotate, &config, sizeof(config));
+        if (err != OK) {
+            ALOGW("Failed to getConfig of OMX_IndexConfigCommonRotate(err %d)", err);
+        }
+        config.nRotation = rotation;
+        err = mOMXNode->setConfig(
+                (OMX_INDEXTYPE)OMX_IndexConfigCommonRotate, &config, sizeof(config));
+
+        ALOGD("Applying encoder-rotation=[%d] to video encoder.", config.nRotation);
+        if (err != OK) {
+            ALOGW("Failed to setConfig of OMX_IndexConfigCommonRotate(err %d)", err);
+        }
+    }
+
     return err;
 }
 
@@ -5974,6 +6004,12 @@
         return false;
     }
 
+    if (mCodec->mIsStreamCorruptFree && data1 == (OMX_U32)OMX_ErrorStreamCorrupt) {
+        ALOGV("[%s] handle OMX_ErrorStreamCorrupt as a normal operation",
+                mCodec->mComponentName.c_str());
+        return true;
+    }
+
     ALOGE("[%s] ERROR(0x%08x)", mCodec->mComponentName.c_str(), data1);
 
     // verify OMX component sends back an error we expect.
@@ -6081,6 +6117,13 @@
         return;
     }
 
+    int32_t cvo;
+    if (mCodec->mNativeWindow != NULL && buffer != NULL &&
+            buffer->meta()->findInt32("cvo", &cvo)) {
+        ALOGV("cvo(%d) found in buffer #%u", cvo, bufferID);
+        setNativeWindowRotation(mCodec->mNativeWindow.get(), cvo);
+    }
+
     info->mStatus = BufferInfo::OWNED_BY_US;
     info->mData = buffer;
 
diff --git a/media/libstagefright/CameraSource.cpp b/media/libstagefright/CameraSource.cpp
index 9b3f420..bcf418a 100644
--- a/media/libstagefright/CameraSource.cpp
+++ b/media/libstagefright/CameraSource.cpp
@@ -46,88 +46,6 @@
 
 static const int64_t CAMERA_SOURCE_TIMEOUT_NS = 3000000000LL;
 
-struct CameraSourceListener : public CameraListener {
-    explicit CameraSourceListener(const sp<CameraSource> &source);
-
-    virtual void notify(int32_t msgType, int32_t ext1, int32_t ext2);
-    virtual void postData(int32_t msgType, const sp<IMemory> &dataPtr,
-                          camera_frame_metadata_t *metadata);
-
-    virtual void postDataTimestamp(
-            nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr);
-
-    virtual void postRecordingFrameHandleTimestamp(nsecs_t timestamp, native_handle_t* handle);
-
-    virtual void postRecordingFrameHandleTimestampBatch(
-                const std::vector<nsecs_t>& timestamps,
-                const std::vector<native_handle_t*>& handles);
-
-protected:
-    virtual ~CameraSourceListener();
-
-private:
-    wp<CameraSource> mSource;
-
-    CameraSourceListener(const CameraSourceListener &);
-    CameraSourceListener &operator=(const CameraSourceListener &);
-};
-
-CameraSourceListener::CameraSourceListener(const sp<CameraSource> &source)
-    : mSource(source) {
-}
-
-CameraSourceListener::~CameraSourceListener() {
-}
-
-void CameraSourceListener::notify(int32_t msgType, int32_t ext1, int32_t ext2) {
-    UNUSED_UNLESS_VERBOSE(msgType);
-    UNUSED_UNLESS_VERBOSE(ext1);
-    UNUSED_UNLESS_VERBOSE(ext2);
-    ALOGV("notify(%d, %d, %d)", msgType, ext1, ext2);
-}
-
-void CameraSourceListener::postData(int32_t msgType, const sp<IMemory> &dataPtr,
-                                    camera_frame_metadata_t * /* metadata */) {
-    ALOGV("postData(%d, ptr:%p, size:%zu)",
-         msgType, dataPtr->unsecurePointer(), dataPtr->size());
-
-    sp<CameraSource> source = mSource.promote();
-    if (source.get() != NULL) {
-        source->dataCallback(msgType, dataPtr);
-    }
-}
-
-void CameraSourceListener::postDataTimestamp(
-        nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr) {
-
-    sp<CameraSource> source = mSource.promote();
-    if (source.get() != NULL) {
-        source->dataCallbackTimestamp(timestamp/1000, msgType, dataPtr);
-    }
-}
-
-void CameraSourceListener::postRecordingFrameHandleTimestamp(nsecs_t timestamp,
-        native_handle_t* handle) {
-    sp<CameraSource> source = mSource.promote();
-    if (source.get() != nullptr) {
-        source->recordingFrameHandleCallbackTimestamp(timestamp/1000, handle);
-    }
-}
-
-void CameraSourceListener::postRecordingFrameHandleTimestampBatch(
-        const std::vector<nsecs_t>& timestamps,
-        const std::vector<native_handle_t*>& handles) {
-    sp<CameraSource> source = mSource.promote();
-    if (source.get() != nullptr) {
-        int n = timestamps.size();
-        std::vector<nsecs_t> modifiedTimestamps(n);
-        for (int i = 0; i < n; i++) {
-            modifiedTimestamps[i] = timestamps[i] / 1000;
-        }
-        source->recordingFrameHandleCallbackTimestampBatch(modifiedTimestamps, handles);
-    }
-}
-
 static int32_t getColorFormat(const char* colorFormat) {
     if (!colorFormat) {
         ALOGE("Invalid color format");
@@ -169,16 +87,6 @@
     return -1;
 }
 
-CameraSource *CameraSource::Create(const String16 &clientName) {
-    Size size;
-    size.width = -1;
-    size.height = -1;
-
-    sp<hardware::ICamera> camera;
-    return new CameraSource(camera, NULL, 0, clientName, Camera::USE_CALLING_UID,
-            Camera::USE_CALLING_PID, size, -1, NULL, false);
-}
-
 // static
 CameraSource *CameraSource::CreateFromCamera(
     const sp<hardware::ICamera>& camera,
@@ -189,12 +97,10 @@
     pid_t clientPid,
     Size videoSize,
     int32_t frameRate,
-    const sp<IGraphicBufferProducer>& surface,
-    bool storeMetaDataInVideoBuffers) {
+    const sp<IGraphicBufferProducer>& surface) {
 
     CameraSource *source = new CameraSource(camera, proxy, cameraId,
-            clientName, clientUid, clientPid, videoSize, frameRate, surface,
-            storeMetaDataInVideoBuffers);
+            clientName, clientUid, clientPid, videoSize, frameRate, surface);
     return source;
 }
 
@@ -207,8 +113,7 @@
     pid_t clientPid,
     Size videoSize,
     int32_t frameRate,
-    const sp<IGraphicBufferProducer>& surface,
-    bool storeMetaDataInVideoBuffers)
+    const sp<IGraphicBufferProducer>& surface)
     : mCameraFlags(0),
       mNumInputBuffers(0),
       mVideoFrameRate(-1),
@@ -231,8 +136,7 @@
 
     mInitCheck = init(camera, proxy, cameraId,
                     clientName, clientUid, clientPid,
-                    videoSize, frameRate,
-                    storeMetaDataInVideoBuffers);
+                    videoSize, frameRate);
     if (mInitCheck != OK) releaseCamera();
 }
 
@@ -531,15 +435,13 @@
         uid_t clientUid,
         pid_t clientPid,
         Size videoSize,
-        int32_t frameRate,
-        bool storeMetaDataInVideoBuffers) {
+        int32_t frameRate) {
 
     ALOGV("init");
     status_t err = OK;
     int64_t token = IPCThreadState::self()->clearCallingIdentity();
     err = initWithCameraAccess(camera, proxy, cameraId, clientName, clientUid, clientPid,
-                               videoSize, frameRate,
-                               storeMetaDataInVideoBuffers);
+                               videoSize, frameRate);
     IPCThreadState::self()->restoreCallingIdentity(token);
     return err;
 }
@@ -626,8 +528,7 @@
         uid_t clientUid,
         pid_t clientPid,
         Size videoSize,
-        int32_t frameRate,
-        bool storeMetaDataInVideoBuffers) {
+        int32_t frameRate) {
     ALOGV("initWithCameraAccess");
     status_t err = OK;
 
@@ -667,24 +568,12 @@
         CHECK_EQ((status_t)OK, mCamera->setPreviewTarget(mSurface));
     }
 
-    // By default, store real data in video buffers.
-    mVideoBufferMode = hardware::ICamera::VIDEO_BUFFER_MODE_DATA_CALLBACK_YUV;
-    if (storeMetaDataInVideoBuffers) {
-        if (OK == mCamera->setVideoBufferMode(hardware::ICamera::VIDEO_BUFFER_MODE_BUFFER_QUEUE)) {
-            mVideoBufferMode = hardware::ICamera::VIDEO_BUFFER_MODE_BUFFER_QUEUE;
-        } else if (OK == mCamera->setVideoBufferMode(
-                hardware::ICamera::VIDEO_BUFFER_MODE_DATA_CALLBACK_METADATA)) {
-            mVideoBufferMode = hardware::ICamera::VIDEO_BUFFER_MODE_DATA_CALLBACK_METADATA;
-        }
-    }
-
-    if (mVideoBufferMode == hardware::ICamera::VIDEO_BUFFER_MODE_DATA_CALLBACK_YUV) {
-        err = mCamera->setVideoBufferMode(hardware::ICamera::VIDEO_BUFFER_MODE_DATA_CALLBACK_YUV);
-        if (err != OK) {
-            ALOGE("%s: Setting video buffer mode to VIDEO_BUFFER_MODE_DATA_CALLBACK_YUV failed: "
-                    "%s (err=%d)", __FUNCTION__, strerror(-err), err);
-            return err;
-        }
+    // Use buffer queue to receive video buffers from camera
+    err = mCamera->setVideoBufferMode(hardware::ICamera::VIDEO_BUFFER_MODE_BUFFER_QUEUE);
+    if (err != OK) {
+        ALOGE("%s: Setting video buffer mode to VIDEO_BUFFER_MODE_BUFFER_QUEUE failed: "
+                "%s (err=%d)", __FUNCTION__, strerror(-err), err);
+        return err;
     }
 
     int64_t glitchDurationUs = (1000000LL / mVideoFrameRate);
@@ -724,54 +613,26 @@
     int64_t token = IPCThreadState::self()->clearCallingIdentity();
     status_t err;
 
-    if (mVideoBufferMode == hardware::ICamera::VIDEO_BUFFER_MODE_BUFFER_QUEUE) {
-        // Initialize buffer queue.
-        err = initBufferQueue(mVideoSize.width, mVideoSize.height, mEncoderFormat,
-                (android_dataspace_t)mEncoderDataSpace,
-                mNumInputBuffers > 0 ? mNumInputBuffers : 1);
-        if (err != OK) {
-            ALOGE("%s: Failed to initialize buffer queue: %s (err=%d)", __FUNCTION__,
-                    strerror(-err), err);
-            return err;
-        }
-    } else {
-        if (mNumInputBuffers > 0) {
-            err = mCamera->sendCommand(
-                CAMERA_CMD_SET_VIDEO_BUFFER_COUNT, mNumInputBuffers, 0);
-
-            // This could happen for CameraHAL1 clients; thus the failure is
-            // not a fatal error
-            if (err != OK) {
-                ALOGW("Failed to set video buffer count to %d due to %d",
-                    mNumInputBuffers, err);
-            }
-        }
-
-        err = mCamera->sendCommand(
-            CAMERA_CMD_SET_VIDEO_FORMAT, mEncoderFormat, mEncoderDataSpace);
-
-        // This could happen for CameraHAL1 clients; thus the failure is
-        // not a fatal error
-        if (err != OK) {
-            ALOGW("Failed to set video encoder format/dataspace to %d, %d due to %d",
-                    mEncoderFormat, mEncoderDataSpace, err);
-        }
-
-        // Create memory heap to store buffers as VideoNativeMetadata.
-        createVideoBufferMemoryHeap(sizeof(VideoNativeHandleMetadata), kDefaultVideoBufferCount);
+    // Initialize buffer queue.
+    err = initBufferQueue(mVideoSize.width, mVideoSize.height, mEncoderFormat,
+            (android_dataspace_t)mEncoderDataSpace,
+            mNumInputBuffers > 0 ? mNumInputBuffers : 1);
+    if (err != OK) {
+        ALOGE("%s: Failed to initialize buffer queue: %s (err=%d)", __FUNCTION__,
+                strerror(-err), err);
+        return err;
     }
 
+    // Start data flow
     err = OK;
     if (mCameraFlags & FLAGS_HOT_CAMERA) {
         mCamera->unlock();
         mCamera.clear();
-        if ((err = mCameraRecordingProxy->startRecording(
-                new ProxyListener(this))) != OK) {
+        if ((err = mCameraRecordingProxy->startRecording()) != OK) {
             ALOGE("Failed to start recording, received error: %s (%d)",
                     strerror(-err), err);
         }
     } else {
-        mCamera->setListener(new CameraSourceListener(this));
         mCamera->startRecording();
         if (!mCamera->recordingEnabled()) {
             err = -EINVAL;
@@ -836,7 +697,6 @@
         }
     } else {
         if (mCamera != 0) {
-            mCamera->setListener(NULL);
             mCamera->stopRecording();
         }
     }
@@ -935,97 +795,31 @@
 void CameraSource::releaseRecordingFrame(const sp<IMemory>& frame) {
     ALOGV("releaseRecordingFrame");
 
-    if (mVideoBufferMode == hardware::ICamera::VIDEO_BUFFER_MODE_BUFFER_QUEUE) {
-        // Return the buffer to buffer queue in VIDEO_BUFFER_MODE_BUFFER_QUEUE mode.
-        ssize_t offset;
-        size_t size;
-        sp<IMemoryHeap> heap = frame->getMemory(&offset, &size);
-        if (heap->getHeapID() != mMemoryHeapBase->getHeapID()) {
-            ALOGE("%s: Mismatched heap ID, ignoring release (got %x, expected %x)", __FUNCTION__,
-                    heap->getHeapID(), mMemoryHeapBase->getHeapID());
-            return;
-        }
-
-        VideoNativeMetadata *payload = reinterpret_cast<VideoNativeMetadata*>(
-                (uint8_t*)heap->getBase() + offset);
-
-        // Find the corresponding buffer item for the native window buffer.
-        ssize_t index = mReceivedBufferItemMap.indexOfKey(payload->pBuffer);
-        if (index == NAME_NOT_FOUND) {
-            ALOGE("%s: Couldn't find buffer item for %p", __FUNCTION__, payload->pBuffer);
-            return;
-        }
-
-        BufferItem buffer = mReceivedBufferItemMap.valueAt(index);
-        mReceivedBufferItemMap.removeItemsAt(index);
-        mVideoBufferConsumer->releaseBuffer(buffer);
-        mMemoryBases.push_back(frame);
-        mMemoryBaseAvailableCond.signal();
-    } else {
-        native_handle_t* handle = nullptr;
-
-        // Check if frame contains a VideoNativeHandleMetadata.
-        if (frame->size() == sizeof(VideoNativeHandleMetadata)) {
-          // TODO: Using unsecurePointer() has some associated security pitfalls
-          //       (see declaration for details).
-          //       Either document why it is safe in this case or address the
-          //       issue (e.g. by copying).
-           VideoNativeHandleMetadata *metadata =
-                (VideoNativeHandleMetadata*)(frame->unsecurePointer());
-            if (metadata->eType == kMetadataBufferTypeNativeHandleSource) {
-                handle = metadata->pHandle;
-            }
-        }
-
-        if (handle != nullptr) {
-            ssize_t offset;
-            size_t size;
-            sp<IMemoryHeap> heap = frame->getMemory(&offset, &size);
-            if (heap->getHeapID() != mMemoryHeapBase->getHeapID()) {
-                ALOGE("%s: Mismatched heap ID, ignoring release (got %x, expected %x)",
-		     __FUNCTION__, heap->getHeapID(), mMemoryHeapBase->getHeapID());
-                return;
-            }
-            uint32_t batchSize = 0;
-            {
-                Mutex::Autolock autoLock(mBatchLock);
-                if (mInflightBatchSizes.size() > 0) {
-                    batchSize = mInflightBatchSizes[0];
-                }
-            }
-            if (batchSize == 0) { // return buffers one by one
-                // Frame contains a VideoNativeHandleMetadata. Send the handle back to camera.
-                releaseRecordingFrameHandle(handle);
-                mMemoryBases.push_back(frame);
-                mMemoryBaseAvailableCond.signal();
-            } else { // Group buffers in batch then return
-                Mutex::Autolock autoLock(mBatchLock);
-                mInflightReturnedHandles.push_back(handle);
-                mInflightReturnedMemorys.push_back(frame);
-                if (mInflightReturnedHandles.size() == batchSize) {
-                    releaseRecordingFrameHandleBatch(mInflightReturnedHandles);
-
-                    mInflightBatchSizes.pop_front();
-                    mInflightReturnedHandles.clear();
-                    for (const auto& mem : mInflightReturnedMemorys) {
-                        mMemoryBases.push_back(mem);
-                        mMemoryBaseAvailableCond.signal();
-                    }
-                    mInflightReturnedMemorys.clear();
-                }
-            }
-
-        } else if (mCameraRecordingProxy != nullptr) {
-            // mCamera is created by application. Return the frame back to camera via camera
-            // recording proxy.
-            mCameraRecordingProxy->releaseRecordingFrame(frame);
-        } else if (mCamera != nullptr) {
-            // mCamera is created by CameraSource. Return the frame directly back to camera.
-            int64_t token = IPCThreadState::self()->clearCallingIdentity();
-            mCamera->releaseRecordingFrame(frame);
-            IPCThreadState::self()->restoreCallingIdentity(token);
-        }
+    // Return the buffer to buffer queue in VIDEO_BUFFER_MODE_BUFFER_QUEUE mode.
+    ssize_t offset;
+    size_t size;
+    sp<IMemoryHeap> heap = frame->getMemory(&offset, &size);
+    if (heap->getHeapID() != mMemoryHeapBase->getHeapID()) {
+        ALOGE("%s: Mismatched heap ID, ignoring release (got %x, expected %x)", __FUNCTION__,
+                heap->getHeapID(), mMemoryHeapBase->getHeapID());
+        return;
     }
+
+    VideoNativeMetadata *payload = reinterpret_cast<VideoNativeMetadata*>(
+        (uint8_t*)heap->getBase() + offset);
+
+    // Find the corresponding buffer item for the native window buffer.
+    ssize_t index = mReceivedBufferItemMap.indexOfKey(payload->pBuffer);
+    if (index == NAME_NOT_FOUND) {
+        ALOGE("%s: Couldn't find buffer item for %p", __FUNCTION__, payload->pBuffer);
+        return;
+    }
+
+    BufferItem buffer = mReceivedBufferItemMap.valueAt(index);
+    mReceivedBufferItemMap.removeItemsAt(index);
+    mVideoBufferConsumer->releaseBuffer(buffer);
+    mMemoryBases.push_back(frame);
+    mMemoryBaseAvailableCond.signal();
 }
 
 void CameraSource::releaseQueuedFrames() {
@@ -1181,152 +975,6 @@
     return false;
 }
 
-void CameraSource::dataCallbackTimestamp(int64_t timestampUs,
-        int32_t msgType __unused, const sp<IMemory> &data) {
-    ALOGV("dataCallbackTimestamp: timestamp %lld us", (long long)timestampUs);
-    Mutex::Autolock autoLock(mLock);
-
-    if (shouldSkipFrameLocked(timestampUs)) {
-        releaseOneRecordingFrame(data);
-        return;
-    }
-
-    ++mNumFramesReceived;
-
-    CHECK(data != NULL && data->size() > 0);
-    mFramesReceived.push_back(data);
-    int64_t timeUs = mStartTimeUs + (timestampUs - mFirstFrameTimeUs);
-    mFrameTimes.push_back(timeUs);
-    ALOGV("initial delay: %" PRId64 ", current time stamp: %" PRId64,
-        mStartTimeUs, timeUs);
-    mFrameAvailableCondition.signal();
-}
-
-void CameraSource::releaseRecordingFrameHandle(native_handle_t* handle) {
-    if (mCameraRecordingProxy != nullptr) {
-        mCameraRecordingProxy->releaseRecordingFrameHandle(handle);
-    } else if (mCamera != nullptr) {
-        int64_t token = IPCThreadState::self()->clearCallingIdentity();
-        mCamera->releaseRecordingFrameHandle(handle);
-        IPCThreadState::self()->restoreCallingIdentity(token);
-    } else {
-        native_handle_close(handle);
-        native_handle_delete(handle);
-    }
-}
-
-void CameraSource::releaseRecordingFrameHandleBatch(const std::vector<native_handle_t*>& handles) {
-    if (mCameraRecordingProxy != nullptr) {
-        mCameraRecordingProxy->releaseRecordingFrameHandleBatch(handles);
-    } else if (mCamera != nullptr) {
-        int64_t token = IPCThreadState::self()->clearCallingIdentity();
-        mCamera->releaseRecordingFrameHandleBatch(handles);
-        IPCThreadState::self()->restoreCallingIdentity(token);
-    } else {
-        for (auto& handle : handles) {
-            native_handle_close(handle);
-            native_handle_delete(handle);
-        }
-    }
-}
-
-void CameraSource::recordingFrameHandleCallbackTimestamp(int64_t timestampUs,
-                native_handle_t* handle) {
-    ALOGV("%s: timestamp %lld us", __FUNCTION__, (long long)timestampUs);
-    Mutex::Autolock autoLock(mLock);
-    if (handle == nullptr) return;
-
-    if (shouldSkipFrameLocked(timestampUs)) {
-        releaseRecordingFrameHandle(handle);
-        return;
-    }
-
-    while (mMemoryBases.empty()) {
-        if (mMemoryBaseAvailableCond.waitRelative(mLock, kMemoryBaseAvailableTimeoutNs) ==
-                TIMED_OUT) {
-            ALOGW("Waiting on an available memory base timed out. Dropping a recording frame.");
-            releaseRecordingFrameHandle(handle);
-            return;
-        }
-    }
-
-    ++mNumFramesReceived;
-
-    sp<IMemory> data = *mMemoryBases.begin();
-    mMemoryBases.erase(mMemoryBases.begin());
-
-    // Wrap native handle in sp<IMemory> so it can be pushed to mFramesReceived.
-    VideoNativeHandleMetadata *metadata = (VideoNativeHandleMetadata*)(data->unsecurePointer());
-    metadata->eType = kMetadataBufferTypeNativeHandleSource;
-    metadata->pHandle = handle;
-
-    mFramesReceived.push_back(data);
-    int64_t timeUs = mStartTimeUs + (timestampUs - mFirstFrameTimeUs);
-    mFrameTimes.push_back(timeUs);
-    ALOGV("initial delay: %" PRId64 ", current time stamp: %" PRId64, mStartTimeUs, timeUs);
-    mFrameAvailableCondition.signal();
-}
-
-void CameraSource::recordingFrameHandleCallbackTimestampBatch(
-        const std::vector<int64_t>& timestampsUs,
-        const std::vector<native_handle_t*>& handles) {
-    size_t n = timestampsUs.size();
-    if (n != handles.size()) {
-        ALOGE("%s: timestampsUs(%zu) and handles(%zu) size mismatch!",
-                __FUNCTION__, timestampsUs.size(), handles.size());
-    }
-
-    Mutex::Autolock autoLock(mLock);
-    int batchSize = 0;
-    for (size_t i = 0; i < n; i++) {
-        int64_t timestampUs = timestampsUs[i];
-        native_handle_t* handle = handles[i];
-
-        ALOGV("%s: timestamp %lld us", __FUNCTION__, (long long)timestampUs);
-        if (handle == nullptr) continue;
-
-        if (shouldSkipFrameLocked(timestampUs)) {
-            releaseRecordingFrameHandle(handle);
-            continue;
-        }
-
-        while (mMemoryBases.empty()) {
-            if (mMemoryBaseAvailableCond.waitRelative(mLock, kMemoryBaseAvailableTimeoutNs) ==
-                    TIMED_OUT) {
-                ALOGW("Waiting on an available memory base timed out. Dropping a recording frame.");
-                releaseRecordingFrameHandle(handle);
-                continue;
-            }
-        }
-        ++batchSize;
-        ++mNumFramesReceived;
-        sp<IMemory> data = *mMemoryBases.begin();
-        mMemoryBases.erase(mMemoryBases.begin());
-
-        // Wrap native handle in sp<IMemory> so it can be pushed to mFramesReceived.
-        // TODO: Using unsecurePointer() has some associated security pitfalls
-        //       (see declaration for details).
-        //       Either document why it is safe in this case or address the
-        //       issue (e.g. by copying).
-        VideoNativeHandleMetadata *metadata = (VideoNativeHandleMetadata*)(data->unsecurePointer());
-        metadata->eType = kMetadataBufferTypeNativeHandleSource;
-        metadata->pHandle = handle;
-
-        mFramesReceived.push_back(data);
-        int64_t timeUs = mStartTimeUs + (timestampUs - mFirstFrameTimeUs);
-        mFrameTimes.push_back(timeUs);
-        ALOGV("initial delay: %" PRId64 ", current time stamp: %" PRId64, mStartTimeUs, timeUs);
-
-    }
-    if (batchSize > 0) {
-        Mutex::Autolock autoLock(mBatchLock);
-        mInflightBatchSizes.push_back(batchSize);
-    }
-    for (int i = 0; i < batchSize; i++) {
-        mFrameAvailableCondition.signal();
-    }
-}
-
 CameraSource::BufferQueueListener::BufferQueueListener(const sp<BufferItemConsumer>& consumer,
         const sp<CameraSource>& cameraSource) {
     mConsumer = consumer;
@@ -1417,41 +1065,7 @@
 MetadataBufferType CameraSource::metaDataStoredInVideoBuffers() const {
     ALOGV("metaDataStoredInVideoBuffers");
 
-    // Output buffers will contain metadata if camera sends us buffer in metadata mode or via
-    // buffer queue.
-    switch (mVideoBufferMode) {
-        case hardware::ICamera::VIDEO_BUFFER_MODE_DATA_CALLBACK_METADATA:
-            return kMetadataBufferTypeNativeHandleSource;
-        case hardware::ICamera::VIDEO_BUFFER_MODE_BUFFER_QUEUE:
-            return kMetadataBufferTypeANWBuffer;
-        default:
-            return kMetadataBufferTypeInvalid;
-    }
-}
-
-CameraSource::ProxyListener::ProxyListener(const sp<CameraSource>& source) {
-    mSource = source;
-}
-
-void CameraSource::ProxyListener::dataCallbackTimestamp(
-        nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr) {
-    mSource->dataCallbackTimestamp(timestamp / 1000, msgType, dataPtr);
-}
-
-void CameraSource::ProxyListener::recordingFrameHandleCallbackTimestamp(nsecs_t timestamp,
-        native_handle_t* handle) {
-    mSource->recordingFrameHandleCallbackTimestamp(timestamp / 1000, handle);
-}
-
-void CameraSource::ProxyListener::recordingFrameHandleCallbackTimestampBatch(
-        const std::vector<int64_t>& timestampsUs,
-        const std::vector<native_handle_t*>& handles) {
-    int n = timestampsUs.size();
-    std::vector<nsecs_t> modifiedTimestamps(n);
-    for (int i = 0; i < n; i++) {
-        modifiedTimestamps[i] = timestampsUs[i] / 1000;
-    }
-    mSource->recordingFrameHandleCallbackTimestampBatch(modifiedTimestamps, handles);
+    return kMetadataBufferTypeANWBuffer;
 }
 
 void CameraSource::DeathNotifier::binderDied(const wp<IBinder>& who __unused) {
diff --git a/media/libstagefright/CameraSourceTimeLapse.cpp b/media/libstagefright/CameraSourceTimeLapse.cpp
index e0a6eb3..50a512f 100644
--- a/media/libstagefright/CameraSourceTimeLapse.cpp
+++ b/media/libstagefright/CameraSourceTimeLapse.cpp
@@ -45,15 +45,13 @@
         Size videoSize,
         int32_t videoFrameRate,
         const sp<IGraphicBufferProducer>& surface,
-        int64_t timeBetweenFrameCaptureUs,
-        bool storeMetaDataInVideoBuffers) {
+        int64_t timeBetweenFrameCaptureUs) {
 
     CameraSourceTimeLapse *source = new
             CameraSourceTimeLapse(camera, proxy, cameraId,
                 clientName, clientUid, clientPid,
                 videoSize, videoFrameRate, surface,
-                timeBetweenFrameCaptureUs,
-                storeMetaDataInVideoBuffers);
+                timeBetweenFrameCaptureUs);
 
     if (source != NULL) {
         if (source->initCheck() != OK) {
@@ -74,11 +72,9 @@
         Size videoSize,
         int32_t videoFrameRate,
         const sp<IGraphicBufferProducer>& surface,
-        int64_t timeBetweenFrameCaptureUs,
-        bool storeMetaDataInVideoBuffers)
+        int64_t timeBetweenFrameCaptureUs)
       : CameraSource(camera, proxy, cameraId, clientName, clientUid, clientPid,
-                videoSize, videoFrameRate, surface,
-                storeMetaDataInVideoBuffers),
+                videoSize, videoFrameRate, surface),
       mTimeBetweenTimeLapseVideoFramesUs(1E6/videoFrameRate),
       mLastTimeLapseFrameRealTimestampUs(0),
       mSkipCurrentFrame(false) {
@@ -173,12 +169,6 @@
     ALOGV("signalBufferReturned");
     Mutex::Autolock autoLock(mQuickStopLock);
     if (mQuickStop && (buffer == mLastReadBufferCopy)) {
-        if (metaDataStoredInVideoBuffers() == kMetadataBufferTypeNativeHandleSource) {
-            native_handle_t* handle = (
-                (VideoNativeHandleMetadata*)(mLastReadBufferCopy->data()))->pHandle;
-            native_handle_close(handle);
-            native_handle_delete(handle);
-        }
         buffer->setObserver(NULL);
         buffer->release();
         mLastReadBufferCopy = NULL;
@@ -191,8 +181,7 @@
 void createMediaBufferCopy(
         const MediaBufferBase& sourceBuffer,
         int64_t frameTime,
-        MediaBufferBase **newBuffer,
-        int32_t videoBufferMode) {
+        MediaBufferBase **newBuffer) {
 
     ALOGV("createMediaBufferCopy");
     size_t sourceSize = sourceBuffer.size();
@@ -203,19 +192,13 @@
 
     (*newBuffer)->meta_data().setInt64(kKeyTime, frameTime);
 
-    if (videoBufferMode == kMetadataBufferTypeNativeHandleSource) {
-        ((VideoNativeHandleMetadata*)((*newBuffer)->data()))->pHandle =
-            native_handle_clone(
-                ((VideoNativeHandleMetadata*)(sourceBuffer.data()))->pHandle);
-    }
 }
 
 void CameraSourceTimeLapse::fillLastReadBufferCopy(MediaBufferBase& sourceBuffer) {
     ALOGV("fillLastReadBufferCopy");
     int64_t frameTime;
     CHECK(sourceBuffer.meta_data().findInt64(kKeyTime, &frameTime));
-    createMediaBufferCopy(sourceBuffer, frameTime, &mLastReadBufferCopy,
-        metaDataStoredInVideoBuffers());
+    createMediaBufferCopy(sourceBuffer, frameTime, &mLastReadBufferCopy);
     mLastReadBufferCopy->add_ref();
     mLastReadBufferCopy->setObserver(this);
 }
@@ -240,19 +223,6 @@
     }
 }
 
-sp<IMemory> CameraSourceTimeLapse::createIMemoryCopy(
-        const sp<IMemory> &source_data) {
-
-    ALOGV("createIMemoryCopy");
-    size_t source_size = source_data->size();
-    void* source_pointer = source_data->unsecurePointer();
-
-    sp<MemoryHeapBase> newMemoryHeap = new MemoryHeapBase(source_size);
-    sp<MemoryBase> newMemory = new MemoryBase(newMemoryHeap, 0, source_size);
-    memcpy(newMemory->unsecurePointer(), source_pointer, source_size);
-    return newMemory;
-}
-
 bool CameraSourceTimeLapse::skipCurrentFrame(int64_t /* timestampUs */) {
     ALOGV("skipCurrentFrame");
     if (mSkipCurrentFrame) {
@@ -318,31 +288,6 @@
     return false;
 }
 
-void CameraSourceTimeLapse::dataCallbackTimestamp(int64_t timestampUs, int32_t msgType,
-            const sp<IMemory> &data) {
-    ALOGV("dataCallbackTimestamp");
-    mSkipCurrentFrame = skipFrameAndModifyTimeStamp(&timestampUs);
-    CameraSource::dataCallbackTimestamp(timestampUs, msgType, data);
-}
-
-void CameraSourceTimeLapse::recordingFrameHandleCallbackTimestamp(int64_t timestampUs,
-            native_handle_t* handle) {
-    ALOGV("recordingFrameHandleCallbackTimestamp");
-    mSkipCurrentFrame = skipFrameAndModifyTimeStamp(&timestampUs);
-    CameraSource::recordingFrameHandleCallbackTimestamp(timestampUs, handle);
-}
-
-void CameraSourceTimeLapse::recordingFrameHandleCallbackTimestampBatch(
-        const std::vector<int64_t>& timestampsUs,
-        const std::vector<native_handle_t*>& handles) {
-    ALOGV("recordingFrameHandleCallbackTimestampBatch");
-    int n = timestampsUs.size();
-    for (int i = 0; i < n; i++) {
-        // Don't do batching for CameraSourceTimeLapse for now
-        recordingFrameHandleCallbackTimestamp(timestampsUs[i], handles[i]);
-    }
-}
-
 void CameraSourceTimeLapse::processBufferQueueFrame(BufferItem& buffer) {
     ALOGV("processBufferQueueFrame");
     int64_t timestampUs = buffer.mTimestamp / 1000;
diff --git a/media/libstagefright/FrameDecoder.cpp b/media/libstagefright/FrameDecoder.cpp
index 734f5bb..965b6dd 100644
--- a/media/libstagefright/FrameDecoder.cpp
+++ b/media/libstagefright/FrameDecoder.cpp
@@ -19,6 +19,7 @@
 
 #include "include/FrameDecoder.h"
 #include "include/FrameCaptureLayer.h"
+#include "include/HevcUtils.h"
 #include <binder/MemoryBase.h>
 #include <binder/MemoryHeapBase.h>
 #include <gui/Surface.h>
@@ -456,7 +457,8 @@
         const sp<IMediaSource> &source)
     : FrameDecoder(componentName, trackMeta, source),
       mFrame(NULL),
-      mIsAvcOrHevc(false),
+      mIsAvc(false),
+      mIsHevc(false),
       mSeekMode(MediaSource::ReadOptions::SEEK_PREVIOUS_SYNC),
       mTargetTimeUs(-1LL),
       mDefaultSampleDurationUs(0) {
@@ -479,8 +481,8 @@
         return NULL;
     }
 
-    mIsAvcOrHevc = !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_AVC)
-            || !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_HEVC);
+    mIsAvc = !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_AVC);
+    mIsHevc = !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_HEVC);
 
     if (frameTimeUs < 0) {
         int64_t thumbNailTime = -1ll;
@@ -543,8 +545,10 @@
         ALOGV("Seeking closest: targetTimeUs=%lld", (long long)mTargetTimeUs);
     }
 
-    if (mIsAvcOrHevc && !isSeekingClosest
-            && IsIDR(codecBuffer->data(), codecBuffer->size())) {
+    if (!isSeekingClosest
+            && ((mIsAvc && IsIDR(codecBuffer->data(), codecBuffer->size()))
+            || (mIsHevc && IsIDR(
+            codecBuffer->data(), codecBuffer->size())))) {
         // Only need to decode one IDR frame, unless we're seeking with CLOSEST
         // option, in which case we need to actually decode to targetTimeUs.
         *flags |= MediaCodec::BUFFER_FLAG_EOS;
diff --git a/media/libstagefright/HevcUtils.cpp b/media/libstagefright/HevcUtils.cpp
index 0e4eae2..aac656a 100644
--- a/media/libstagefright/HevcUtils.cpp
+++ b/media/libstagefright/HevcUtils.cpp
@@ -34,7 +34,10 @@
 
 namespace android {
 
-static const uint8_t kHevcNalUnitTypes[5] = {
+static const uint8_t kHevcNalUnitTypes[8] = {
+    kHevcNalUnitTypeCodedSliceIdr,
+    kHevcNalUnitTypeCodedSliceIdrNoLP,
+    kHevcNalUnitTypeCodedSliceCra,
     kHevcNalUnitTypeVps,
     kHevcNalUnitTypeSps,
     kHevcNalUnitTypePps,
@@ -491,4 +494,26 @@
     return OK;
 }
 
+bool HevcParameterSets::IsHevcIDR(const uint8_t *data, size_t size) {
+    bool foundIDR = false;
+    const uint8_t *nalStart;
+    size_t nalSize;
+    while (!foundIDR && getNextNALUnit(&data, &size, &nalStart, &nalSize, true) == OK) {
+        if (nalSize == 0) {
+            ALOGE("Encountered zero-length HEVC NAL");
+            return false;
+        }
+
+        uint8_t nalType = (nalStart[0] & 0x7E) >> 1;
+        switch(nalType) {
+            case kHevcNalUnitTypeCodedSliceIdr:
+            case kHevcNalUnitTypeCodedSliceIdrNoLP:
+            case kHevcNalUnitTypeCodedSliceCra:
+                foundIDR = true;
+                break;
+        }
+    }
+
+    return foundIDR;
+}
 }  // namespace android
diff --git a/media/libstagefright/MPEG4Writer.cpp b/media/libstagefright/MPEG4Writer.cpp
index 8f7d4bf..7c2a5ff 100644
--- a/media/libstagefright/MPEG4Writer.cpp
+++ b/media/libstagefright/MPEG4Writer.cpp
@@ -542,6 +542,7 @@
     mNumGrids = 0;
     mNextItemId = kItemIdBase;
     mHasRefs = false;
+    mResetStatus = OK;
     mPreAllocFirstTime = true;
     mPrevAllTracksTotalMetaDataSizeEstimate = 0;
 
@@ -1027,6 +1028,11 @@
     return OK;
 }
 
+status_t MPEG4Writer::stop() {
+    // If reset was in progress, wait for it to complete.
+    return reset(true, true);
+}
+
 status_t MPEG4Writer::pause() {
     ALOGW("MPEG4Writer: pause is not supported");
     return ERROR_UNSUPPORTED;
@@ -1159,8 +1165,12 @@
     return err;
 }
 
-void MPEG4Writer::finishCurrentSession() {
-    reset(false /* stopSource */);
+status_t MPEG4Writer::finishCurrentSession() {
+    ALOGV("finishCurrentSession");
+    /* Don't wait if reset is in progress already, that avoids deadlock
+     * as finishCurrentSession() is called from control looper thread.
+     */
+    return reset(false, false);
 }
 
 status_t MPEG4Writer::switchFd() {
@@ -1182,11 +1192,32 @@
     return err;
 }
 
-status_t MPEG4Writer::reset(bool stopSource) {
+status_t MPEG4Writer::reset(bool stopSource, bool waitForAnyPreviousCallToComplete) {
     ALOGD("reset()");
-    std::lock_guard<std::mutex> l(mResetMutex);
+    std::unique_lock<std::mutex> lk(mResetMutex, std::defer_lock);
+    if (waitForAnyPreviousCallToComplete) {
+        /* stop=>reset from client needs the return value of reset call, hence wait here
+         * if a reset was in process already.
+         */
+        lk.lock();
+    } else if (!lk.try_lock()) {
+        /* Internal reset from control looper thread shouldn't wait for any reset in
+         * process already.
+         */
+        return INVALID_OPERATION;
+    }
+
+    if (mResetStatus != OK) {
+        /* Don't have to proceed if reset has finished with an error before.
+         * If there was no error before, proceeding reset would be harmless, as the
+         * the call would return from the mInitCheck condition below.
+         */
+        return mResetStatus;
+    }
+
     if (mInitCheck != OK) {
-        return OK;
+        mResetStatus = OK;
+        return mResetStatus;
     } else {
         if (!mWriterThreadStarted ||
             !mStarted) {
@@ -1198,7 +1229,8 @@
             if (writerErr != OK) {
                 retErr = writerErr;
             }
-            return retErr;
+            mResetStatus = retErr;
+            return mResetStatus;
         }
     }
 
@@ -1245,7 +1277,8 @@
     if (err != OK && err != ERROR_MALFORMED) {
         // Ignoring release() return value as there was an "err" already.
         release();
-        return err;
+        mResetStatus = err;
+        return mResetStatus;
     }
 
     // Fix up the size of the 'mdat' chunk.
@@ -1303,7 +1336,8 @@
     if (err == OK) {
         err = errRelease;
     }
-    return err;
+    mResetStatus = err;
+    return mResetStatus;
 }
 
 /*
@@ -2454,31 +2488,27 @@
             int fd = mNextFd;
             mNextFd = -1;
             mLock.unlock();
-            finishCurrentSession();
-            initInternal(fd, false /*isFirstSession*/);
-            start(mStartMeta.get());
-            mSwitchPending = false;
-            notify(MEDIA_RECORDER_EVENT_INFO, MEDIA_RECORDER_INFO_NEXT_OUTPUT_FILE_STARTED, 0);
+            if (finishCurrentSession() == OK) {
+                initInternal(fd, false /*isFirstSession*/);
+                status_t status = start(mStartMeta.get());
+                mSwitchPending = false;
+                if (status == OK)  {
+                    notify(MEDIA_RECORDER_EVENT_INFO,
+                           MEDIA_RECORDER_INFO_NEXT_OUTPUT_FILE_STARTED, 0);
+                }
+            }
             break;
         }
-        // ::write() or lseek64() wasn't a success, file could be malformed
+        /* ::write() or lseek64() wasn't a success, file could be malformed.
+         * Or fallocate() failed. reset() and notify client on both the cases.
+         */
+        case kWhatFallocateError: // fallthrough
         case kWhatIOError: {
-            ALOGE("kWhatIOError");
             int32_t err;
             CHECK(msg->findInt32("err", &err));
-            // Stop tracks' threads and main writer thread.
-            stop();
-            notify(MEDIA_RECORDER_EVENT_ERROR, MEDIA_RECORDER_ERROR_UNKNOWN, err);
-            break;
-        }
-        // fallocate() failed, hence stop() and notify app.
-        case kWhatFallocateError: {
-            ALOGE("kWhatFallocateError");
-            int32_t err;
-            CHECK(msg->findInt32("err", &err));
-            // Stop tracks' threads and main writer thread.
-            stop();
-            //TODO: introduce a suitable MEDIA_RECORDER_ERROR_* instead MEDIA_RECORDER_ERROR_UNKNOWN?
+            // If reset already in process, don't wait for it complete to avoid deadlock.
+            reset(true, false);
+            //TODO: new MEDIA_RECORDER_ERROR_**** instead MEDIA_RECORDER_ERROR_UNKNOWN ?
             notify(MEDIA_RECORDER_EVENT_ERROR, MEDIA_RECORDER_ERROR_UNKNOWN, err);
             break;
         }
@@ -2486,7 +2516,7 @@
          * Responding with other options could be added later if required.
          */
         case kWhatNoIOErrorSoFar: {
-            ALOGD("kWhatNoIOErrorSoFar");
+            ALOGV("kWhatNoIOErrorSoFar");
             sp<AMessage> response = new AMessage;
             response->setInt32("err", OK);
             sp<AReplyToken> replyID;
@@ -4715,10 +4745,18 @@
 
 // This is useful if the pixel is not square
 void MPEG4Writer::Track::writePaspBox() {
-    mOwner->beginBox("pasp");
-    mOwner->writeInt32(1 << 16);  // hspacing
-    mOwner->writeInt32(1 << 16);  // vspacing
-    mOwner->endBox();  // pasp
+    // Do not write 'pasp' box unless the track format specifies it.
+    // According to ISO/IEC 14496-12 (ISO base media file format), 'pasp' box
+    // is optional. If present, it overrides the SAR from the video CSD. Only
+    // set it if the track format specifically requests that.
+    int32_t hSpacing, vSpacing;
+    if (mMeta->findInt32(kKeySARWidth, &hSpacing) && (hSpacing > 0)
+            && mMeta->findInt32(kKeySARHeight, &vSpacing) && (vSpacing > 0)) {
+        mOwner->beginBox("pasp");
+        mOwner->writeInt32(hSpacing);  // hspacing
+        mOwner->writeInt32(vSpacing);  // vspacing
+        mOwner->endBox();  // pasp
+    }
 }
 
 int64_t MPEG4Writer::Track::getStartTimeOffsetTimeUs() const {
diff --git a/media/libstagefright/MediaAdapter.cpp b/media/libstagefright/MediaAdapter.cpp
index f1b6e8c..5a2a910 100644
--- a/media/libstagefright/MediaAdapter.cpp
+++ b/media/libstagefright/MediaAdapter.cpp
@@ -114,6 +114,13 @@
         return -EINVAL;
     }
 
+    /* As mAdapterLock is unlocked while waiting for signalBufferReturned,
+     * a new buffer for the same track could be pushed from another thread
+     * in the client process, mBufferGatingMutex will help to hold that
+     * until the previous buffer is processed.
+     */
+    std::unique_lock<std::mutex> lk(mBufferGatingMutex);
+
     Mutex::Autolock autoLock(mAdapterLock);
     if (!mStarted) {
         ALOGE("pushBuffer called before start");
diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp
index 5d17f97..e975ee6 100644
--- a/media/libstagefright/MediaCodec.cpp
+++ b/media/libstagefright/MediaCodec.cpp
@@ -59,6 +59,7 @@
 #include <media/stagefright/MediaCodec.h>
 #include <media/stagefright/MediaCodecConstants.h>
 #include <media/stagefright/MediaCodecList.h>
+#include <media/stagefright/MediaCodecConstants.h>
 #include <media/stagefright/MediaDefs.h>
 #include <media/stagefright/MediaErrors.h>
 #include <media/stagefright/MediaFilter.h>
@@ -2315,7 +2316,7 @@
                     if (mSurface != nullptr && !mAllowFrameDroppingBySurface) {
                         // signal frame dropping mode in the input format as this may also be
                         // meaningful and confusing for an encoder in a transcoder scenario
-                        mInputFormat->setInt32("allow-frame-drop", mAllowFrameDroppingBySurface);
+                        mInputFormat->setInt32(KEY_ALLOW_FRAME_DROP, mAllowFrameDroppingBySurface);
                     }
                     sp<AMessage> interestingFormat =
                             (mFlags & kFlagIsEncoder) ? mOutputFormat : mInputFormat;
@@ -2819,7 +2820,7 @@
             }
 
             if (obj != NULL) {
-                if (!format->findInt32("allow-frame-drop", &mAllowFrameDroppingBySurface)) {
+                if (!format->findInt32(KEY_ALLOW_FRAME_DROP, &mAllowFrameDroppingBySurface)) {
                     // allow frame dropping by surface by default
                     mAllowFrameDroppingBySurface = true;
                 }
diff --git a/media/libstagefright/MediaCodecSource.cpp b/media/libstagefright/MediaCodecSource.cpp
index 1395c27..bc656a2 100644
--- a/media/libstagefright/MediaCodecSource.cpp
+++ b/media/libstagefright/MediaCodecSource.cpp
@@ -435,6 +435,30 @@
     buffer->release();
 }
 
+status_t MediaCodecSource::setEncodingBitrate(int32_t bitRate) {
+    ALOGV("setEncodingBitrate (%d)", bitRate);
+
+    if (mEncoder == NULL) {
+        ALOGW("setEncodingBitrate (%d) : mEncoder is null", bitRate);
+        return BAD_VALUE;
+    }
+
+    sp<AMessage> params = new AMessage;
+    params->setInt32("video-bitrate", bitRate);
+
+    return mEncoder->setParameters(params);
+}
+
+status_t MediaCodecSource::requestIDRFrame() {
+    if (mEncoder == NULL) {
+        ALOGW("requestIDRFrame : mEncoder is null");
+        return BAD_VALUE;
+    } else {
+        mEncoder->requestIDRFrame();
+        return OK;
+    }
+}
+
 MediaCodecSource::MediaCodecSource(
         const sp<ALooper> &looper,
         const sp<AMessage> &outputFormat,
diff --git a/media/libstagefright/MediaMuxer.cpp b/media/libstagefright/MediaMuxer.cpp
index cab4ebd..8bbffd4 100644
--- a/media/libstagefright/MediaMuxer.cpp
+++ b/media/libstagefright/MediaMuxer.cpp
@@ -175,16 +175,23 @@
 
 status_t MediaMuxer::writeSampleData(const sp<ABuffer> &buffer, size_t trackIndex,
                                      int64_t timeUs, uint32_t flags) {
-    Mutex::Autolock autoLock(mMuxerLock);
-
     if (buffer.get() == NULL) {
         ALOGE("WriteSampleData() get an NULL buffer.");
         return -EINVAL;
     }
-
-    if (mState != STARTED) {
-        ALOGE("WriteSampleData() is called in invalid state %d", mState);
-        return INVALID_OPERATION;
+    {
+        /* As MediaMuxer's writeSampleData handles inputs from multiple tracks,
+         * limited the scope of mMuxerLock to this inner block so that the
+         * current track's buffer does not wait until the completion
+         * of processing of previous buffer of the same or another track.
+         * It's the responsibility of individual track - MediaAdapter object
+         * to gate its buffers.
+         */
+        Mutex::Autolock autoLock(mMuxerLock);
+        if (mState != STARTED) {
+            ALOGE("WriteSampleData() is called in invalid state %d", mState);
+            return INVALID_OPERATION;
+        }
     }
 
     if (trackIndex >= mTrackList.size()) {
diff --git a/media/libstagefright/SurfaceUtils.cpp b/media/libstagefright/SurfaceUtils.cpp
index 4c94baa..85ff474 100644
--- a/media/libstagefright/SurfaceUtils.cpp
+++ b/media/libstagefright/SurfaceUtils.cpp
@@ -165,6 +165,22 @@
     ALOGW_IF(err != 0, "failed to set cta861_3 metadata on surface (%d)", err);
 }
 
+status_t setNativeWindowRotation(
+        ANativeWindow *nativeWindow /* nonnull */, int rotation) {
+
+    int transform = 0;
+    if ((rotation % 90) == 0) {
+        switch ((rotation / 90) & 3) {
+            case 1:  transform = HAL_TRANSFORM_ROT_90;  break;
+            case 2:  transform = HAL_TRANSFORM_ROT_180; break;
+            case 3:  transform = HAL_TRANSFORM_ROT_270; break;
+            default: transform = 0;                     break;
+        }
+    }
+
+    return native_window_set_buffers_transform(nativeWindow, transform);
+}
+
 status_t pushBlankBuffersToNativeWindow(ANativeWindow *nativeWindow /* nonnull */) {
     status_t err = NO_ERROR;
     ANativeWindowBuffer* anb = NULL;
diff --git a/media/libstagefright/TEST_MAPPING b/media/libstagefright/TEST_MAPPING
index 8b36ea5..3dceef7 100644
--- a/media/libstagefright/TEST_MAPPING
+++ b/media/libstagefright/TEST_MAPPING
@@ -1,5 +1,12 @@
 {
   "presubmit": [
+    // TODO(b/148094059): unit tests not allowed to download content
+    // { "name": "HEVCUtilsUnitTest" },
+    //{ "name": "ExtractorFactoryTest" },
+    // writerTest fails about 5 out of 66
+    // in addition to needing the download ability
+    //{ "name": "writerTest" },
+
     {
       "name": "CtsMediaTestCases",
       "options": [
diff --git a/media/libstagefright/bqhelper/TEST_MAPPING b/media/libstagefright/bqhelper/TEST_MAPPING
new file mode 100644
index 0000000..c7f2fd8
--- /dev/null
+++ b/media/libstagefright/bqhelper/TEST_MAPPING
@@ -0,0 +1,6 @@
+// mappings for frameworks/av/media/libstagefright/bqhelper
+{
+  "presubmit": [
+    { "name": "FrameDropper_test"}
+  ]
+}
diff --git a/media/libstagefright/bqhelper/include/media/stagefright/bqhelper/FrameDropper.h b/media/libstagefright/bqhelper/include/media/stagefright/bqhelper/FrameDropper.h
index 4e83059..7e2909a 100644
--- a/media/libstagefright/bqhelper/include/media/stagefright/bqhelper/FrameDropper.h
+++ b/media/libstagefright/bqhelper/include/media/stagefright/bqhelper/FrameDropper.h
@@ -30,6 +30,8 @@
     FrameDropper();
 
     // maxFrameRate required to be positive.
+    // maxFrameRate negative causes shouldDrop() to always return false
+    // maxFrameRate == 0 is illegal
     status_t setMaxFrameRate(float maxFrameRate);
 
     // Returns false if max frame rate has not been set via setMaxFrameRate.
diff --git a/media/libstagefright/bqhelper/tests/Android.bp b/media/libstagefright/bqhelper/tests/Android.bp
index 2fbc3d6..3897689 100644
--- a/media/libstagefright/bqhelper/tests/Android.bp
+++ b/media/libstagefright/bqhelper/tests/Android.bp
@@ -1,5 +1,6 @@
 cc_test {
     name: "FrameDropper_test",
+    test_suites: ["device-tests"],
 
     srcs: ["FrameDropper_test.cpp"],
 
diff --git a/media/libstagefright/bqhelper/tests/FrameDropper_test.cpp b/media/libstagefright/bqhelper/tests/FrameDropper_test.cpp
index 55ae77c..b18067f 100644
--- a/media/libstagefright/bqhelper/tests/FrameDropper_test.cpp
+++ b/media/libstagefright/bqhelper/tests/FrameDropper_test.cpp
@@ -110,7 +110,7 @@
 };
 
 TEST_F(FrameDropperTest, TestInvalidMaxFrameRate) {
-    EXPECT_NE(OK, mFrameDropper->setMaxFrameRate(-1.0));
+    EXPECT_EQ(OK, mFrameDropper->setMaxFrameRate(-1.0));
     EXPECT_NE(OK, mFrameDropper->setMaxFrameRate(0));
 }
 
diff --git a/media/libstagefright/codecs/amrnb/TEST_MAPPING b/media/libstagefright/codecs/amrnb/TEST_MAPPING
new file mode 100644
index 0000000..2909099
--- /dev/null
+++ b/media/libstagefright/codecs/amrnb/TEST_MAPPING
@@ -0,0 +1,11 @@
+// mappings for frameworks/av/media/libstagefright/codecs/amrnb
+{
+  "presubmit": [
+    // TODO(b/148094059): unit tests not allowed to download content
+    // { "name": "AmrnbDecoderTest"},
+
+    // TODO(b/148094059): unit tests not allowed to download content
+    // { "name": "AmrnbEncoderTest"}
+
+  ]
+}
diff --git a/media/libstagefright/codecs/amrnb/dec/test/Android.bp b/media/libstagefright/codecs/amrnb/dec/test/Android.bp
index 7a95cfa..91c9f86 100644
--- a/media/libstagefright/codecs/amrnb/dec/test/Android.bp
+++ b/media/libstagefright/codecs/amrnb/dec/test/Android.bp
@@ -17,6 +17,7 @@
 cc_test {
     name: "AmrnbDecoderTest",
     gtest: true,
+    test_suites: ["device-tests"],
 
     srcs: [
         "AmrnbDecoderTest.cpp",
diff --git a/media/libstagefright/codecs/amrnb/enc/test/Android.bp b/media/libstagefright/codecs/amrnb/enc/test/Android.bp
index e8982fe..7e1b561 100644
--- a/media/libstagefright/codecs/amrnb/enc/test/Android.bp
+++ b/media/libstagefright/codecs/amrnb/enc/test/Android.bp
@@ -17,6 +17,7 @@
 cc_test {
     name: "AmrnbEncoderTest",
     gtest: true,
+    test_suites: ["device-tests"],
 
     srcs: [
         "AmrnbEncoderTest.cpp",
diff --git a/media/libstagefright/codecs/amrwb/TEST_MAPPING b/media/libstagefright/codecs/amrwb/TEST_MAPPING
new file mode 100644
index 0000000..3d58ba2
--- /dev/null
+++ b/media/libstagefright/codecs/amrwb/TEST_MAPPING
@@ -0,0 +1,8 @@
+// mappings for frameworks/av/media/libstagefright/codecs/amrwb
+{
+  "presubmit": [
+    // TODO(b/148094059): unit tests not allowed to download content
+    // { "name": "AmrwbDecoderTest"}
+
+  ]
+}
diff --git a/media/libstagefright/codecs/amrwbenc/TEST_MAPPING b/media/libstagefright/codecs/amrwbenc/TEST_MAPPING
new file mode 100644
index 0000000..d53d665
--- /dev/null
+++ b/media/libstagefright/codecs/amrwbenc/TEST_MAPPING
@@ -0,0 +1,8 @@
+// mappings for frameworks/av/media/libstagefright/codecs/amrwbenc
+{
+  "presubmit": [
+    // TODO(b/148094059): unit tests not allowed to download content
+    // { "name": "AmrwbEncoderTest"}
+
+  ]
+}
diff --git a/media/libstagefright/codecs/m4v_h263/TEST_MAPPING b/media/libstagefright/codecs/m4v_h263/TEST_MAPPING
new file mode 100644
index 0000000..6b42847
--- /dev/null
+++ b/media/libstagefright/codecs/m4v_h263/TEST_MAPPING
@@ -0,0 +1,16 @@
+// mappings for frameworks/av/media/libstagefright/codecs/m4v_h263
+{
+  "presubmit": [
+    // TODO(b/148094059): unit tests not allowed to download content
+
+    // the decoder reports something bad about an unexpected newline in the *config file
+    // and the config file looks like the AndroidTest.xml file that we put in there.
+    // I don't get this from the Encoder -- and I don't see any substantive difference
+    // between decode and encode AndroidTest.xml files -- except that encode does NOT
+    // finish with a newline.
+    // strange.
+    // { "name": "Mpeg4H263DecoderTest"},
+    // { "name": "Mpeg4H263EncoderTest"}
+
+  ]
+}
diff --git a/media/libstagefright/codecs/m4v_h263/dec/test/Android.bp b/media/libstagefright/codecs/m4v_h263/dec/test/Android.bp
index 655491a..9c753e6 100644
--- a/media/libstagefright/codecs/m4v_h263/dec/test/Android.bp
+++ b/media/libstagefright/codecs/m4v_h263/dec/test/Android.bp
@@ -17,6 +17,7 @@
 cc_test {
     name: "Mpeg4H263DecoderTest",
     gtest: true,
+    test_suites: ["device-tests"],
 
     srcs: [
         "Mpeg4H263DecoderTest.cpp",
diff --git a/media/libstagefright/codecs/m4v_h263/enc/test/Android.bp b/media/libstagefright/codecs/m4v_h263/enc/test/Android.bp
index b9a8117..d2982da 100644
--- a/media/libstagefright/codecs/m4v_h263/enc/test/Android.bp
+++ b/media/libstagefright/codecs/m4v_h263/enc/test/Android.bp
@@ -17,6 +17,7 @@
 cc_test {
     name: "Mpeg4H263EncoderTest",
     gtest: true,
+    test_suites: ["device-tests"],
 
     srcs : [ "Mpeg4H263EncoderTest.cpp" ],
 
diff --git a/media/libstagefright/codecs/mp3dec/TEST_MAPPING b/media/libstagefright/codecs/mp3dec/TEST_MAPPING
new file mode 100644
index 0000000..b237d65
--- /dev/null
+++ b/media/libstagefright/codecs/mp3dec/TEST_MAPPING
@@ -0,0 +1,7 @@
+// mappings for frameworks/av/media/libstagefright/codecs/mp3dec
+{
+  "presubmit": [
+    // TODO(b/148094059): unit tests not allowed to download content
+    // { "name": "Mp3DecoderTest"}
+  ]
+}
diff --git a/media/libstagefright/codecs/mp3dec/test/Android.bp b/media/libstagefright/codecs/mp3dec/test/Android.bp
index 0ff8b12..6b92ae9 100644
--- a/media/libstagefright/codecs/mp3dec/test/Android.bp
+++ b/media/libstagefright/codecs/mp3dec/test/Android.bp
@@ -17,6 +17,7 @@
 cc_test {
     name: "Mp3DecoderTest",
     gtest: true,
+    test_suites: ["device-tests"],
 
     srcs: [
         "mp3reader.cpp",
diff --git a/media/libstagefright/foundation/TEST_MAPPING b/media/libstagefright/foundation/TEST_MAPPING
index 3301c4b..0d6a6da 100644
--- a/media/libstagefright/foundation/TEST_MAPPING
+++ b/media/libstagefright/foundation/TEST_MAPPING
@@ -1,5 +1,10 @@
+// mappings for frameworks/av/media/libstagefright/foundation
 {
   "presubmit": [
-    { "name": "sf_foundation_test" }
+    // TODO(b/148094059): unit tests not allowed to download content
+    //{ "name": "OpusHeaderTest" },
+
+    { "name": "sf_foundation_test" },
+    { "name": "MetaDataBaseUnitTest"}
   ]
 }
diff --git a/media/libstagefright/foundation/tests/Android.bp b/media/libstagefright/foundation/tests/Android.bp
index 45e81e8..9e67189 100644
--- a/media/libstagefright/foundation/tests/Android.bp
+++ b/media/libstagefright/foundation/tests/Android.bp
@@ -28,6 +28,7 @@
 
 cc_test {
     name: "MetaDataBaseUnitTest",
+    test_suites: ["device-tests"],
     gtest: true,
 
     srcs: [
diff --git a/media/libstagefright/id3/TEST_MAPPING b/media/libstagefright/id3/TEST_MAPPING
new file mode 100644
index 0000000..e4454c1
--- /dev/null
+++ b/media/libstagefright/id3/TEST_MAPPING
@@ -0,0 +1,20 @@
+// frameworks/av/media/libstagefright/id3
+{
+  "presubmit": [
+    // TODO(b/148094059): unit tests not allowed to download content
+    //{ "name": "ID3Test" },
+
+    // this doesn't seem to run any tests.
+    // but: cts-tradefed run -m CtsMediaTestCases -t android.media.cts.MediaMetadataRetrieverTest
+    // does run he 32 and 64 bit tests, but not the instant tests
+    // but all I know is that with 'atest', it's not running
+    {
+      "name": "CtsMediaTestCases",
+      "options": [
+          {
+            "include-filter": "android.media.cts.MediaMetadataRetrieverTest"
+          }
+      ]
+    }
+  ]
+}
diff --git a/media/libstagefright/include/FrameDecoder.h b/media/libstagefright/include/FrameDecoder.h
index 19ae0e3..bca7f01 100644
--- a/media/libstagefright/include/FrameDecoder.h
+++ b/media/libstagefright/include/FrameDecoder.h
@@ -135,7 +135,8 @@
 private:
     sp<FrameCaptureLayer> mCaptureLayer;
     VideoFrame *mFrame;
-    bool mIsAvcOrHevc;
+    bool mIsAvc;
+    bool mIsHevc;
     MediaSource::ReadOptions::SeekMode mSeekMode;
     int64_t mTargetTimeUs;
     List<int64_t> mSampleDurations;
diff --git a/media/libstagefright/include/HevcUtils.h b/media/libstagefright/include/HevcUtils.h
index 0f59631..d2a86eb 100644
--- a/media/libstagefright/include/HevcUtils.h
+++ b/media/libstagefright/include/HevcUtils.h
@@ -30,6 +30,10 @@
 namespace android {
 
 enum {
+    kHevcNalUnitTypeCodedSliceIdr = 19,
+    kHevcNalUnitTypeCodedSliceIdrNoLP = 20,
+    kHevcNalUnitTypeCodedSliceCra = 21,
+
     kHevcNalUnitTypeVps = 32,
     kHevcNalUnitTypeSps = 33,
     kHevcNalUnitTypePps = 34,
@@ -92,6 +96,7 @@
     status_t makeHvcc(uint8_t *hvcc, size_t *hvccSize, size_t nalSizeLength);
 
     Info getInfo() const { return mInfo; }
+    static bool IsHevcIDR(const uint8_t *data, size_t size);
 
 private:
     status_t parseVps(const uint8_t* data, size_t size);
diff --git a/media/libstagefright/include/media/stagefright/ACodec.h b/media/libstagefright/include/media/stagefright/ACodec.h
index 83e92b9..cc40f76 100644
--- a/media/libstagefright/include/media/stagefright/ACodec.h
+++ b/media/libstagefright/include/media/stagefright/ACodec.h
@@ -272,6 +272,7 @@
     bool mShutdownInProgress;
     bool mExplicitShutdown;
     bool mIsLegacyVP9Decoder;
+    bool mIsStreamCorruptFree;
 
     // If "mKeepComponentAllocated" we only transition back to Loaded state
     // and do not release the component instance.
diff --git a/media/libstagefright/include/media/stagefright/CameraSource.h b/media/libstagefright/include/media/stagefright/CameraSource.h
index 6f0d3b5..16e7d89 100644
--- a/media/libstagefright/include/media/stagefright/CameraSource.h
+++ b/media/libstagefright/include/media/stagefright/CameraSource.h
@@ -23,7 +23,6 @@
 #include <media/stagefright/MediaBuffer.h>
 #include <camera/android/hardware/ICamera.h>
 #include <camera/ICameraRecordingProxy.h>
-#include <camera/ICameraRecordingProxyListener.h>
 #include <camera/CameraParameters.h>
 #include <gui/BufferItemConsumer.h>
 #include <utils/List.h>
@@ -40,17 +39,6 @@
 class CameraSource : public MediaSource, public MediaBufferObserver {
 public:
     /**
-     * Factory method to create a new CameraSource using the current
-     * settings (such as video size, frame rate, color format, etc)
-     * from the default camera.
-     *
-     * @param clientName The package/process name of the client application.
-     *    This is used for permissions checking.
-     * @return NULL on error.
-     */
-    static CameraSource *Create(const String16 &clientName);
-
-    /**
      * Factory method to create a new CameraSource.
      *
      * @param camera the video input frame data source. If it is NULL,
@@ -89,8 +77,7 @@
                                           pid_t clientPid,
                                           Size videoSize,
                                           int32_t frameRate,
-                                          const sp<IGraphicBufferProducer>& surface,
-                                          bool storeMetaDataInVideoBuffers = true);
+                                          const sp<IGraphicBufferProducer>& surface);
 
     virtual ~CameraSource();
 
@@ -132,26 +119,6 @@
 protected:
 
     /**
-     * The class for listening to BnCameraRecordingProxyListener. This is used to receive video
-     * buffers in VIDEO_BUFFER_MODE_DATA_CALLBACK_YUV and VIDEO_BUFFER_MODE_DATA_CALLBACK_METADATA
-     * mode. When a frame is available, CameraSource::dataCallbackTimestamp() will be called.
-     */
-    class ProxyListener: public BnCameraRecordingProxyListener {
-    public:
-        ProxyListener(const sp<CameraSource>& source);
-        virtual void dataCallbackTimestamp(int64_t timestampUs, int32_t msgType,
-                const sp<IMemory> &data);
-        virtual void recordingFrameHandleCallbackTimestamp(int64_t timestampUs,
-                native_handle_t* handle);
-        virtual void recordingFrameHandleCallbackTimestampBatch(
-                const std::vector<int64_t>& timestampsUs,
-                const std::vector<native_handle_t*>& handles);
-
-    private:
-        sp<CameraSource> mSource;
-    };
-
-    /**
      * The class for listening to BufferQueue's onFrameAvailable. This is used to receive video
      * buffers in VIDEO_BUFFER_MODE_BUFFER_QUEUE mode. When a frame is available,
      * CameraSource::processBufferQueueFrame() will be called.
@@ -213,32 +180,15 @@
     CameraSource(const sp<hardware::ICamera>& camera, const sp<ICameraRecordingProxy>& proxy,
                  int32_t cameraId, const String16& clientName, uid_t clientUid, pid_t clientPid,
                  Size videoSize, int32_t frameRate,
-                 const sp<IGraphicBufferProducer>& surface,
-                 bool storeMetaDataInVideoBuffers);
+                 const sp<IGraphicBufferProducer>& surface);
 
     virtual status_t startCameraRecording();
     virtual void releaseRecordingFrame(const sp<IMemory>& frame);
-    virtual void releaseRecordingFrameHandle(native_handle_t* handle);
-    // stagefright recorder not using this for now
-    virtual void releaseRecordingFrameHandleBatch(const std::vector<native_handle_t*>& handles);
 
     // Returns true if need to skip the current frame.
     // Called from dataCallbackTimestamp.
     virtual bool skipCurrentFrame(int64_t /*timestampUs*/) {return false;}
 
-    // Callback called when still camera raw data is available.
-    virtual void dataCallback(int32_t /*msgType*/, const sp<IMemory>& /*data*/) {}
-
-    virtual void dataCallbackTimestamp(int64_t timestampUs, int32_t msgType,
-            const sp<IMemory> &data);
-
-    virtual void recordingFrameHandleCallbackTimestamp(int64_t timestampUs,
-            native_handle_t* handle);
-
-    virtual void recordingFrameHandleCallbackTimestampBatch(
-            const std::vector<int64_t>& timestampsUs,
-            const std::vector<native_handle_t*>& handles);
-
     // Process a buffer item received in BufferQueueListener.
     virtual void processBufferQueueFrame(BufferItem& buffer);
 
@@ -261,9 +211,6 @@
     int64_t mGlitchDurationThresholdUs;
     bool mCollectStats;
 
-    // The mode video buffers are received from camera. One of VIDEO_BUFFER_MODE_*.
-    int32_t mVideoBufferMode;
-
     static const uint32_t kDefaultVideoBufferCount = 32;
 
     /**
@@ -297,12 +244,12 @@
 
     status_t init(const sp<hardware::ICamera>& camera, const sp<ICameraRecordingProxy>& proxy,
                   int32_t cameraId, const String16& clientName, uid_t clientUid, pid_t clientPid,
-                  Size videoSize, int32_t frameRate, bool storeMetaDataInVideoBuffers);
+                  Size videoSize, int32_t frameRate);
 
     status_t initWithCameraAccess(
                   const sp<hardware::ICamera>& camera, const sp<ICameraRecordingProxy>& proxy,
                   int32_t cameraId, const String16& clientName, uid_t clientUid, pid_t clientPid,
-                  Size videoSize, int32_t frameRate, bool storeMetaDataInVideoBuffers);
+                  Size videoSize, int32_t frameRate);
 
     // Initialize the buffer queue used in VIDEO_BUFFER_MODE_BUFFER_QUEUE mode.
     status_t initBufferQueue(uint32_t width, uint32_t height, uint32_t format,
diff --git a/media/libstagefright/include/media/stagefright/CameraSourceTimeLapse.h b/media/libstagefright/include/media/stagefright/CameraSourceTimeLapse.h
index 533e33b..3c311cf 100644
--- a/media/libstagefright/include/media/stagefright/CameraSourceTimeLapse.h
+++ b/media/libstagefright/include/media/stagefright/CameraSourceTimeLapse.h
@@ -45,8 +45,7 @@
         Size videoSize,
         int32_t videoFrameRate,
         const sp<IGraphicBufferProducer>& surface,
-        int64_t timeBetweenTimeLapseFrameCaptureUs,
-        bool storeMetaDataInVideoBuffers = true);
+        int64_t timeBetweenTimeLapseFrameCaptureUs);
 
     virtual ~CameraSourceTimeLapse();
 
@@ -122,8 +121,7 @@
         Size videoSize,
         int32_t videoFrameRate,
         const sp<IGraphicBufferProducer>& surface,
-        int64_t timeBetweenTimeLapseFrameCaptureUs,
-        bool storeMetaDataInVideoBuffers = true);
+        int64_t timeBetweenTimeLapseFrameCaptureUs);
 
     // Wrapper over CameraSource::signalBufferReturned() to implement quick stop.
     // It only handles the case when mLastReadBufferCopy is signalled. Otherwise
@@ -137,33 +135,6 @@
     // frame needs to be skipped and this function just returns the value of mSkipCurrentFrame.
     virtual bool skipCurrentFrame(int64_t timestampUs);
 
-    // In the video camera case calls skipFrameAndModifyTimeStamp() to modify
-    // timestamp and set mSkipCurrentFrame.
-    // Then it calls the base CameraSource::dataCallbackTimestamp()
-    // This will be called in VIDEO_BUFFER_MODE_DATA_CALLBACK_YUV and
-    // VIDEO_BUFFER_MODE_DATA_CALLBACK_METADATA mode.
-    virtual void dataCallbackTimestamp(int64_t timestampUs, int32_t msgType,
-            const sp<IMemory> &data);
-
-    // In the video camera case calls skipFrameAndModifyTimeStamp() to modify
-    // timestamp and set mSkipCurrentFrame.
-    // Then it calls the base CameraSource::recordingFrameHandleCallbackTimestamp() or
-    // CameraSource::recordingFrameHandleCallbackTimestampBatch()
-    // This will be called in VIDEO_BUFFER_MODE_DATA_CALLBACK_METADATA mode when
-    // the metadata is VideoNativeHandleMetadata.
-    virtual void recordingFrameHandleCallbackTimestamp(int64_t timestampUs,
-            native_handle_t* handle);
-
-    // In the video camera case calls skipFrameAndModifyTimeStamp() to modify
-    // timestamp and set mSkipCurrentFrame.
-    // Then it calls the base CameraSource::recordingFrameHandleCallbackTimestamp() or
-    // CameraSource::recordingFrameHandleCallbackTimestampBatch()
-    // This will be called in VIDEO_BUFFER_MODE_DATA_CALLBACK_METADATA mode when
-    // the metadata is VideoNativeHandleMetadata.
-    virtual void recordingFrameHandleCallbackTimestampBatch(
-            const std::vector<int64_t>& timestampsUs,
-            const std::vector<native_handle_t*>& handles);
-
     // Process a buffer item received in CameraSource::BufferQueueListener.
     // This will be called in VIDEO_BUFFER_MODE_BUFFER_QUEUE mode.
     virtual void processBufferQueueFrame(BufferItem& buffer);
@@ -187,9 +158,6 @@
     // Wrapper to enter threadTimeLapseEntry()
     static void *ThreadTimeLapseWrapper(void *me);
 
-    // Creates a copy of source_data into a new memory of final type MemoryBase.
-    sp<IMemory> createIMemoryCopy(const sp<IMemory> &source_data);
-
     CameraSourceTimeLapse(const CameraSourceTimeLapse &);
     CameraSourceTimeLapse &operator=(const CameraSourceTimeLapse &);
 };
diff --git a/media/libstagefright/include/media/stagefright/MPEG4Writer.h b/media/libstagefright/include/media/stagefright/MPEG4Writer.h
index 501cf2c..2582ed0 100644
--- a/media/libstagefright/include/media/stagefright/MPEG4Writer.h
+++ b/media/libstagefright/include/media/stagefright/MPEG4Writer.h
@@ -46,7 +46,7 @@
 
     // Returns INVALID_OPERATION if there is no source or track.
     virtual status_t start(MetaData *param = NULL);
-    virtual status_t stop() { return reset(); }
+    virtual status_t stop();
     virtual status_t pause();
     virtual bool reachedEOS();
     virtual status_t dump(int fd, const Vector<String16>& args);
@@ -126,6 +126,7 @@
     bool mWriteSeekErr;
     bool mFallocateErr;
     bool mPreAllocationEnabled;
+    status_t mResetStatus;
     // Queue to hold top long write durations
     std::priority_queue<std::chrono::microseconds, std::vector<std::chrono::microseconds>,
                         std::greater<std::chrono::microseconds>> mWriteDurationPQ;
@@ -135,7 +136,9 @@
     sp<AHandlerReflector<MPEG4Writer> > mReflector;
 
     Mutex mLock;
+    // Serialize reset calls from client of MPEG4Writer and MP4WtrCtrlHlpLooper.
     std::mutex mResetMutex;
+    // Serialize preallocation calls from different track threads.
     std::mutex mFallocMutex;
     bool mPreAllocFirstTime; // Pre-allocate space for file and track headers only once per file.
     uint64_t mPrevAllTracksTotalMetaDataSizeEstimate;
@@ -304,7 +307,7 @@
     void writeGeoDataBox();
     void writeLatitude(int degreex10000);
     void writeLongitude(int degreex10000);
-    void finishCurrentSession();
+    status_t finishCurrentSession();
 
     void addDeviceMeta();
     void writeHdlr(const char *handlerType);
@@ -337,7 +340,7 @@
     void sendSessionSummary();
     status_t release();
     status_t switchFd();
-    status_t reset(bool stopSource = true);
+    status_t reset(bool stopSource = true, bool waitForAnyPreviousCallToComplete = true);
 
     static uint32_t getMpeg4Time();
 
diff --git a/media/libstagefright/include/media/stagefright/MediaAdapter.h b/media/libstagefright/include/media/stagefright/MediaAdapter.h
index 177a9e9..c7d7765 100644
--- a/media/libstagefright/include/media/stagefright/MediaAdapter.h
+++ b/media/libstagefright/include/media/stagefright/MediaAdapter.h
@@ -58,6 +58,7 @@
 
 private:
     Mutex mAdapterLock;
+    std::mutex mBufferGatingMutex;
     // Make sure the read() wait for the incoming buffer.
     Condition mBufferReadCond;
     // Make sure the pushBuffer() wait for the current buffer consumed.
diff --git a/media/libstagefright/include/media/stagefright/MediaCodec.h b/media/libstagefright/include/media/stagefright/MediaCodec.h
index f7e6c27..1f8e780 100644
--- a/media/libstagefright/include/media/stagefright/MediaCodec.h
+++ b/media/libstagefright/include/media/stagefright/MediaCodec.h
@@ -81,6 +81,13 @@
         BUFFER_FLAG_MUXER_DATA    = 16,
     };
 
+    enum CVODegree {
+        CVO_DEGREE_0   = 0,
+        CVO_DEGREE_90  = 90,
+        CVO_DEGREE_180 = 180,
+        CVO_DEGREE_270 = 270,
+    };
+
     enum {
         CB_INPUT_AVAILABLE = 1,
         CB_OUTPUT_AVAILABLE = 2,
diff --git a/media/libstagefright/include/media/stagefright/MediaCodecConstants.h b/media/libstagefright/include/media/stagefright/MediaCodecConstants.h
index 178d334..9c67338 100644
--- a/media/libstagefright/include/media/stagefright/MediaCodecConstants.h
+++ b/media/libstagefright/include/media/stagefright/MediaCodecConstants.h
@@ -744,6 +744,7 @@
 constexpr char KEY_AAC_MAX_OUTPUT_CHANNEL_COUNT[] = "aac-max-output-channel_count";
 constexpr char KEY_AAC_PROFILE[] = "aac-profile";
 constexpr char KEY_AAC_SBR_MODE[] = "aac-sbr-mode";
+constexpr char KEY_ALLOW_FRAME_DROP[] = "allow-frame-drop";
 constexpr char KEY_AUDIO_SESSION_ID[] = "audio-session-id";
 constexpr char KEY_BIT_RATE[] = "bitrate";
 constexpr char KEY_BITRATE_MODE[] = "bitrate-mode";
diff --git a/media/libstagefright/include/media/stagefright/MediaCodecSource.h b/media/libstagefright/include/media/stagefright/MediaCodecSource.h
index 2f98af1..0f7b535 100644
--- a/media/libstagefright/include/media/stagefright/MediaCodecSource.h
+++ b/media/libstagefright/include/media/stagefright/MediaCodecSource.h
@@ -64,12 +64,15 @@
 
     // MediaBufferObserver
     virtual void signalBufferReturned(MediaBufferBase *buffer);
+    virtual status_t setEncodingBitrate(int32_t bitRate /* bps */);
 
     // for AHandlerReflector
     void onMessageReceived(const sp<AMessage> &msg);
 
 
 
+    status_t requestIDRFrame();
+
 protected:
     virtual ~MediaCodecSource();
 
diff --git a/media/libstagefright/include/media/stagefright/MediaWriter.h b/media/libstagefright/include/media/stagefright/MediaWriter.h
index 1f4fbcb..17b1abf 100644
--- a/media/libstagefright/include/media/stagefright/MediaWriter.h
+++ b/media/libstagefright/include/media/stagefright/MediaWriter.h
@@ -54,6 +54,10 @@
     virtual void setStartTimeOffsetMs(int /*ms*/) {}
     virtual int32_t getStartTimeOffsetMs() const { return 0; }
     virtual status_t setNextFd(int /*fd*/) { return INVALID_OPERATION; }
+    virtual void updateCVODegrees(int32_t /*cvoDegrees*/) {}
+    virtual void updatePayloadType(int32_t /*payloadType*/) {}
+    virtual void updateSocketNetwork(int64_t /*socketNetwork*/) {}
+    virtual uint32_t getSequenceNum() { return 0; }
 
 protected:
     virtual ~MediaWriter() {}
diff --git a/media/libstagefright/include/media/stagefright/MetaDataBase.h b/media/libstagefright/include/media/stagefright/MetaDataBase.h
index 64eb8b4..173b701 100644
--- a/media/libstagefright/include/media/stagefright/MetaDataBase.h
+++ b/media/libstagefright/include/media/stagefright/MetaDataBase.h
@@ -247,6 +247,15 @@
 
     // Treat empty track as malformed for MediaRecorder.
     kKeyEmptyTrackMalFormed = 'nemt', // bool (int32_t)
+
+    kKeySps              = 'sSps', // int32_t, indicates that a buffer is sps (value ignored).
+    kKeyPps              = 'sPps', // int32_t, indicates that a buffer is pps (value ignored).
+    kKeySelfID           = 'sfid', // int32_t, source ID to identify itself on RTP protocol.
+    kKeyPayloadType      = 'pTyp', // int32_t, SDP negotiated payload type.
+    kKeyRtpExtMap        = 'extm', // int32_t, rtp extension ID for cvo on RTP protocol.
+    kKeyRtpCvoDegrees    = 'cvod', // int32_t, rtp cvo degrees as per 3GPP 26.114.
+    kKeyRtpDscp          = 'dscp', // int32_t, DSCP(Differentiated services codepoint) of RFC 2474.
+    kKeySocketNetwork    = 'sNet', // int64_t, socket will be bound to network handle.
 };
 
 enum {
diff --git a/media/libstagefright/include/media/stagefright/RemoteDataSource.h b/media/libstagefright/include/media/stagefright/RemoteDataSource.h
index d82be8a..d605cda 100644
--- a/media/libstagefright/include/media/stagefright/RemoteDataSource.h
+++ b/media/libstagefright/include/media/stagefright/RemoteDataSource.h
@@ -41,6 +41,11 @@
         close();
     }
     virtual sp<IMemory> getIMemory() {
+        Mutex::Autolock lock(mLock);
+        if (mMemory.get() == nullptr) {
+            ALOGE("getIMemory() failed, mMemory is nullptr");
+            return nullptr;
+        }
         return mMemory;
     }
     virtual ssize_t readAt(off64_t offset, size_t size) {
@@ -48,19 +53,35 @@
         if (size > kBufferSize) {
             size = kBufferSize;
         }
+
+        Mutex::Autolock lock(mLock);
+        if (mSource.get() == nullptr) {
+            ALOGE("readAt() failed, mSource is nullptr");
+            return 0;
+        }
         return mSource->readAt(offset, mMemory->unsecurePointer(), size);
     }
     virtual status_t getSize(off64_t *size) {
+        Mutex::Autolock lock(mLock);
+        if (mSource.get() == nullptr) {
+            ALOGE("getSize() failed, mSource is nullptr");
+            return INVALID_OPERATION;
+        }
         return mSource->getSize(size);
     }
     virtual void close() {
         // Protect strong pointer assignments. This also can be called from the binder
         // clean-up procedure which is running on a separate thread.
-        Mutex::Autolock lock(mCloseLock);
+        Mutex::Autolock lock(mLock);
         mSource = nullptr;
         mMemory = nullptr;
     }
     virtual uint32_t getFlags() {
+        Mutex::Autolock lock(mLock);
+        if (mSource.get() == nullptr) {
+            ALOGE("getSize() failed, mSource is nullptr");
+            return 0;
+        }
         return mSource->flags();
     }
     virtual String8 toString()  {
@@ -75,9 +96,10 @@
     sp<IMemory> mMemory;
     sp<DataSource> mSource;
     String8 mName;
-    Mutex mCloseLock;
+    Mutex mLock;
 
     explicit RemoteDataSource(const sp<DataSource> &source) {
+        Mutex::Autolock lock(mLock);
         mSource = source;
         sp<MemoryDealer> memoryDealer = new MemoryDealer(kBufferSize, "RemoteDataSource");
         mMemory = memoryDealer->allocate(kBufferSize);
diff --git a/media/libstagefright/include/media/stagefright/SurfaceUtils.h b/media/libstagefright/include/media/stagefright/SurfaceUtils.h
index ae55c65..35b3fa2 100644
--- a/media/libstagefright/include/media/stagefright/SurfaceUtils.h
+++ b/media/libstagefright/include/media/stagefright/SurfaceUtils.h
@@ -38,6 +38,8 @@
         int width, int height, int format, int rotation, int usage, bool reconnect);
 void setNativeWindowHdrMetadata(
         ANativeWindow *nativeWindow /* nonnull */, HDRStaticInfo *info /* nonnull */);
+status_t setNativeWindowRotation(
+        ANativeWindow *nativeWindow /* nonnull */, int rotation);
 status_t pushBlankBuffersToNativeWindow(ANativeWindow *nativeWindow /* nonnull */);
 status_t nativeWindowConnect(ANativeWindow *surface, const char *reason);
 status_t nativeWindowDisconnect(ANativeWindow *surface, const char *reason);
diff --git a/media/libstagefright/mpeg2ts/TEST_MAPPING b/media/libstagefright/mpeg2ts/TEST_MAPPING
new file mode 100644
index 0000000..b25d732
--- /dev/null
+++ b/media/libstagefright/mpeg2ts/TEST_MAPPING
@@ -0,0 +1,7 @@
+// frameworks/av/media/libstagefright/mpeg2ts
+{
+  "presubmit": [
+    // TODO(b/148094059): unit tests not allowed to download content
+    //{ "name": "Mpeg2tsUnitTest" }
+  ]
+}
diff --git a/media/libstagefright/mpeg2ts/test/Android.bp b/media/libstagefright/mpeg2ts/test/Android.bp
index 4e4832a..d8b0304 100644
--- a/media/libstagefright/mpeg2ts/test/Android.bp
+++ b/media/libstagefright/mpeg2ts/test/Android.bp
@@ -17,6 +17,7 @@
 cc_test{
     name: "Mpeg2tsUnitTest",
     gtest: true,
+    test_suites: ["device-tests"],
 
     srcs: [
         "Mpeg2tsUnitTest.cpp"
diff --git a/media/libstagefright/omx/1.0/WGraphicBufferSource.cpp b/media/libstagefright/omx/1.0/WGraphicBufferSource.cpp
index 7d217eb..f7bf3ba 100644
--- a/media/libstagefright/omx/1.0/WGraphicBufferSource.cpp
+++ b/media/libstagefright/omx/1.0/WGraphicBufferSource.cpp
@@ -67,7 +67,7 @@
             int32_t dataSpace, int32_t aspects, int32_t pixelFormat) override {
         Message tMsg;
         tMsg.type = Message::Type::EVENT;
-        tMsg.fence = native_handle_create(0, 0);
+        tMsg.fence.setTo(native_handle_create(0, 0), /* shouldOwn = */ true);
         tMsg.data.eventData.event = uint32_t(OMX_EventDataSpaceChanged);
         tMsg.data.eventData.data1 = dataSpace;
         tMsg.data.eventData.data2 = aspects;
diff --git a/media/libstagefright/rtsp/AAVCAssembler.cpp b/media/libstagefright/rtsp/AAVCAssembler.cpp
index 4bc67e8..0164040 100644
--- a/media/libstagefright/rtsp/AAVCAssembler.cpp
+++ b/media/libstagefright/rtsp/AAVCAssembler.cpp
@@ -51,7 +51,66 @@
         return NOT_ENOUGH_DATA;
     }
 
+    sp<ABuffer> buffer = *queue->begin();
+    int32_t rtpTime;
+    CHECK(buffer->meta()->findInt32("rtp-time", &rtpTime));
+    int64_t startTime = source->mFirstSysTime / 1000;
+    int64_t nowTime = ALooper::GetNowUs() / 1000;
+    int64_t playedTime = nowTime - startTime;
+    int32_t playedTimeRtp = source->mFirstRtpTime +
+        (((uint32_t)playedTime) * (source->mClockRate / 1000));
+    const int32_t jitterTime = source->mClockRate / 5;  // 200ms
+    int32_t expiredTimeInJb = rtpTime + jitterTime;
+    bool isExpired = expiredTimeInJb <= (playedTimeRtp);
+    bool isTooLate200 = expiredTimeInJb < (playedTimeRtp - jitterTime);
+    bool isTooLate300 = expiredTimeInJb < (playedTimeRtp - (jitterTime * 3 / 2));
+
+    if (mShowQueue && mShowQueueCnt < 20) {
+        showCurrentQueue(queue);
+        ALOGD("start=%lld, now=%lld, played=%lld", (long long)startTime,
+                (long long)nowTime, (long long)playedTime);
+        ALOGD("rtp-time(JB)=%d, played-rtp-time(JB)=%d, expired-rtp-time(JB)=%d isExpired=%d",
+                rtpTime, playedTimeRtp, expiredTimeInJb, isExpired);
+        mShowQueueCnt++;
+    }
+
+    ALOGV("start=%lld, now=%lld, played=%lld", (long long)startTime,
+            (long long)nowTime, (long long)playedTime);
+    ALOGV("rtp-time(JB)=%d, played-rtp-time(JB)=%d, expired-rtp-time(JB)=%d isExpired=%d",
+            rtpTime, playedTimeRtp, expiredTimeInJb, isExpired);
+
+    if (!isExpired) {
+        ALOGV("buffering in jitter buffer.");
+        return NOT_ENOUGH_DATA;
+    }
+
+    if (isTooLate200)
+        ALOGW("=== WARNING === buffer arrived 200ms late. === WARNING === ");
+
+    if (isTooLate300) {
+        ALOGW("buffer arrived too late. 300ms..");
+        ALOGW("start=%lld, now=%lld, played=%lld", (long long)startTime,
+                (long long)nowTime, (long long)playedTime);
+        ALOGW("rtp-time(JB)=%d, plyed-rtp-time(JB)=%d, exp-rtp-time(JB)=%d diff=%lld isExpired=%d",
+                rtpTime, playedTimeRtp, expiredTimeInJb,
+                ((long long)playedTimeRtp) - expiredTimeInJb, isExpired);
+        ALOGW("expected Seq. NO =%d", buffer->int32Data());
+
+        List<sp<ABuffer> >::iterator it = queue->begin();
+        while (it != queue->end()) {
+            CHECK((*it)->meta()->findInt32("rtp-time", &rtpTime));
+            if (rtpTime + jitterTime >= playedTimeRtp) {
+                mNextExpectedSeqNo = (*it)->int32Data();
+                break;
+            }
+            it++;
+        }
+        source->noticeAbandonBuffer();
+    }
+
     if (mNextExpectedSeqNoValid) {
+        int32_t size = queue->size();
+        int32_t cnt = 0;
         List<sp<ABuffer> >::iterator it = queue->begin();
         while (it != queue->end()) {
             if ((uint32_t)(*it)->int32Data() >= mNextExpectedSeqNo) {
@@ -59,14 +118,19 @@
             }
 
             it = queue->erase(it);
+            cnt++;
         }
 
+        if (cnt > 0) {
+            source->noticeAbandonBuffer(cnt);
+            ALOGW("delete %d of %d buffers", cnt, size);
+        }
         if (queue->empty()) {
             return NOT_ENOUGH_DATA;
         }
     }
 
-    sp<ABuffer> buffer = *queue->begin();
+    buffer = *queue->begin();
 
     if (!mNextExpectedSeqNoValid) {
         mNextExpectedSeqNoValid = true;
@@ -290,6 +354,7 @@
     unit->data()[0] = (nri << 5) | nalType;
 
     size_t offset = 1;
+    int32_t cvo = -1;
     List<sp<ABuffer> >::iterator it = queue->begin();
     for (size_t i = 0; i < totalCount; ++i) {
         const sp<ABuffer> &buffer = *it;
@@ -300,6 +365,8 @@
 #endif
 
         memcpy(unit->data() + offset, buffer->data() + 2, buffer->size() - 2);
+
+        buffer->meta()->findInt32("cvo", &cvo);
         offset += buffer->size() - 2;
 
         it = queue->erase(it);
@@ -307,6 +374,10 @@
 
     unit->setRange(0, totalSize);
 
+    if (cvo >= 0) {
+        unit->meta()->setInt32("cvo", cvo);
+    }
+
     addSingleNALUnit(unit);
 
     ALOGV("successfully assembled a NAL unit from fragments.");
@@ -327,6 +398,7 @@
 
     sp<ABuffer> accessUnit = new ABuffer(totalSize);
     size_t offset = 0;
+    int32_t cvo = -1;
     for (List<sp<ABuffer> >::iterator it = mNALUnits.begin();
          it != mNALUnits.end(); ++it) {
         memcpy(accessUnit->data() + offset, "\x00\x00\x00\x01", 4);
@@ -335,6 +407,8 @@
         sp<ABuffer> nal = *it;
         memcpy(accessUnit->data() + offset, nal->data(), nal->size());
         offset += nal->size();
+
+        nal->meta()->findInt32("cvo", &cvo);
     }
 
     CopyTimes(accessUnit, *mNALUnits.begin());
@@ -343,6 +417,9 @@
     printf(mAccessUnitDamaged ? "X" : ".");
     fflush(stdout);
 #endif
+    if (cvo >= 0) {
+        accessUnit->meta()->setInt32("cvo", cvo);
+    }
 
     if (mAccessUnitDamaged) {
         accessUnit->meta()->setInt32("damaged", true);
diff --git a/media/libstagefright/rtsp/AHEVCAssembler.cpp b/media/libstagefright/rtsp/AHEVCAssembler.cpp
new file mode 100644
index 0000000..93869fb
--- /dev/null
+++ b/media/libstagefright/rtsp/AHEVCAssembler.cpp
@@ -0,0 +1,428 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_NDEBUG 0
+#define LOG_TAG "AHEVCAssembler"
+#include <utils/Log.h>
+
+#include "AHEVCAssembler.h"
+
+#include "ARTPSource.h"
+
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/foundation/hexdump.h>
+
+#include <stdint.h>
+
+#define H265_NALU_MASK 0x3F
+#define H265_NALU_VPS 0x20
+#define H265_NALU_SPS 0x21
+#define H265_NALU_PPS 0x22
+#define H265_NALU_AP 0x30
+#define H265_NALU_FU 0x31
+#define H265_NALU_PACI 0x32
+
+
+namespace android {
+
+// static
+AHEVCAssembler::AHEVCAssembler(const sp<AMessage> &notify)
+    : mNotifyMsg(notify),
+      mAccessUnitRTPTime(0),
+      mNextExpectedSeqNoValid(false),
+      mNextExpectedSeqNo(0),
+      mAccessUnitDamaged(false) {
+
+      ALOGV("Constructor");
+}
+
+AHEVCAssembler::~AHEVCAssembler() {
+}
+
+ARTPAssembler::AssemblyStatus AHEVCAssembler::addNALUnit(
+        const sp<ARTPSource> &source) {
+    List<sp<ABuffer> > *queue = source->queue();
+
+    if (queue->empty()) {
+        return NOT_ENOUGH_DATA;
+    }
+
+    sp<ABuffer> buffer = *queue->begin();
+    int32_t rtpTime;
+    CHECK(buffer->meta()->findInt32("rtp-time", &rtpTime));
+    int64_t startTime = source->mFirstSysTime / 1000;
+    int64_t nowTime = ALooper::GetNowUs() / 1000;
+    int64_t playedTime = nowTime - startTime;
+    int32_t playedTimeRtp = source->mFirstRtpTime +
+        (((uint32_t)playedTime) * (source->mClockRate / 1000));
+    int32_t expiredTimeInJb = rtpTime + (source->mClockRate / 5);
+    bool isExpired = expiredTimeInJb <= (playedTimeRtp);
+    ALOGV("start=%lld, now=%lld, played=%lld", (long long)startTime,
+            (long long)nowTime, (long long)playedTime);
+    ALOGV("rtp-time(JB)=%d, played-rtp-time(JB)=%d, expired-rtp-time(JB)=%d isExpired=%d",
+            rtpTime, playedTimeRtp, expiredTimeInJb, isExpired);
+
+    if (!isExpired) {
+        ALOGV("buffering in jitter buffer.");
+        return NOT_ENOUGH_DATA;
+    }
+
+    if (mNextExpectedSeqNoValid) {
+        List<sp<ABuffer> >::iterator it = queue->begin();
+        while (it != queue->end()) {
+            if ((uint32_t)(*it)->int32Data() >= mNextExpectedSeqNo) {
+                break;
+            }
+
+            it = queue->erase(it);
+        }
+
+        if (queue->empty()) {
+            return NOT_ENOUGH_DATA;
+        }
+    }
+
+    buffer = *queue->begin();
+
+    if (!mNextExpectedSeqNoValid) {
+        mNextExpectedSeqNoValid = true;
+        mNextExpectedSeqNo = (uint32_t)buffer->int32Data();
+    } else if ((uint32_t)buffer->int32Data() != mNextExpectedSeqNo) {
+        ALOGV("Not the sequence number I expected");
+
+        return WRONG_SEQUENCE_NUMBER;
+    }
+
+    const uint8_t *data = buffer->data();
+    size_t size = buffer->size();
+
+    if (size < 1 || (data[0] & 0x80)) {
+        // Corrupt.
+
+        ALOGV("Ignoring corrupt buffer.");
+        queue->erase(queue->begin());
+
+        ++mNextExpectedSeqNo;
+        return MALFORMED_PACKET;
+    }
+
+    unsigned nalType = (data[0] >> 1) & H265_NALU_MASK;
+    if (nalType > 0 && nalType < H265_NALU_AP) {
+        addSingleNALUnit(buffer);
+        queue->erase(queue->begin());
+        ++mNextExpectedSeqNo;
+        return OK;
+    } else if (nalType == H265_NALU_FU) {
+        // FU-A
+        return addFragmentedNALUnit(queue);
+    } else if (nalType == H265_NALU_AP) {
+        // STAP-A
+        bool success = addSingleTimeAggregationPacket(buffer);
+        queue->erase(queue->begin());
+        ++mNextExpectedSeqNo;
+
+        return success ? OK : MALFORMED_PACKET;
+    } else if (nalType == 0) {
+        ALOGV("Ignoring undefined nal type.");
+
+        queue->erase(queue->begin());
+        ++mNextExpectedSeqNo;
+
+        return OK;
+    } else {
+        ALOGV("Ignoring unsupported buffer (nalType=%d)", nalType);
+
+        queue->erase(queue->begin());
+        ++mNextExpectedSeqNo;
+
+        return MALFORMED_PACKET;
+    }
+}
+
+void AHEVCAssembler::addSingleNALUnit(const sp<ABuffer> &buffer) {
+    ALOGV("addSingleNALUnit of size %zu", buffer->size());
+#if !LOG_NDEBUG
+    hexdump(buffer->data(), buffer->size());
+#endif
+
+    uint32_t rtpTime;
+    CHECK(buffer->meta()->findInt32("rtp-time", (int32_t *)&rtpTime));
+
+    if (!mNALUnits.empty() && rtpTime != mAccessUnitRTPTime) {
+        submitAccessUnit();
+    }
+    mAccessUnitRTPTime = rtpTime;
+
+    mNALUnits.push_back(buffer);
+}
+
+bool AHEVCAssembler::addSingleTimeAggregationPacket(const sp<ABuffer> &buffer) {
+    const uint8_t *data = buffer->data();
+    size_t size = buffer->size();
+
+    if (size < 3) {
+        ALOGV("Discarding too small STAP-A packet.");
+        return false;
+    }
+
+    ++data;
+    --size;
+    while (size >= 2) {
+        size_t nalSize = (data[0] << 8) | data[1];
+
+        if (size < nalSize + 2) {
+            ALOGV("Discarding malformed STAP-A packet.");
+            return false;
+        }
+
+        sp<ABuffer> unit = new ABuffer(nalSize);
+        memcpy(unit->data(), &data[2], nalSize);
+
+        CopyTimes(unit, buffer);
+
+        addSingleNALUnit(unit);
+
+        data += 2 + nalSize;
+        size -= 2 + nalSize;
+    }
+
+    if (size != 0) {
+        ALOGV("Unexpected padding at end of STAP-A packet.");
+    }
+
+    return true;
+}
+
+ARTPAssembler::AssemblyStatus AHEVCAssembler::addFragmentedNALUnit(
+        List<sp<ABuffer> > *queue) {
+    CHECK(!queue->empty());
+
+    sp<ABuffer> buffer = *queue->begin();
+    const uint8_t *data = buffer->data();
+    size_t size = buffer->size();
+
+    CHECK(size > 0);
+    /*   H265 payload header is 16 bit
+        0 1 2 3 4 5 6 7 0 1 2 3 4 5 6 7
+       +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+       |F|     Type  |  Layer ID | TID |
+       +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+     */
+    unsigned indicator = (data[0] >> 1);
+
+    CHECK((indicator & H265_NALU_MASK) == H265_NALU_FU);
+
+    if (size < 3) {
+        ALOGV("Ignoring malformed FU buffer (size = %zu)", size);
+
+        queue->erase(queue->begin());
+        ++mNextExpectedSeqNo;
+        return MALFORMED_PACKET;
+    }
+
+    if (!(data[2] & 0x80)) {
+        // Start bit not set on the first buffer.
+
+        ALOGV("Start bit not set on first buffer");
+
+        queue->erase(queue->begin());
+        ++mNextExpectedSeqNo;
+        return MALFORMED_PACKET;
+    }
+
+    /*  FU INDICATOR HDR
+        0 1 2 3 4 5 6 7
+       +-+-+-+-+-+-+-+-+
+       |S|E|   Type    |
+       +-+-+-+-+-+-+-+-+
+     */
+    uint32_t nalType = data[2] & H265_NALU_MASK;
+    uint32_t tid = data[1] & 0x7;
+    ALOGV("nalType =%u, tid =%u", nalType, tid);
+
+    uint32_t expectedSeqNo = (uint32_t)buffer->int32Data() + 1;
+    size_t totalSize = size - 3;
+    size_t totalCount = 1;
+    bool complete = false;
+
+    if (data[2] & 0x40) {
+        // Huh? End bit also set on the first buffer.
+
+        ALOGV("Grrr. This isn't fragmented at all.");
+
+        complete = true;
+    } else {
+        List<sp<ABuffer> >::iterator it = ++queue->begin();
+        while (it != queue->end()) {
+            ALOGV("sequence length %zu", totalCount);
+
+            const sp<ABuffer> &buffer = *it;
+
+            const uint8_t *data = buffer->data();
+            size_t size = buffer->size();
+
+            if ((uint32_t)buffer->int32Data() != expectedSeqNo) {
+                ALOGV("sequence not complete, expected seqNo %d, got %d",
+                     expectedSeqNo, (uint32_t)buffer->int32Data());
+
+                return WRONG_SEQUENCE_NUMBER;
+            }
+
+            if (size < 3
+                    || ((data[0] >> 1) & H265_NALU_MASK) != indicator
+                    || (data[2] & H265_NALU_MASK) != nalType
+                    || (data[2] & 0x80)) {
+                ALOGV("Ignoring malformed FU buffer.");
+
+                // Delete the whole start of the FU.
+
+                it = queue->begin();
+                for (size_t i = 0; i <= totalCount; ++i) {
+                    it = queue->erase(it);
+                }
+
+                mNextExpectedSeqNo = expectedSeqNo + 1;
+
+                return MALFORMED_PACKET;
+            }
+
+            totalSize += size - 3;
+            ++totalCount;
+
+            expectedSeqNo = expectedSeqNo + 1;
+
+            if (data[2] & 0x40) {
+                // This is the last fragment.
+                complete = true;
+                break;
+            }
+
+            ++it;
+        }
+    }
+
+    if (!complete) {
+        return NOT_ENOUGH_DATA;
+    }
+
+    mNextExpectedSeqNo = expectedSeqNo;
+
+    // We found all the fragments that make up the complete NAL unit.
+
+    // Leave room for the header. So far totalSize did not include the
+    // header byte.
+    totalSize += 2;
+
+    sp<ABuffer> unit = new ABuffer(totalSize);
+    CopyTimes(unit, *queue->begin());
+
+    unit->data()[0] = (nalType << 1);
+    unit->data()[1] = tid;
+
+    size_t offset = 2;
+    List<sp<ABuffer> >::iterator it = queue->begin();
+    for (size_t i = 0; i < totalCount; ++i) {
+        const sp<ABuffer> &buffer = *it;
+
+        ALOGV("piece #%zu/%zu", i + 1, totalCount);
+#if !LOG_NDEBUG
+        hexdump(buffer->data(), buffer->size());
+#endif
+
+        memcpy(unit->data() + offset, buffer->data() + 3, buffer->size() - 3);
+        offset += buffer->size() - 3;
+
+        it = queue->erase(it);
+    }
+
+    unit->setRange(0, totalSize);
+
+    addSingleNALUnit(unit);
+
+    ALOGV("successfully assembled a NAL unit from fragments.");
+
+    return OK;
+}
+
+void AHEVCAssembler::submitAccessUnit() {
+    CHECK(!mNALUnits.empty());
+
+    ALOGV("Access unit complete (%zu nal units)", mNALUnits.size());
+
+    size_t totalSize = 0;
+    for (List<sp<ABuffer> >::iterator it = mNALUnits.begin();
+         it != mNALUnits.end(); ++it) {
+        totalSize += 4 + (*it)->size();
+    }
+
+    sp<ABuffer> accessUnit = new ABuffer(totalSize);
+    size_t offset = 0;
+    for (List<sp<ABuffer> >::iterator it = mNALUnits.begin();
+         it != mNALUnits.end(); ++it) {
+        memcpy(accessUnit->data() + offset, "\x00\x00\x00\x01", 4);
+        offset += 4;
+
+        sp<ABuffer> nal = *it;
+        memcpy(accessUnit->data() + offset, nal->data(), nal->size());
+        offset += nal->size();
+    }
+
+    CopyTimes(accessUnit, *mNALUnits.begin());
+
+#if 0
+    printf(mAccessUnitDamaged ? "X" : ".");
+    fflush(stdout);
+#endif
+
+    if (mAccessUnitDamaged) {
+        accessUnit->meta()->setInt32("damaged", true);
+    }
+
+    mNALUnits.clear();
+    mAccessUnitDamaged = false;
+
+    sp<AMessage> msg = mNotifyMsg->dup();
+    msg->setBuffer("access-unit", accessUnit);
+    msg->post();
+}
+
+ARTPAssembler::AssemblyStatus AHEVCAssembler::assembleMore(
+        const sp<ARTPSource> &source) {
+    AssemblyStatus status = addNALUnit(source);
+    if (status == MALFORMED_PACKET) {
+        mAccessUnitDamaged = true;
+    }
+    return status;
+}
+
+void AHEVCAssembler::packetLost() {
+    CHECK(mNextExpectedSeqNoValid);
+    ALOGV("packetLost (expected %d)", mNextExpectedSeqNo);
+
+    ++mNextExpectedSeqNo;
+
+    mAccessUnitDamaged = true;
+}
+
+void AHEVCAssembler::onByeReceived() {
+    sp<AMessage> msg = mNotifyMsg->dup();
+    msg->setInt32("eos", true);
+    msg->post();
+}
+
+}  // namespace android
diff --git a/media/libstagefright/rtsp/AHEVCAssembler.h b/media/libstagefright/rtsp/AHEVCAssembler.h
new file mode 100644
index 0000000..cc20622
--- /dev/null
+++ b/media/libstagefright/rtsp/AHEVCAssembler.h
@@ -0,0 +1,62 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef A_HEVC_ASSEMBLER_H_
+
+#define A_HEVC_ASSEMBLER_H_
+
+#include "ARTPAssembler.h"
+
+#include <utils/List.h>
+#include <utils/RefBase.h>
+
+namespace android {
+
+struct ABuffer;
+struct AMessage;
+
+struct AHEVCAssembler : public ARTPAssembler {
+    AHEVCAssembler(const sp<AMessage> &notify);
+
+protected:
+    virtual ~AHEVCAssembler();
+
+    virtual AssemblyStatus assembleMore(const sp<ARTPSource> &source);
+    virtual void onByeReceived();
+    virtual void packetLost();
+
+private:
+    sp<AMessage> mNotifyMsg;
+
+    uint32_t mAccessUnitRTPTime;
+    bool mNextExpectedSeqNoValid;
+    uint32_t mNextExpectedSeqNo;
+    bool mAccessUnitDamaged;
+    List<sp<ABuffer> > mNALUnits;
+
+    AssemblyStatus addNALUnit(const sp<ARTPSource> &source);
+    void addSingleNALUnit(const sp<ABuffer> &buffer);
+    AssemblyStatus addFragmentedNALUnit(List<sp<ABuffer> > *queue);
+    bool addSingleTimeAggregationPacket(const sp<ABuffer> &buffer);
+
+    void submitAccessUnit();
+
+    DISALLOW_EVIL_CONSTRUCTORS(AHEVCAssembler);
+};
+
+}  // namespace android
+
+#endif  // A_HEVC_ASSEMBLER_H_
diff --git a/media/libstagefright/rtsp/APacketSource.cpp b/media/libstagefright/rtsp/APacketSource.cpp
index 574bd7a..8f4df8e 100644
--- a/media/libstagefright/rtsp/APacketSource.cpp
+++ b/media/libstagefright/rtsp/APacketSource.cpp
@@ -454,6 +454,17 @@
 
         mFormat->setInt32(kKeyWidth, width);
         mFormat->setInt32(kKeyHeight, height);
+    } else if (!strncmp(desc.c_str(), "H265/", 5)) {
+        mFormat->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_HEVC);
+
+        int32_t width, height;
+        if (!sessionDesc->getDimensions(index, PT, &width, &height)) {
+            width = -1;
+            height = -1;
+        }
+
+        mFormat->setInt32(kKeyWidth, width);
+        mFormat->setInt32(kKeyHeight, height);
     } else if (!strncmp(desc.c_str(), "H263-2000/", 10)
             || !strncmp(desc.c_str(), "H263-1998/", 10)) {
         mFormat->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_H263);
diff --git a/media/libstagefright/rtsp/ARTPAssembler.cpp b/media/libstagefright/rtsp/ARTPAssembler.cpp
index befc226..52aa3a0 100644
--- a/media/libstagefright/rtsp/ARTPAssembler.cpp
+++ b/media/libstagefright/rtsp/ARTPAssembler.cpp
@@ -14,6 +14,7 @@
  * limitations under the License.
  */
 
+#define LOG_TAG "ARTPAssembler"
 #include "ARTPAssembler.h"
 
 #include <media/stagefright/foundation/ABuffer.h>
@@ -21,12 +22,16 @@
 #include <media/stagefright/foundation/ALooper.h>
 #include <media/stagefright/foundation/AMessage.h>
 
+#include <android-base/properties.h>
+
 #include <stdint.h>
 
 namespace android {
 
 ARTPAssembler::ARTPAssembler()
-    : mFirstFailureTimeUs(-1) {
+    : mShowQueueCnt(0),
+      mFirstFailureTimeUs(-1) {
+    mShowQueue = android::base::GetBoolProperty("debug.stagefright.rtp", false);
 }
 
 void ARTPAssembler::onPacketReceived(const sp<ARTPSource> &source) {
@@ -141,4 +146,15 @@
     return accessUnit;
 }
 
+void ARTPAssembler::showCurrentQueue(List<sp<ABuffer> > *queue) {
+    AString temp("Queue elem size : ");
+    List<sp<ABuffer> >::iterator it = queue->begin();
+    while (it != queue->end()) {
+        temp.append((*it)->size());
+        temp.append("  \t");
+        it++;
+    }
+    ALOGD("%s",temp.c_str());
+};
+
 }  // namespace android
diff --git a/media/libstagefright/rtsp/ARTPAssembler.h b/media/libstagefright/rtsp/ARTPAssembler.h
index 4082d4c..191f08e 100644
--- a/media/libstagefright/rtsp/ARTPAssembler.h
+++ b/media/libstagefright/rtsp/ARTPAssembler.h
@@ -56,6 +56,11 @@
     static sp<ABuffer> MakeCompoundFromPackets(
             const List<sp<ABuffer> > &frames);
 
+    void showCurrentQueue(List<sp<ABuffer> > *queue);
+
+    bool mShowQueue;
+    int32_t mShowQueueCnt;
+
 private:
     int64_t mFirstFailureTimeUs;
 
diff --git a/media/libstagefright/rtsp/ARTPConnection.cpp b/media/libstagefright/rtsp/ARTPConnection.cpp
index 6a4706d..1346c9a 100644
--- a/media/libstagefright/rtsp/ARTPConnection.cpp
+++ b/media/libstagefright/rtsp/ARTPConnection.cpp
@@ -30,6 +30,8 @@
 #include <media/stagefright/foundation/AString.h>
 #include <media/stagefright/foundation/hexdump.h>
 
+#include <android/multinetwork.h>
+
 #include <arpa/inet.h>
 #include <sys/socket.h>
 
@@ -53,6 +55,7 @@
 const int64_t ARTPConnection::kSelectTimeoutUs = 1000LL;
 
 struct ARTPConnection::StreamInfo {
+    bool isIPv6;
     int mRTPSocket;
     int mRTCPSocket;
     sp<ASessionDescription> mSessionDesc;
@@ -63,14 +66,19 @@
     int64_t mNumRTCPPacketsReceived;
     int64_t mNumRTPPacketsReceived;
     struct sockaddr_in mRemoteRTCPAddr;
+    struct sockaddr_in6 mRemoteRTCPAddr6;
 
     bool mIsInjected;
+
+    // RTCP Extension for CVO
+    int mCVOExtMap; // will be set to 0 if cvo is not negotiated in sdp
 };
 
 ARTPConnection::ARTPConnection(uint32_t flags)
     : mFlags(flags),
       mPollEventPending(false),
-      mLastReceiverReportTimeUs(-1) {
+      mLastReceiverReportTimeUs(-1),
+      mLastBitrateReportTimeUs(-1) {
 }
 
 ARTPConnection::~ARTPConnection() {
@@ -145,6 +153,117 @@
     TRESPASS();
 }
 
+// static
+void ARTPConnection::MakeRTPSocketPair(
+        int *rtpSocket, int *rtcpSocket, const char *localIp, const char *remoteIp,
+        unsigned localPort, unsigned remotePort, int64_t socketNetwork) {
+    bool isIPv6 = false;
+    if (strchr(localIp, ':') != NULL)
+        isIPv6 = true;
+
+    *rtpSocket = socket(isIPv6 ? AF_INET6 : AF_INET, SOCK_DGRAM, 0);
+    CHECK_GE(*rtpSocket, 0);
+
+    bumpSocketBufferSize(*rtpSocket);
+
+    *rtcpSocket = socket(isIPv6 ? AF_INET6 : AF_INET, SOCK_DGRAM, 0);
+    CHECK_GE(*rtcpSocket, 0);
+
+    if (socketNetwork != 0) {
+        ALOGD("trying to bind rtp socket(%d) to network(%llu).",
+                *rtpSocket, (unsigned long long)socketNetwork);
+
+        int result = android_setsocknetwork((net_handle_t)socketNetwork, *rtpSocket);
+        if (result != 0) {
+            ALOGW("failed(%d) to bind rtp socket(%d) to network(%llu)",
+                    result, *rtpSocket, (unsigned long long)socketNetwork);
+        }
+        result = android_setsocknetwork((net_handle_t)socketNetwork, *rtcpSocket);
+        if (result != 0) {
+            ALOGW("failed(%d) to bind rtcp socket(%d) to network(%llu)",
+                    result, *rtcpSocket, (unsigned long long)socketNetwork);
+        }
+    }
+
+    bumpSocketBufferSize(*rtcpSocket);
+
+    struct sockaddr *addr;
+    struct sockaddr_in addr4;
+    struct sockaddr_in6 addr6;
+
+    if (isIPv6) {
+        addr = (struct sockaddr *)&addr6;
+        memset(&addr6, 0, sizeof(addr6));
+        addr6.sin6_family = AF_INET6;
+        inet_pton(AF_INET6, localIp, &addr6.sin6_addr);
+        addr6.sin6_port = htons((uint16_t)localPort);
+    } else {
+        addr = (struct sockaddr *)&addr4;
+        memset(&addr4, 0, sizeof(addr4));
+        addr4.sin_family = AF_INET;
+        addr4.sin_addr.s_addr = inet_addr(localIp);
+        addr4.sin_port = htons((uint16_t)localPort);
+    }
+
+    int sockopt = 1;
+    setsockopt(*rtpSocket, SOL_SOCKET, SO_REUSEADDR, (int *)&sockopt, sizeof(sockopt));
+    setsockopt(*rtcpSocket, SOL_SOCKET, SO_REUSEADDR, (int *)&sockopt, sizeof(sockopt));
+
+    int sizeSockSt = isIPv6 ? sizeof(addr6) : sizeof(addr4);
+
+    if (bind(*rtpSocket, addr, sizeSockSt) == 0) {
+        ALOGI("rtp socket successfully binded. addr=%s:%d", localIp, localPort);
+    } else {
+        ALOGE("failed to bind rtp socket addr=%s:%d err=%s", localIp, localPort, strerror(errno));
+        return;
+    }
+
+    if (isIPv6)
+        addr6.sin6_port = htons(localPort + 1);
+    else
+        addr4.sin_port = htons(localPort + 1);
+
+    if (bind(*rtcpSocket, addr, sizeSockSt) == 0) {
+        ALOGI("rtcp socket successfully binded. addr=%s:%d", localIp, localPort + 1);
+    } else {
+        ALOGE("failed to bind rtcp socket addr=%s:%d err=%s", localIp,
+                localPort + 1, strerror(errno));
+    }
+
+    // Re uses addr variable as remote addr.
+    if (isIPv6) {
+        memset(&addr6, 0, sizeof(addr6));
+        addr6.sin6_family = AF_INET6;
+        inet_pton(AF_INET6, remoteIp, &addr6.sin6_addr);
+        addr6.sin6_port = htons((uint16_t)remotePort);
+    } else {
+        memset(&addr4, 0, sizeof(addr4));
+        addr4.sin_family = AF_INET;
+        addr4.sin_addr.s_addr = inet_addr(remoteIp);
+        addr4.sin_port = htons((uint16_t)remotePort);
+    }
+    if (connect(*rtpSocket, addr, sizeSockSt) == 0) {
+        ALOGI("rtp socket successfully connected to remote=%s:%d", remoteIp, remotePort);
+    } else {
+        ALOGE("failed to connect rtp socket to remote addr=%s:%d err=%s", remoteIp,
+                remotePort, strerror(errno));
+        return;
+    }
+
+    if (isIPv6)
+        addr6.sin6_port = htons(remotePort + 1);
+    else
+        addr4.sin_port = htons(remotePort + 1);
+
+    if (connect(*rtcpSocket, addr, sizeSockSt) == 0) {
+        ALOGI("rtcp socket successfully connected to remote=%s:%d", remoteIp, remotePort + 1);
+    } else {
+        ALOGE("failed to connect rtcp socket addr=%s:%d err=%s", remoteIp,
+                remotePort + 1, strerror(errno));
+        return;
+    }
+}
+
 void ARTPConnection::onMessageReceived(const sp<AMessage> &msg) {
     switch (msg->what()) {
         case kWhatAddStream:
@@ -204,6 +323,19 @@
     info->mNumRTCPPacketsReceived = 0;
     info->mNumRTPPacketsReceived = 0;
     memset(&info->mRemoteRTCPAddr, 0, sizeof(info->mRemoteRTCPAddr));
+    memset(&info->mRemoteRTCPAddr6, 0, sizeof(info->mRemoteRTCPAddr6));
+
+    sp<ASessionDescription> sessionDesc = info->mSessionDesc;
+    info->mCVOExtMap = 0;
+    for (size_t i = 1; i < sessionDesc->countTracks(); ++i) {
+        int32_t cvoExtMap;
+        if (sessionDesc->getCvoExtMap(i, &cvoExtMap)) {
+            info->mCVOExtMap = cvoExtMap;
+            ALOGI("urn:3gpp:video-orientation(cvo) found as extmap:%d", info->mCVOExtMap);
+        } else {
+            ALOGI("urn:3gpp:video-orientation(cvo) not found :%d", info->mCVOExtMap);
+        }
+    }
 
     if (!injected) {
         postPollEvent();
@@ -295,6 +427,12 @@
 
             if (err == -ECONNRESET) {
                 // socket failure, this stream is dead, Jim.
+                sp<AMessage> notify = it->mNotifyMsg->dup();
+                notify->setInt32("rtcp-event", 1);
+                notify->setInt32("payload-type", 400);
+                notify->setInt32("feedback-type", 1);
+                notify->setInt32("sender", it->mSources.valueAt(0)->getSelfID());
+                notify->post();
 
                 ALOGW("failed to receive RTP/RTCP datagram.");
                 it = mStreams.erase(it);
@@ -306,6 +444,8 @@
     }
 
     int64_t nowUs = ALooper::GetNowUs();
+    checkRxBitrate(nowUs);
+
     if (mLastReceiverReportTimeUs <= 0
             || mLastReceiverReportTimeUs + 5000000LL <= nowUs) {
         sp<ABuffer> buffer = new ABuffer(kMaxUDPSize);
@@ -340,13 +480,7 @@
             if (buffer->size() > 0) {
                 ALOGV("Sending RR...");
 
-                ssize_t n;
-                do {
-                    n = sendto(
-                        s->mRTCPSocket, buffer->data(), buffer->size(), 0,
-                        (const struct sockaddr *)&s->mRemoteRTCPAddr,
-                        sizeof(s->mRemoteRTCPAddr));
-                } while (n < 0 && errno == EINTR);
+                ssize_t n = send(s, buffer);
 
                 if (n <= 0) {
                     ALOGW("failed to send RTCP receiver report (%s).",
@@ -377,9 +511,21 @@
 
     sp<ABuffer> buffer = new ABuffer(65536);
 
+    struct sockaddr *pRemoteRTCPAddr;
+    int sizeSockSt;
+    if (s->isIPv6) {
+        pRemoteRTCPAddr = (struct sockaddr *)&s->mRemoteRTCPAddr6;
+        sizeSockSt = sizeof(struct sockaddr_in6);
+    } else {
+        pRemoteRTCPAddr = (struct sockaddr *)&s->mRemoteRTCPAddr;
+        sizeSockSt = sizeof(struct sockaddr_in);
+    }
     socklen_t remoteAddrLen =
         (!receiveRTP && s->mNumRTCPPacketsReceived == 0)
-            ? sizeof(s->mRemoteRTCPAddr) : 0;
+            ? sizeSockSt : 0;
+
+    if (mFlags & kViLTEConnection)
+        remoteAddrLen = 0;
 
     ssize_t nbytes;
     do {
@@ -388,8 +534,9 @@
             buffer->data(),
             buffer->capacity(),
             0,
-            remoteAddrLen > 0 ? (struct sockaddr *)&s->mRemoteRTCPAddr : NULL,
+            remoteAddrLen > 0 ? pRemoteRTCPAddr : NULL,
             remoteAddrLen > 0 ? &remoteAddrLen : NULL);
+        mCumulativeBytes += nbytes;
     } while (nbytes < 0 && errno == EINTR);
 
     if (nbytes <= 0) {
@@ -410,6 +557,36 @@
     return err;
 }
 
+ssize_t ARTPConnection::send(const StreamInfo *info, const sp<ABuffer> buffer) {
+        struct sockaddr* pRemoteRTCPAddr;
+        int sizeSockSt;
+
+        /* It seems this isIPv6 variable is useless.
+         * We should remove it to prevent confusion */
+        if (info->isIPv6) {
+            pRemoteRTCPAddr = (struct sockaddr *)&info->mRemoteRTCPAddr6;
+            sizeSockSt = sizeof(struct sockaddr_in6);
+        } else {
+            pRemoteRTCPAddr = (struct sockaddr *)&info->mRemoteRTCPAddr;
+            sizeSockSt = sizeof(struct sockaddr_in);
+        }
+
+        if (mFlags & kViLTEConnection) {
+            ALOGV("ViLTE RTCP");
+            pRemoteRTCPAddr = NULL;
+            sizeSockSt = 0;
+        }
+
+        ssize_t n;
+        do {
+            n = sendto(
+                    info->mRTCPSocket, buffer->data(), buffer->size(), 0,
+                    pRemoteRTCPAddr, sizeSockSt);
+        } while (n < 0 && errno == EINTR);
+
+        return n;
+}
+
 status_t ARTPConnection::parseRTP(StreamInfo *s, const sp<ABuffer> &buffer) {
     if (s->mNumRTPPacketsReceived++ == 0) {
         sp<AMessage> notify = s->mNotifyMsg->dup();
@@ -431,6 +608,11 @@
         return -1;
     }
 
+    if ((data[1] & 0x7f) == 20 /* decimal */) {
+        // Unassigned payload type
+        return -1;
+    }
+
     if (data[0] & 0x20) {
         // Padding present.
 
@@ -454,6 +636,7 @@
         return -1;
     }
 
+    int32_t cvoDegrees = -1;
     if (data[0] & 0x10) {
         // Header eXtension present.
 
@@ -473,6 +656,7 @@
             return -1;
         }
 
+        parseRTPExt(s, (const uint8_t *)extensionData, extensionLength, &cvoDegrees);
         payloadOffset += 4 + extensionLength;
     }
 
@@ -487,6 +671,8 @@
     meta->setInt32("rtp-time", rtpTime);
     meta->setInt32("PT", data[1] & 0x7f);
     meta->setInt32("M", data[1] >> 7);
+    if (cvoDegrees >= 0)
+        meta->setInt32("cvo", cvoDegrees);
 
     buffer->setInt32Data(u16at(&data[2]));
     buffer->setRange(payloadOffset, size - payloadOffset);
@@ -496,11 +682,65 @@
     return OK;
 }
 
+status_t ARTPConnection::parseRTPExt(StreamInfo *s,
+        const uint8_t *extHeader, size_t extLen, int32_t *cvoDegrees) {
+    if (extLen < 4)
+        return -1;
+
+    uint16_t header = (extHeader[0] << 8) | (extHeader[1]);
+    bool isOnebyteHeader = false;
+
+    if (header == 0xBEDE) {
+        isOnebyteHeader = true;
+    } else if (header == 0x1000) {
+        ALOGW("parseRTPExt: two-byte header is not implemented yet");
+        return -1;
+    } else {
+        ALOGW("parseRTPExt: can not recognize header");
+        return -1;
+    }
+
+    const uint8_t *extPayload = extHeader + 4;
+    extLen -= 4;
+    size_t offset = 0; //start from first payload of rtp extension.
+    // one-byte header parser
+    while (isOnebyteHeader && offset < extLen) {
+        uint8_t extmapId = extPayload[offset] >> 4;
+        uint8_t length = (extPayload[offset] & 0xF) + 1;
+        offset++;
+
+        // padding case
+        if (extmapId == 0)
+            continue;
+
+        uint8_t data[16]; // maximum length value
+        for (uint8_t j = 0; offset + j <= extLen && j < length; j++) {
+            data[j] = extPayload[offset + j];
+        }
+
+        offset += length;
+
+        if (extmapId == s->mCVOExtMap) {
+            *cvoDegrees = (int32_t)data[0];
+            return OK;
+        }
+    }
+
+    return BAD_VALUE;
+}
+
 status_t ARTPConnection::parseRTCP(StreamInfo *s, const sp<ABuffer> &buffer) {
     if (s->mNumRTCPPacketsReceived++ == 0) {
         sp<AMessage> notify = s->mNotifyMsg->dup();
         notify->setInt32("first-rtcp", true);
         notify->post();
+
+        ALOGI("send first-rtcp event to upper layer as ImsRxNotice");
+        sp<AMessage> imsNotify = s->mNotifyMsg->dup();
+        imsNotify->setInt32("rtcp-event", 1);
+        imsNotify->setInt32("payload-type", 101);
+        imsNotify->setInt32("feedback-type", 0);
+        imsNotify->post();
     }
 
     const uint8_t *data = buffer->data();
@@ -551,8 +791,12 @@
                 break;
 
             case 205:  // TSFB (transport layer specific feedback)
+                parseTSFB(s, data, headerLength);
+                break;
             case 206:  // PSFB (payload specific feedback)
                 // hexdump(data, headerLength);
+                parsePSFB(s, data, headerLength);
+                ALOGI("RTCP packet type %u of size %zu", (unsigned)data[1], headerLength);
                 break;
 
             case 203:
@@ -621,6 +865,144 @@
     return 0;
 }
 
+status_t ARTPConnection::parseTSFB(
+        StreamInfo *s, const uint8_t *data, size_t size) {
+    if (size < 12) {
+        // broken packet
+        return -1;
+    }
+
+    uint8_t msgType = data[0] & 0x1f;
+    uint32_t id = u32at(&data[4]);
+
+    const uint8_t *ptr = &data[12];
+    size -= 12;
+
+    using namespace std;
+    size_t FCISize;
+    switch(msgType) {
+        case 1:     // Generic NACK
+        {
+            FCISize = 4;
+            while (size >= FCISize) {
+                uint16_t PID = u16at(&ptr[0]);  // lost packet RTP number
+                uint16_t BLP = u16at(&ptr[2]);  // Bitmask of following Lost Packets
+
+                size -= FCISize;
+                ptr += FCISize;
+
+                AString list_of_losts;
+                list_of_losts.append(PID);
+                for (int i=0 ; i<16 ; i++) {
+                    bool is_lost = BLP & (0x1 << i);
+                    if (is_lost) {
+                        list_of_losts.append(", ");
+                        list_of_losts.append(PID + i);
+                    }
+                }
+                ALOGI("Opponent losts packet of RTP %s", list_of_losts.c_str());
+            }
+            break;
+        }
+        case 3:     // TMMBR
+        case 4:     // TMMBN
+        {
+            FCISize = 8;
+            while (size >= FCISize) {
+                uint32_t MxTBR = u32at(&ptr[4]);
+                uint32_t MxTBRExp = MxTBR >> 26;
+                uint32_t MxTBRMantissa = (MxTBR >> 9) & 0x01FFFF;
+                uint32_t overhead = MxTBR & 0x01FF;
+
+                size -= FCISize;
+                ptr += FCISize;
+
+                uint32_t bitRate = (1 << MxTBRExp) * MxTBRMantissa;
+
+                if (msgType == 3)
+                    ALOGI("Op -> UE Req Tx bitrate : %d X 2^%d = %d",
+                        MxTBRMantissa, MxTBRExp, bitRate);
+                else if (msgType == 4)
+                    ALOGI("OP -> UE Noti Rx bitrate : %d X 2^%d = %d",
+                        MxTBRMantissa, MxTBRExp, bitRate);
+
+                sp<AMessage> notify = s->mNotifyMsg->dup();
+                notify->setInt32("rtcp-event", 1);
+                notify->setInt32("payload-type", 205);
+                notify->setInt32("feedback-type", msgType);
+                notify->setInt32("sender", id);
+                notify->setInt32("bit-rate", bitRate);
+                notify->post();
+                ALOGI("overhead : %d", overhead);
+            }
+            break;
+        }
+        default:
+        {
+            ALOGI("Not supported TSFB type %d", msgType);
+            break;
+        }
+    }
+
+    return 0;
+}
+
+status_t ARTPConnection::parsePSFB(
+        StreamInfo *s, const uint8_t *data, size_t size) {
+    if (size < 12) {
+        // broken packet
+        return -1;
+    }
+
+    uint8_t msgType = data[0] & 0x1f;
+    uint32_t id = u32at(&data[4]);
+
+    const uint8_t *ptr = &data[12];
+    size -= 12;
+
+    using namespace std;
+    switch(msgType) {
+        case 1:     // Picture Loss Indication (PLI)
+        {
+            if (size > 0) {
+                // PLI does not need parameters
+                break;
+            };
+            sp<AMessage> notify = s->mNotifyMsg->dup();
+            notify->setInt32("rtcp-event", 1);
+            notify->setInt32("payload-type", 206);
+            notify->setInt32("feedback-type", msgType);
+            notify->setInt32("sender", id);
+            notify->post();
+            ALOGI("PLI detected.");
+            break;
+        }
+        case 4:     // Full Intra Request (FIR)
+        {
+            if (size < 4) {
+                break;
+            }
+            uint32_t requestedId = u32at(&ptr[0]);
+            if (requestedId == (uint32_t)mSelfID) {
+                sp<AMessage> notify = s->mNotifyMsg->dup();
+                notify->setInt32("rtcp-event", 1);
+                notify->setInt32("payload-type", 206);
+                notify->setInt32("feedback-type", msgType);
+                notify->setInt32("sender", id);
+                notify->post();
+                ALOGI("FIR detected.");
+            }
+            break;
+        }
+        default:
+        {
+            ALOGI("Not supported PSFB type %d", msgType);
+            break;
+        }
+    }
+
+    return 0;
+}
 sp<ARTPSource> ARTPConnection::findSource(StreamInfo *info, uint32_t srcId) {
     sp<ARTPSource> source;
     ssize_t index = info->mSources.indexOfKey(srcId);
@@ -630,6 +1012,8 @@
         source = new ARTPSource(
                 srcId, info->mSessionDesc, info->mIndex, info->mNotifyMsg);
 
+        source->setSelfID(mSelfID);
+        source->setMinMaxBitrate(mMinBitrate, mMaxBitrate);
         info->mSources.add(srcId, source);
     } else {
         source = info->mSources.valueAt(index);
@@ -645,6 +1029,78 @@
     msg->post();
 }
 
+void ARTPConnection::setSelfID(const uint32_t selfID) {
+    mSelfID = selfID;
+}
+
+void ARTPConnection::setMinMaxBitrate(int32_t min, int32_t max) {
+    mMinBitrate = min;
+    mMaxBitrate = max;
+}
+
+void ARTPConnection::checkRxBitrate(int64_t nowUs) {
+    if (mLastBitrateReportTimeUs <= 0) {
+        mCumulativeBytes = 0;
+        mLastBitrateReportTimeUs = nowUs;
+    }
+    else if (mLastBitrateReportTimeUs + 1000000ll <= nowUs) {
+        int32_t timeDiff = (nowUs - mLastBitrateReportTimeUs) / 1000000ll;
+        int32_t bitrate = mCumulativeBytes * 8 / timeDiff;
+        ALOGI("Actual Rx bitrate : %d bits/sec", bitrate);
+
+        sp<ABuffer> buffer = new ABuffer(kMaxUDPSize);
+        List<StreamInfo>::iterator it = mStreams.begin();
+        while (it != mStreams.end()) {
+            StreamInfo *s = &*it;
+            if (s->mIsInjected) {
+                ++it;
+                continue;
+            }
+
+            if (s->mNumRTCPPacketsReceived == 0) {
+                // We have never received any RTCP packets on this stream,
+                // we don't even know where to send a report.
+                ++it;
+                continue;
+            }
+
+            buffer->setRange(0, 0);
+
+            for (size_t i = 0; i < s->mSources.size(); ++i) {
+                sp<ARTPSource> source = s->mSources.valueAt(i);
+                source->setBitrateData(bitrate, nowUs);
+                source->setTargetBitrate();
+                source->addTMMBR(buffer);
+                if (source->isNeedToDowngrade()) {
+                    sp<AMessage> notify = s->mNotifyMsg->dup();
+                    notify->setInt32("rtcp-event", 1);
+                    notify->setInt32("payload-type", 400);
+                    notify->setInt32("feedback-type", 1);
+                    notify->setInt32("sender", source->getSelfID());
+                    notify->post();
+                }
+            }
+            if (buffer->size() > 0) {
+                ALOGV("Sending TMMBR...");
+
+                ssize_t n = send(s, buffer);
+
+                if (n <= 0) {
+                    ALOGW("failed to send RTCP TMMBR (%s).",
+                         n == 0 ? "connection gone" : strerror(errno));
+
+                    it = mStreams.erase(it);
+                    continue;
+                }
+
+                CHECK_EQ(n, (ssize_t)buffer->size());
+            }
+            ++it;
+        }
+        mCumulativeBytes = 0;
+        mLastBitrateReportTimeUs = nowUs;
+    }
+}
 void ARTPConnection::onInjectPacket(const sp<AMessage> &msg) {
     int32_t index;
     CHECK(msg->findInt32("index", &index));
@@ -672,4 +1128,3 @@
 }
 
 }  // namespace android
-
diff --git a/media/libstagefright/rtsp/ARTPConnection.h b/media/libstagefright/rtsp/ARTPConnection.h
index d5f7c2e..712eec5 100644
--- a/media/libstagefright/rtsp/ARTPConnection.h
+++ b/media/libstagefright/rtsp/ARTPConnection.h
@@ -30,6 +30,7 @@
 struct ARTPConnection : public AHandler {
     enum Flags {
         kRegularlyRequestFIR = 2,
+        kViLTEConnection = 4,
     };
 
     explicit ARTPConnection(uint32_t flags = 0);
@@ -44,11 +45,21 @@
 
     void injectPacket(int index, const sp<ABuffer> &buffer);
 
+    void setSelfID(const uint32_t selfID);
+    void setMinMaxBitrate(int32_t min, int32_t max);
+
     // Creates a pair of UDP datagram sockets bound to adjacent ports
     // (the rtpSocket is bound to an even port, the rtcpSocket to the
     // next higher port).
     static void MakePortPair(
             int *rtpSocket, int *rtcpSocket, unsigned *rtpPort);
+    // Creates a pair of UDP datagram sockets bound to assigned ip and
+    // ports (the rtpSocket is bound to an even port, the rtcpSocket
+    // to the next higher port).
+    static void MakeRTPSocketPair(
+            int *rtpSocket, int *rtcpSocket,
+            const char *localIp, const char *remoteIp,
+            unsigned localPort, unsigned remotePort, int64_t socketNetwork = 0);
 
 protected:
     virtual ~ARTPConnection();
@@ -71,18 +82,30 @@
 
     bool mPollEventPending;
     int64_t mLastReceiverReportTimeUs;
+    int64_t mLastBitrateReportTimeUs;
+
+    int32_t mSelfID;
+
+    int32_t mMinBitrate;
+    int32_t mMaxBitrate;
+    int32_t mCumulativeBytes;
 
     void onAddStream(const sp<AMessage> &msg);
     void onRemoveStream(const sp<AMessage> &msg);
     void onPollStreams();
     void onInjectPacket(const sp<AMessage> &msg);
     void onSendReceiverReports();
+    void checkRxBitrate(int64_t nowUs);
 
     status_t receive(StreamInfo *info, bool receiveRTP);
+    ssize_t send(const StreamInfo *info, const sp<ABuffer> buffer);
 
     status_t parseRTP(StreamInfo *info, const sp<ABuffer> &buffer);
+    status_t parseRTPExt(StreamInfo *s, const uint8_t *extData, size_t extLen, int32_t *cvoDegrees);
     status_t parseRTCP(StreamInfo *info, const sp<ABuffer> &buffer);
     status_t parseSR(StreamInfo *info, const uint8_t *data, size_t size);
+    status_t parseTSFB(StreamInfo *info, const uint8_t *data, size_t size);
+    status_t parsePSFB(StreamInfo *info, const uint8_t *data, size_t size);
     status_t parseBYE(StreamInfo *info, const uint8_t *data, size_t size);
 
     sp<ARTPSource> findSource(StreamInfo *info, uint32_t id);
diff --git a/media/libstagefright/rtsp/ARTPSource.cpp b/media/libstagefright/rtsp/ARTPSource.cpp
index f5f8128..bbe9d94 100644
--- a/media/libstagefright/rtsp/ARTPSource.cpp
+++ b/media/libstagefright/rtsp/ARTPSource.cpp
@@ -22,6 +22,7 @@
 
 #include "AAMRAssembler.h"
 #include "AAVCAssembler.h"
+#include "AHEVCAssembler.h"
 #include "AH263Assembler.h"
 #include "AMPEG2TSAssembler.h"
 #include "AMPEG4AudioAssembler.h"
@@ -35,13 +36,17 @@
 
 namespace android {
 
-static const uint32_t kSourceID = 0xdeadbeef;
+static uint32_t kSourceID = 0xdeadbeef;
 
 ARTPSource::ARTPSource(
         uint32_t id,
         const sp<ASessionDescription> &sessionDesc, size_t index,
         const sp<AMessage> &notify)
-    : mID(id),
+    : mFirstSeqNumber(0),
+      mFirstRtpTime(0),
+      mFirstSysTime(0),
+      mClockRate(0),
+      mID(id),
       mHighestSeqNumber(0),
       mPrevExpected(0),
       mBaseSeqNumber(0),
@@ -61,6 +66,9 @@
     if (!strncmp(desc.c_str(), "H264/", 5)) {
         mAssembler = new AAVCAssembler(notify);
         mIssueFIRRequests = true;
+    } else if (!strncmp(desc.c_str(), "H265/", 5)) {
+        mAssembler = new AHEVCAssembler(notify);
+        mIssueFIRRequests = true;
     } else if (!strncmp(desc.c_str(), "MP4A-LATM/", 10)) {
         mAssembler = new AMPEG4AudioAssembler(notify, params);
     } else if (!strncmp(desc.c_str(), "H263-1998/", 10)
@@ -112,9 +120,16 @@
 bool ARTPSource::queuePacket(const sp<ABuffer> &buffer) {
     uint32_t seqNum = (uint32_t)buffer->int32Data();
 
-    if (mNumBuffersReceived++ == 0) {
+    if (mNumBuffersReceived++ == 0 && mFirstSysTime == 0) {
+        int32_t firstRtpTime;
+        CHECK(buffer->meta()->findInt32("rtp-time", &firstRtpTime));
+        mFirstSysTime = ALooper::GetNowUs();
         mHighestSeqNumber = seqNum;
         mBaseSeqNumber = seqNum;
+        mFirstRtpTime = firstRtpTime;
+        ALOGV("first-rtp arrived: first-rtp-time=%d, sys-time=%lld, seq-num=%u",
+                mFirstRtpTime, (long long)mFirstSysTime, mHighestSeqNumber);
+        mClockRate = 90000;
         mQueue.push_back(buffer);
         return true;
     }
@@ -306,6 +321,97 @@
     buffer->setRange(buffer->offset(), buffer->size() + 32);
 }
 
+void ARTPSource::addTMMBR(const sp<ABuffer> &buffer) {
+    if (buffer->size() + 20 > buffer->capacity()) {
+        ALOGW("RTCP buffer too small to accommodate RR.");
+        return;
+    }
+
+    int32_t targetBitrate = mQualManager.getTargetBitrate();
+    if (targetBitrate <= 0)
+        return;
+
+    uint8_t *data = buffer->data() + buffer->size();
+
+    data[0] = 0x80 | 3; // TMMBR
+    data[1] = 205;      // TSFB
+    data[2] = 0;
+    data[3] = 4;        // total (4+1) * sizeof(int32_t) = 20 bytes
+    data[4] = kSourceID >> 24;
+    data[5] = (kSourceID >> 16) & 0xff;
+    data[6] = (kSourceID >> 8) & 0xff;
+    data[7] = kSourceID & 0xff;
+
+    *(int32_t*)(&data[8]) = 0;  // 4 bytes blank
+
+    data[12] = mID >> 24;
+    data[13] = (mID >> 16) & 0xff;
+    data[14] = (mID >> 8) & 0xff;
+    data[15] = mID & 0xff;
+
+    int32_t exp, mantissa;
+
+    // Round off to the nearest 2^4th
+    ALOGI("UE -> Op Req Rx bitrate : %d ", targetBitrate & 0xfffffff0);
+    for (exp=4 ; exp < 32 ; exp++)
+        if (((targetBitrate >> exp) & 0x01) != 0)
+            break;
+    mantissa = targetBitrate >> exp;
+
+    data[16] = ((exp << 2) & 0xfc) | ((mantissa & 0x18000) >> 15);
+    data[17] =                        (mantissa & 0x07f80) >> 7;
+    data[18] =                        (mantissa & 0x0007f) << 1;
+    data[19] = 40;              // 40 bytes overhead;
+
+    buffer->setRange(buffer->offset(), buffer->size() + 20);
+}
+
+uint32_t ARTPSource::getSelfID() {
+    return kSourceID;
+}
+void ARTPSource::setSelfID(const uint32_t selfID) {
+    kSourceID = selfID;
+}
+
+void ARTPSource::setMinMaxBitrate(int32_t min, int32_t max) {
+    mQualManager.setMinMaxBitrate(min, max);
+}
+
+void ARTPSource::setBitrateData(int32_t bitrate, int64_t time) {
+    mQualManager.setBitrateData(bitrate, time);
+}
+
+void ARTPSource::setTargetBitrate() {
+    uint8_t fraction = 0;
+
+    // According to appendix A.3 in RFC 3550
+    uint32_t expected = mHighestSeqNumber - mBaseSeqNumber + 1;
+    int64_t intervalExpected = expected - mPrevExpected;
+    int64_t intervalReceived = mNumBuffersReceived - mPrevNumBuffersReceived;
+    int64_t intervalPacketLost = intervalExpected - intervalReceived;
+
+    ALOGI("UID %p expectedPkts %lld lostPkts %lld", this, (long long)intervalExpected, (long long)intervalPacketLost);
+
+    if (intervalPacketLost < 0 || intervalExpected == 0)
+        fraction = 0;
+    else if (intervalExpected <= intervalPacketLost)
+        fraction = 255;
+    else
+        fraction = (intervalPacketLost << 8) / intervalExpected;
+
+    mQualManager.setTargetBitrate(fraction, ALooper::GetNowUs(), intervalExpected < 5);
+}
+
+bool ARTPSource::isNeedToReport() {
+    int64_t intervalReceived = mNumBuffersReceived - mPrevNumBuffersReceived;
+    return (intervalReceived > 0) ? true : false;
+}
+
+bool ARTPSource::isNeedToDowngrade() {
+    return mQualManager.isNeedToDowngrade();
+}
+
+void ARTPSource::noticeAbandonBuffer(int cnt) {
+    mNumBuffersReceived -= cnt;
+}
 }  // namespace android
-
-
diff --git a/media/libstagefright/rtsp/ARTPSource.h b/media/libstagefright/rtsp/ARTPSource.h
index f44e83f..652e753 100644
--- a/media/libstagefright/rtsp/ARTPSource.h
+++ b/media/libstagefright/rtsp/ARTPSource.h
@@ -23,6 +23,7 @@
 #include <media/stagefright/foundation/ABase.h>
 #include <utils/List.h>
 #include <utils/RefBase.h>
+#include <QualManager.h>
 
 namespace android {
 
@@ -45,8 +46,25 @@
 
     void addReceiverReport(const sp<ABuffer> &buffer);
     void addFIR(const sp<ABuffer> &buffer);
+    void addTMMBR(const sp<ABuffer> &buffer);
+    uint32_t getSelfID();
+    void setSelfID(const uint32_t selfID);
+    void setMinMaxBitrate(int32_t min, int32_t max);
+    void setBitrateData(int32_t bitrate, int64_t time);
+    void setTargetBitrate();
+
+    bool isNeedToReport();
+    bool isNeedToDowngrade();
+
+    void noticeAbandonBuffer(int cnt=1);
+
+    int32_t mFirstSeqNumber;
+    int32_t mFirstRtpTime;
+    int64_t mFirstSysTime;
+    int32_t mClockRate;
 
 private:
+
     uint32_t mID;
     uint32_t mHighestSeqNumber;
     uint32_t mPrevExpected;
@@ -66,6 +84,8 @@
 
     sp<AMessage> mNotify;
 
+    QualManager mQualManager;
+
     bool queuePacket(const sp<ABuffer> &buffer);
 
     DISALLOW_EVIL_CONSTRUCTORS(ARTPSource);
diff --git a/media/libstagefright/rtsp/ARTPWriter.cpp b/media/libstagefright/rtsp/ARTPWriter.cpp
index 58d6086..70d34de 100644
--- a/media/libstagefright/rtsp/ARTPWriter.cpp
+++ b/media/libstagefright/rtsp/ARTPWriter.cpp
@@ -35,10 +35,29 @@
 #define PT      97
 #define PT_STR  "97"
 
+#define H264_NALU_MASK 0x1F
+#define H264_NALU_SPS 0x7
+#define H264_NALU_PPS 0x8
+#define H264_NALU_IFRAME 0x5
+#define H264_NALU_PFRAME 0x1
+
+#define H265_NALU_MASK 0x3F
+#define H265_NALU_VPS 0x40
+#define H265_NALU_SPS 0x42
+#define H265_NALU_PPS 0x44
+
+#define UDP_HEADER_SIZE 8
+#define RTP_HEADER_SIZE 12
+#define RTP_HEADER_EXT_SIZE 1
+#define RTP_FU_HEADER_SIZE 2
+#define RTP_PAYLOAD_ROOM_SIZE 140
+
+
 namespace android {
 
 // static const size_t kMaxPacketSize = 65507;  // maximum payload in UDP over IP
 static const size_t kMaxPacketSize = 1500;
+static char kCNAME[255] = "someone@somewhere";
 
 static int UniformRand(int limit) {
     return ((double)rand() * limit) / RAND_MAX;
@@ -50,13 +69,16 @@
       mLooper(new ALooper),
       mReflector(new AHandlerReflector<ARTPWriter>(this)) {
     CHECK_GE(fd, 0);
+    mIsIPv6 = false;
 
     mLooper->setName("rtp writer");
     mLooper->registerHandler(mReflector);
     mLooper->start();
 
-    mSocket = socket(AF_INET, SOCK_DGRAM, 0);
-    CHECK_GE(mSocket, 0);
+    mRTPSocket = socket(AF_INET, SOCK_DGRAM, 0);
+    CHECK_GE(mRTPSocket, 0);
+    mRTCPSocket = socket(AF_INET, SOCK_DGRAM, 0);
+    CHECK_GE(mRTCPSocket, 0);
 
     memset(mRTPAddr.sin_zero, 0, sizeof(mRTPAddr.sin_zero));
     mRTPAddr.sin_family = AF_INET;
@@ -72,6 +94,42 @@
 
     mRTCPAddr = mRTPAddr;
     mRTCPAddr.sin_port = htons(ntohs(mRTPAddr.sin_port) | 1);
+    mSPSBuf = NULL;
+    mPPSBuf = NULL;
+
+#if LOG_TO_FILES
+    mRTPFd = open(
+            "/data/misc/rtpout.bin",
+            O_WRONLY | O_CREAT | O_TRUNC,
+            0644);
+    CHECK_GE(mRTPFd, 0);
+
+    mRTCPFd = open(
+            "/data/misc/rtcpout.bin",
+            O_WRONLY | O_CREAT | O_TRUNC,
+            0644);
+    CHECK_GE(mRTCPFd, 0);
+#endif
+}
+
+ARTPWriter::ARTPWriter(int fd, String8& localIp, int localPort, String8& remoteIp,
+    int remotePort, uint32_t seqNo)
+    : mFlags(0),
+      mFd(dup(fd)),
+      mLooper(new ALooper),
+      mReflector(new AHandlerReflector<ARTPWriter>(this)) {
+    CHECK_GE(fd, 0);
+    mIsIPv6 = false;
+
+    mLooper->setName("rtp writer");
+    mLooper->registerHandler(mReflector);
+    mLooper->start();
+
+    makeSocketPairAndBind(localIp, localPort, remoteIp , remotePort);
+    mSPSBuf = NULL;
+    mPPSBuf = NULL;
+
+    mSeqNo = seqNo;
 
 #if LOG_TO_FILES
     mRTPFd = open(
@@ -89,6 +147,16 @@
 }
 
 ARTPWriter::~ARTPWriter() {
+    if (mSPSBuf != NULL) {
+        mSPSBuf->release();
+        mSPSBuf = NULL;
+    }
+
+    if (mPPSBuf != NULL) {
+        mPPSBuf->release();
+        mPPSBuf = NULL;
+    }
+
 #if LOG_TO_FILES
     close(mRTCPFd);
     mRTCPFd = -1;
@@ -97,8 +165,11 @@
     mRTPFd = -1;
 #endif
 
-    close(mSocket);
-    mSocket = -1;
+    close(mRTPSocket);
+    mRTPSocket = -1;
+
+    close(mRTCPSocket);
+    mRTCPSocket = -1;
 
     close(mFd);
     mFd = -1;
@@ -114,28 +185,61 @@
     return (mFlags & kFlagEOS) != 0;
 }
 
-status_t ARTPWriter::start(MetaData * /* params */) {
+status_t ARTPWriter::start(MetaData * params) {
     Mutex::Autolock autoLock(mLock);
     if (mFlags & kFlagStarted) {
         return INVALID_OPERATION;
     }
 
     mFlags &= ~kFlagEOS;
-    mSourceID = rand();
-    mSeqNo = UniformRand(65536);
-    mRTPTimeBase = rand();
+    if (mSourceID == 0)
+        mSourceID = rand();
+    if (mSeqNo == 0)
+        mSeqNo = UniformRand(65536);
+    mRTPTimeBase = 0;
     mNumRTPSent = 0;
     mNumRTPOctetsSent = 0;
     mLastRTPTime = 0;
     mLastNTPTime = 0;
+    mOpponentID = 0;
+    mBitrate = 192000;
     mNumSRsSent = 0;
+    mRTPCVOExtMap = -1;
+    mRTPCVODegrees = 0;
+    mRTPSockNetwork = 0;
 
     const char *mime;
     CHECK(mSource->getFormat()->findCString(kKeyMIMEType, &mime));
 
+    int32_t selfID = 0;
+    if (params->findInt32(kKeySelfID, &selfID))
+        mSourceID = selfID;
+
+    int32_t payloadType = 0;
+    if (params->findInt32(kKeyPayloadType, &payloadType))
+        mPayloadType = payloadType;
+
+    int32_t rtpExtMap = 0;
+    if (params->findInt32(kKeyRtpExtMap, &rtpExtMap))
+        mRTPCVOExtMap = rtpExtMap;
+
+    int32_t rtpCVODegrees = 0;
+    if (params->findInt32(kKeyRtpCvoDegrees, &rtpCVODegrees))
+        mRTPCVODegrees = rtpCVODegrees;
+
+    int32_t dscp = 0;
+    if (params->findInt32(kKeyRtpDscp, &dscp))
+        updateSocketDscp(dscp);
+
+    int64_t sockNetwork = 0;
+    if (params->findInt64(kKeySocketNetwork, &sockNetwork))
+        updateSocketNetwork(sockNetwork);
+
     mMode = INVALID;
     if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_AVC)) {
         mMode = H264;
+    } else if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_HEVC)) {
+        mMode = H265;
     } else if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_H263)) {
         mMode = H263;
     } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AMR_NB)) {
@@ -173,9 +277,12 @@
     return OK;
 }
 
-static void StripStartcode(MediaBufferBase *buffer) {
+// return size of SPS if there is more NAL unit found following to SPS.
+static uint32_t StripStartcode(MediaBufferBase *buffer) {
+    uint32_t nalSize = 0;
+
     if (buffer->range_length() < 4) {
-        return;
+        return 0;
     }
 
     const uint8_t *ptr =
@@ -185,13 +292,65 @@
         buffer->set_range(
                 buffer->range_offset() + 4, buffer->range_length() - 4);
     }
+
+    ptr = (const uint8_t *)buffer->data() + buffer->range_offset();
+
+    if (buffer->range_length() > 0 && (*ptr & H264_NALU_MASK) == H264_NALU_SPS) {
+        for (uint32_t i = 1; i + 4 <= buffer->range_length(); i++) {
+
+            if (!memcmp(ptr + i, "\x00\x00\x00\x01", 4)) {
+                // Now, we found one more NAL unit in the media buffer.
+                // Mostly, it will be a PPS.
+                nalSize = i;
+                ALOGV("SPS found. size=%d", nalSize);
+            }
+        }
+    }
+
+    return nalSize;
+}
+
+static void SpsPpsParser(MediaBufferBase *mediaBuffer,
+    MediaBufferBase **spsBuffer, MediaBufferBase **ppsBuffer, uint32_t spsSize) {
+
+    if (mediaBuffer == NULL || mediaBuffer->range_length() < 4)
+        return;
+
+    if ((*spsBuffer) != NULL) {
+        (*spsBuffer)->release();
+        (*spsBuffer) = NULL;
+    }
+
+    if ((*ppsBuffer) != NULL) {
+        (*ppsBuffer)->release();
+        (*ppsBuffer) = NULL;
+    }
+
+    // we got sps/pps but startcode of sps is striped.
+    (*spsBuffer) = MediaBufferBase::Create(spsSize);
+    memcpy((*spsBuffer)->data(),
+            (const uint8_t *)mediaBuffer->data() + mediaBuffer->range_offset(),
+            spsSize);
+
+    int32_t ppsSize = mediaBuffer->range_length() - spsSize - 4 /*startcode*/;
+    if (ppsSize > 0) {
+        (*ppsBuffer) = MediaBufferBase::Create(ppsSize);
+        ALOGV("PPS found. size=%d", (int)ppsSize);
+        mediaBuffer->set_range(mediaBuffer->range_offset() + spsSize + 4 /*startcode*/,
+                mediaBuffer->range_length() - spsSize - 4 /*startcode*/);
+        memcpy((*ppsBuffer)->data(),
+                (const uint8_t *)mediaBuffer->data() + mediaBuffer->range_offset(),
+                ppsSize);
+    }
 }
 
 void ARTPWriter::onMessageReceived(const sp<AMessage> &msg) {
     switch (msg->what()) {
         case kWhatStart:
         {
-            CHECK_EQ(mSource->start(), (status_t)OK);
+            sp<MetaData> meta = new MetaData();
+            meta->setInt64(kKeyTime, 10ll);
+            CHECK_EQ(mSource->start(meta.get()), (status_t)OK);
 
 #if 0
             if (mMode == H264) {
@@ -264,6 +423,18 @@
     }
 }
 
+void ARTPWriter::setTMMBNInfo(uint32_t opponentID, uint32_t bitrate) {
+    mOpponentID = opponentID;
+    mBitrate = bitrate;
+
+    sp<ABuffer> buffer = new ABuffer(65536);
+    buffer->setRange(0, 0);
+
+    addTMMBN(buffer);
+
+    send(buffer, true /* isRTCP */);
+}
+
 void ARTPWriter::onRead(const sp<AMessage> &msg) {
     MediaBufferBase *mediaBuf;
     status_t err = mSource->read(&mediaBuf);
@@ -280,8 +451,15 @@
         ALOGV("read buffer of size %zu", mediaBuf->range_length());
 
         if (mMode == H264) {
+            uint32_t spsSize = 0;
+            if ((spsSize = StripStartcode(mediaBuf)) > 0) {
+                SpsPpsParser(mediaBuf, &mSPSBuf, &mPPSBuf, spsSize);
+            } else {
+                sendAVCData(mediaBuf);
+            }
+        } else if (mMode == H265) {
             StripStartcode(mediaBuf);
-            sendAVCData(mediaBuf);
+            sendHEVCData(mediaBuf);
         } else if (mMode == H263) {
             sendH263Data(mediaBuf);
         } else if (mMode == AMR_NB || mMode == AMR_WB) {
@@ -309,12 +487,29 @@
 }
 
 void ARTPWriter::send(const sp<ABuffer> &buffer, bool isRTCP) {
-    ssize_t n = sendto(
-            mSocket, buffer->data(), buffer->size(), 0,
-            (const struct sockaddr *)(isRTCP ? &mRTCPAddr : &mRTPAddr),
-            sizeof(mRTCPAddr));
+    int sizeSockSt;
+    struct sockaddr *remAddr;
 
-    CHECK_EQ(n, (ssize_t)buffer->size());
+    if (mIsIPv6) {
+        sizeSockSt = sizeof(struct sockaddr_in6);
+        if (isRTCP)
+            remAddr = (struct sockaddr *)&mRTCPAddr6;
+        else
+            remAddr = (struct sockaddr *)&mRTPAddr6;
+    } else {
+        sizeSockSt = sizeof(struct sockaddr_in);
+        if (isRTCP)
+            remAddr = (struct sockaddr *)&mRTCPAddr;
+        else
+            remAddr = (struct sockaddr *)&mRTPAddr;
+    }
+
+    ssize_t n = sendto(isRTCP ? mRTCPSocket : mRTPSocket,
+            buffer->data(), buffer->size(), 0, remAddr, sizeSockSt);
+
+    if (n != (ssize_t)buffer->size()) {
+        ALOGW("packets can not be sent. ret=%d, buf=%d", (int)n, (int)buffer->size());
+    }
 
 #if LOG_TO_FILES
     int fd = isRTCP ? mRTCPFd : mRTPFd;
@@ -379,7 +574,6 @@
 
     data[offset++] = 1;  // CNAME
 
-    static const char *kCNAME = "someone@somewhere";
     data[offset++] = strlen(kCNAME);
 
     memcpy(&data[offset], kCNAME, strlen(kCNAME));
@@ -416,9 +610,52 @@
     buffer->setRange(buffer->offset(), buffer->size() + offset);
 }
 
+void ARTPWriter::addTMMBN(const sp<ABuffer> &buffer) {
+    if (buffer->size() + 20 > buffer->capacity()) {
+        ALOGW("RTCP buffer too small to accommodate SR.");
+        return;
+    }
+    if (mOpponentID == 0)
+        return;
+
+    uint8_t *data = buffer->data() + buffer->size();
+
+    data[0] = 0x80 | 4; // TMMBN
+    data[1] = 205;      // TSFB
+    data[2] = 0;
+    data[3] = 4;        // total (4+1) * sizeof(int32_t) = 20 bytes
+    data[4] = mSourceID >> 24;
+    data[5] = (mSourceID >> 16) & 0xff;
+    data[6] = (mSourceID >> 8) & 0xff;
+    data[7] = mSourceID & 0xff;
+
+    *(int32_t*)(&data[8]) = 0;  // 4 bytes blank
+
+    data[12] = mOpponentID >> 24;
+    data[13] = (mOpponentID >> 16) & 0xff;
+    data[14] = (mOpponentID >> 8) & 0xff;
+    data[15] = mOpponentID & 0xff;
+
+    int32_t exp, mantissa;
+
+    // Round off to the nearest 2^4th
+    ALOGI("UE -> Op Noti Tx bitrate : %d ", mBitrate & 0xfffffff0);
+    for (exp=4 ; exp < 32 ; exp++)
+        if (((mBitrate >> exp) & 0x01) != 0)
+            break;
+    mantissa = mBitrate >> exp;
+
+    data[16] = ((exp << 2) & 0xfc) | ((mantissa & 0x18000) >> 15);
+    data[17] =                        (mantissa & 0x07f80) >> 7;
+    data[18] =                        (mantissa & 0x0007f) << 1;
+    data[19] = 40;              // 40 bytes overhead;
+
+    buffer->setRange(buffer->offset(), buffer->size() + 20);
+}
+
 // static
 uint64_t ARTPWriter::GetNowNTP() {
-    uint64_t nowUs = ALooper::GetNowUs();
+    uint64_t nowUs = systemTime(SYSTEM_TIME_REALTIME) / 1000ll;
 
     nowUs += ((70LL * 365 + 17) * 24) * 60 * 60 * 1000000LL;
 
@@ -463,7 +700,7 @@
         sdp.append("m=audio ");
     }
 
-    sdp.append(AStringPrintf("%d", ntohs(mRTPAddr.sin_port)));
+    sdp.append(AStringPrintf("%d", mIsIPv6 ? ntohs(mRTPAddr6.sin6_port) : ntohs(mRTPAddr.sin_port)));
     sdp.append(
           " RTP/AVP " PT_STR "\r\n"
           "b=AS 320000\r\n"
@@ -569,24 +806,49 @@
     send(buffer, true /* isRTCP */);
 }
 
-void ARTPWriter::sendAVCData(MediaBufferBase *mediaBuf) {
+void ARTPWriter::sendSPSPPSIfIFrame(MediaBufferBase *mediaBuf, int64_t timeUs) {
+    const uint8_t *mediaData =
+        (const uint8_t *)mediaBuf->data() + mediaBuf->range_offset();
+
+    if (mediaBuf->range_length() == 0
+            || (mediaData[0] & H264_NALU_MASK) != H264_NALU_IFRAME)
+        return;
+
+    if (mSPSBuf != NULL) {
+        mSPSBuf->meta_data().setInt64(kKeyTime, timeUs);
+        mSPSBuf->meta_data().setInt32(kKeySps, 1);
+        sendAVCData(mSPSBuf);
+    }
+
+    if (mPPSBuf != NULL) {
+        mPPSBuf->meta_data().setInt64(kKeyTime, timeUs);
+        mPPSBuf->meta_data().setInt32(kKeyPps, 1);
+        sendAVCData(mPPSBuf);
+    }
+}
+
+void ARTPWriter::sendHEVCData(MediaBufferBase *mediaBuf) {
     // 12 bytes RTP header + 2 bytes for the FU-indicator and FU-header.
     CHECK_GE(kMaxPacketSize, 12u + 2u);
 
     int64_t timeUs;
     CHECK(mediaBuf->meta_data().findInt64(kKeyTime, &timeUs));
 
-    uint32_t rtpTime = mRTPTimeBase + (timeUs * 9 / 100LL);
+    sendSPSPPSIfIFrame(mediaBuf, timeUs);
+
+    uint32_t rtpTime = mRTPTimeBase + (timeUs * 9 / 100ll);
 
     const uint8_t *mediaData =
         (const uint8_t *)mediaBuf->data() + mediaBuf->range_offset();
 
     sp<ABuffer> buffer = new ABuffer(kMaxPacketSize);
-    if (mediaBuf->range_length() + 12 <= buffer->capacity()) {
+
+    if (mediaBuf->range_length() + UDP_HEADER_SIZE + RTP_HEADER_SIZE + RTP_PAYLOAD_ROOM_SIZE
+            <= buffer->capacity()) {
         // The data fits into a single packet
         uint8_t *data = buffer->data();
         data[0] = 0x80;
-        data[1] = (1 << 7) | PT;  // M-bit
+        data[1] = (1 << 7) | mPayloadType;  // M-bit
         data[2] = (mSeqNo >> 8) & 0xff;
         data[3] = mSeqNo & 0xff;
         data[4] = rtpTime >> 24;
@@ -611,21 +873,24 @@
     } else {
         // FU-A
 
-        unsigned nalType = mediaData[0];
-        size_t offset = 1;
+        unsigned nalType = (mediaData[0] >> 1) & H265_NALU_MASK;
+        ALOGV("H265 nalType 0x%x, data[0]=0x%x", nalType, mediaData[0]);
+        size_t offset = 2; //H265 payload header is 16 bit.
 
         bool firstPacket = true;
         while (offset < mediaBuf->range_length()) {
             size_t size = mediaBuf->range_length() - offset;
             bool lastPacket = true;
-            if (size + 12 + 2 > buffer->capacity()) {
+            if (size + UDP_HEADER_SIZE + RTP_HEADER_SIZE + RTP_FU_HEADER_SIZE +
+                    RTP_PAYLOAD_ROOM_SIZE > buffer->capacity()) {
                 lastPacket = false;
-                size = buffer->capacity() - 12 - 2;
+                size = buffer->capacity() - UDP_HEADER_SIZE - RTP_HEADER_SIZE -
+                    RTP_FU_HEADER_SIZE - RTP_PAYLOAD_ROOM_SIZE;
             }
 
             uint8_t *data = buffer->data();
             data[0] = 0x80;
-            data[1] = (lastPacket ? (1 << 7) : 0x00) | PT;  // M-bit
+            data[1] = (lastPacket ? (1 << 7) : 0x00) | mPayloadType;  // M-bit
             data[2] = (mSeqNo >> 8) & 0xff;
             data[3] = mSeqNo & 0xff;
             data[4] = rtpTime >> 24;
@@ -637,18 +902,39 @@
             data[10] = (mSourceID >> 8) & 0xff;
             data[11] = mSourceID & 0xff;
 
-            data[12] = 28 | (nalType & 0xe0);
+            /*  H265 payload header is 16 bit
+                 0 1 2 3 4 5 6 7 0 1 2 3 4 5 6 7
+                +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+                |F|     Type  |  Layer ID | TID |
+                +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+            */
+            ALOGV("H265 payload header 0x%x %x", mediaData[0], mediaData[1]);
+            // excludes Type from 1st byte of H265 payload header.
+            data[12] = mediaData[0] & 0x81;
+            // fills Type as FU (49 == 0x31)
+            data[12] = data[12] | (0x31 << 1);
+            data[13] = mediaData[1];
+
+            ALOGV("H265 FU header 0x%x %x", data[12], data[13]);
 
             CHECK(!firstPacket || !lastPacket);
+            /*
+                FU INDICATOR HDR
+                0 1 2 3 4 5 6 7
+                +-+-+-+-+-+-+-+
+                |S|E|   Type  |
+                +-+-+-+-+-+-+-+
+            */
 
-            data[13] =
+            data[14] =
                 (firstPacket ? 0x80 : 0x00)
                 | (lastPacket ? 0x40 : 0x00)
-                | (nalType & 0x1f);
+                | (nalType & H265_NALU_MASK);
+            ALOGV("H265 FU indicator 0x%x", data[14]);
 
-            memcpy(&data[14], &mediaData[offset], size);
+            memcpy(&data[15], &mediaData[offset], size);
 
-            buffer->setRange(0, 14 + size);
+            buffer->setRange(0, 15 + size);
 
             send(buffer, false /* isRTCP */);
 
@@ -663,6 +949,170 @@
 
     mLastRTPTime = rtpTime;
     mLastNTPTime = GetNowNTP();
+
+}
+
+void ARTPWriter::sendAVCData(MediaBufferBase *mediaBuf) {
+    // 12 bytes RTP header + 2 bytes for the FU-indicator and FU-header.
+    CHECK_GE(kMaxPacketSize, 12u + 2u);
+
+    int64_t timeUs;
+    CHECK(mediaBuf->meta_data().findInt64(kKeyTime, &timeUs));
+
+    sendSPSPPSIfIFrame(mediaBuf, timeUs);
+
+    uint32_t rtpTime = mRTPTimeBase + (timeUs * 9 / 100LL);
+
+    const uint8_t *mediaData =
+        (const uint8_t *)mediaBuf->data() + mediaBuf->range_offset();
+
+    int32_t sps, pps;
+    bool isSpsPps = false;
+    if (mediaBuf->meta_data().findInt32(kKeySps, &sps) ||
+            mediaBuf->meta_data().findInt32(kKeyPps, &pps)) {
+        isSpsPps = true;
+    }
+
+    sp<ABuffer> buffer = new ABuffer(kMaxPacketSize);
+    if (mediaBuf->range_length() + UDP_HEADER_SIZE + RTP_HEADER_SIZE + RTP_PAYLOAD_ROOM_SIZE
+            <= buffer->capacity()) {
+        // The data fits into a single packet
+        uint8_t *data = buffer->data();
+        data[0] = 0x80;
+        if (mRTPCVOExtMap > 0)
+            data[0] |= 0x10;
+        if (isSpsPps)
+            data[1] = mPayloadType;  // Marker bit should not be set in case of sps/pps
+        else
+            data[1] = (1 << 7) | mPayloadType;
+        data[2] = (mSeqNo >> 8) & 0xff;
+        data[3] = mSeqNo & 0xff;
+        data[4] = rtpTime >> 24;
+        data[5] = (rtpTime >> 16) & 0xff;
+        data[6] = (rtpTime >> 8) & 0xff;
+        data[7] = rtpTime & 0xff;
+        data[8] = mSourceID >> 24;
+        data[9] = (mSourceID >> 16) & 0xff;
+        data[10] = (mSourceID >> 8) & 0xff;
+        data[11] = mSourceID & 0xff;
+
+        int rtpExtIndex = 0;
+        if (mRTPCVOExtMap > 0) {
+            /*
+                0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+               +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+               |       0xBE    |    0xDE       |           length=3            |
+               +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+               |  ID   | L=0   |     data      |  ID   |  L=1  |   data...
+               +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+                     ...data   |    0 (pad)    |    0 (pad)    |  ID   | L=3   |
+               +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+               |                          data                                 |
+               +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+
+
+              In the one-byte header form of extensions, the 16-bit value required
+              by the RTP specification for a header extension, labeled in the RTP
+              specification as "defined by profile", takes the fixed bit pattern
+              0xBEDE (the first version of this specification was written on the
+              feast day of the Venerable Bede).
+            */
+            data[12] = 0xBE;
+            data[13] = 0xDE;
+            // put a length of RTP Extension.
+            data[14] = 0x00;
+            data[15] = 0x01;
+            // put extmap of RTP assigned for CVO.
+            data[16] = (mRTPCVOExtMap << 4) | 0x0;
+            // put image degrees as per CVO specification.
+            data[17] = mRTPCVODegrees;
+            data[18] = 0x0;
+            data[19] = 0x0;
+            rtpExtIndex = 8;
+        }
+
+        memcpy(&data[12 + rtpExtIndex],
+               mediaData, mediaBuf->range_length());
+
+        buffer->setRange(0, mediaBuf->range_length() + (12 + rtpExtIndex));
+
+        send(buffer, false /* isRTCP */);
+
+        ++mSeqNo;
+        ++mNumRTPSent;
+        mNumRTPOctetsSent += buffer->size() - (12 + rtpExtIndex);
+    } else {
+        // FU-A
+
+        unsigned nalType = mediaData[0];
+        size_t offset = 1;
+
+        bool firstPacket = true;
+        while (offset < mediaBuf->range_length()) {
+            size_t size = mediaBuf->range_length() - offset;
+            bool lastPacket = true;
+            if (size + UDP_HEADER_SIZE + RTP_HEADER_SIZE + RTP_FU_HEADER_SIZE +
+                    RTP_PAYLOAD_ROOM_SIZE > buffer->capacity()) {
+                lastPacket = false;
+                size = buffer->capacity() - UDP_HEADER_SIZE - RTP_HEADER_SIZE -
+                    RTP_FU_HEADER_SIZE - RTP_PAYLOAD_ROOM_SIZE;
+            }
+
+            uint8_t *data = buffer->data();
+            data[0] = 0x80;
+            if (lastPacket && mRTPCVOExtMap > 0)
+                data[0] |= 0x10;
+            data[1] = (lastPacket ? (1 << 7) : 0x00) | mPayloadType;  // M-bit
+            data[2] = (mSeqNo >> 8) & 0xff;
+            data[3] = mSeqNo & 0xff;
+            data[4] = rtpTime >> 24;
+            data[5] = (rtpTime >> 16) & 0xff;
+            data[6] = (rtpTime >> 8) & 0xff;
+            data[7] = rtpTime & 0xff;
+            data[8] = mSourceID >> 24;
+            data[9] = (mSourceID >> 16) & 0xff;
+            data[10] = (mSourceID >> 8) & 0xff;
+            data[11] = mSourceID & 0xff;
+
+            int rtpExtIndex = 0;
+            if (lastPacket && mRTPCVOExtMap > 0) {
+                data[12] = 0xBE;
+                data[13] = 0xDE;
+                data[14] = 0x00;
+                data[15] = 0x01;
+                data[16] = (mRTPCVOExtMap << 4) | 0x0;
+                data[17] = mRTPCVODegrees;
+                data[18] = 0x0;
+                data[19] = 0x0;
+                rtpExtIndex = 8;
+            }
+
+            data[12 + rtpExtIndex] = 28 | (nalType & 0xe0);
+
+            CHECK(!firstPacket || !lastPacket);
+
+            data[13 + rtpExtIndex] =
+                (firstPacket ? 0x80 : 0x00)
+                | (lastPacket ? 0x40 : 0x00)
+                | (nalType & 0x1f);
+
+            memcpy(&data[14 + rtpExtIndex], &mediaData[offset], size);
+
+            buffer->setRange(0, 14 + rtpExtIndex + size);
+
+            send(buffer, false /* isRTCP */);
+
+            ++mSeqNo;
+            ++mNumRTPSent;
+            mNumRTPOctetsSent += buffer->size() - (12 + rtpExtIndex);
+
+            firstPacket = false;
+            offset += size;
+        }
+    }
+
+    mLastRTPTime = rtpTime;
+    mLastNTPTime = GetNowNTP();
 }
 
 void ARTPWriter::sendH263Data(MediaBufferBase *mediaBuf) {
@@ -696,7 +1146,7 @@
 
         uint8_t *data = buffer->data();
         data[0] = 0x80;
-        data[1] = (lastPacket ? 0x80 : 0x00) | PT;  // M-bit
+        data[1] = (lastPacket ? 0x80 : 0x00) | mPayloadType;  // M-bit
         data[2] = (mSeqNo >> 8) & 0xff;
         data[3] = mSeqNo & 0xff;
         data[4] = rtpTime >> 24;
@@ -727,6 +1177,54 @@
     mLastNTPTime = GetNowNTP();
 }
 
+void ARTPWriter::updateCVODegrees(int32_t cvoDegrees) {
+    Mutex::Autolock autoLock(mLock);
+    mRTPCVODegrees = cvoDegrees;
+}
+
+void ARTPWriter::updatePayloadType(int32_t payloadType) {
+    Mutex::Autolock autoLock(mLock);
+    mPayloadType = payloadType;
+}
+
+void ARTPWriter::updateSocketDscp(int32_t dscp) {
+    mRtpLayer3Dscp = dscp << 2;
+
+    /* mRtpLayer3Dscp will be mapped to WMM(Wifi) as per operator's requirement */
+    if (setsockopt(mRTPSocket, IPPROTO_IP, IP_TOS,
+                (int *)&mRtpLayer3Dscp, sizeof(mRtpLayer3Dscp)) < 0) {
+        ALOGE("failed to set dscp on rtpsock. err=%s", strerror(errno));
+    } else {
+        ALOGD("successfully set dscp on rtpsock. opt=%d", mRtpLayer3Dscp);
+        setsockopt(mRTCPSocket, IPPROTO_IP, IP_TOS,
+                (int *)&mRtpLayer3Dscp, sizeof(mRtpLayer3Dscp));
+        ALOGD("successfully set dscp on rtcpsock. opt=%d", mRtpLayer3Dscp);
+    }
+}
+
+void ARTPWriter::updateSocketNetwork(int64_t socketNetwork) {
+    mRTPSockNetwork = (net_handle_t)socketNetwork;
+    ALOGI("trying to bind rtp socket(%d) to network(%llu).",
+                mRTPSocket, (unsigned long long)mRTPSockNetwork);
+
+    int result = android_setsocknetwork(mRTPSockNetwork, mRTPSocket);
+    if (result != 0) {
+        ALOGW("failed(%d) to bind rtp socket(%d) to network(%llu)",
+                result, mRTPSocket, (unsigned long long)mRTPSockNetwork);
+    }
+    result = android_setsocknetwork(mRTPSockNetwork, mRTCPSocket);
+    if (result != 0) {
+        ALOGW("failed(%d) to bind rtcp socket(%d) to network(%llu)",
+                result, mRTCPSocket, (unsigned long long)mRTPSockNetwork);
+    }
+    ALOGI("done. bind rtp socket(%d) to network(%llu)",
+                mRTPSocket, (unsigned long long)mRTPSockNetwork);
+}
+
+uint32_t ARTPWriter::getSequenceNum() {
+    return mSeqNo;
+}
+
 static size_t getFrameSize(bool isWide, unsigned FT) {
     static const size_t kFrameSizeNB[8] = {
         95, 103, 118, 134, 148, 159, 204, 244
@@ -778,7 +1276,7 @@
     // The data fits into a single packet
     uint8_t *data = buffer->data();
     data[0] = 0x80;
-    data[1] = PT;
+    data[1] = mPayloadType;
     if (mNumRTPSent == 0) {
         // Signal start of talk-spurt.
         data[1] |= 0x80;  // M-bit
@@ -834,5 +1332,80 @@
     mLastNTPTime = GetNowNTP();
 }
 
-}  // namespace android
+void ARTPWriter::makeSocketPairAndBind(String8& localIp, int localPort,
+        String8& remoteIp, int remotePort) {
+    static char kSomeone[16] = "someone@";
+    int nameLength = strlen(kSomeone);
+    memcpy(kCNAME, kSomeone, nameLength);
+    memcpy(kCNAME + nameLength, localIp.c_str(), localIp.length() + 1);
 
+    if (localIp.contains(":"))
+        mIsIPv6 = true;
+    else
+        mIsIPv6 = false;
+
+    mRTPSocket = socket(mIsIPv6 ? AF_INET6 : AF_INET, SOCK_DGRAM, 0);
+    CHECK_GE(mRTPSocket, 0);
+    mRTCPSocket = socket(mIsIPv6 ? AF_INET6 : AF_INET, SOCK_DGRAM, 0);
+    CHECK_GE(mRTCPSocket, 0);
+
+    int sockopt = 1;
+    setsockopt(mRTPSocket, SOL_SOCKET, SO_REUSEADDR, (int *)&sockopt, sizeof(sockopt));
+    setsockopt(mRTCPSocket, SOL_SOCKET, SO_REUSEADDR, (int *)&sockopt, sizeof(sockopt));
+
+    if (mIsIPv6) {
+        memset(&mLocalAddr6, 0, sizeof(mLocalAddr6));
+        memset(&mRTPAddr6, 0, sizeof(mRTPAddr6));
+        memset(&mRTCPAddr6, 0, sizeof(mRTCPAddr6));
+
+        mLocalAddr6.sin6_family = AF_INET6;
+        inet_pton(AF_INET6, localIp.string(), &mLocalAddr6.sin6_addr);
+        mLocalAddr6.sin6_port = htons((uint16_t)localPort);
+
+        mRTPAddr6.sin6_family = AF_INET6;
+        inet_pton(AF_INET6, remoteIp.string(), &mRTPAddr6.sin6_addr);
+        mRTPAddr6.sin6_port = htons((uint16_t)remotePort);
+
+        mRTCPAddr6 = mRTPAddr6;
+        mRTCPAddr6.sin6_port = htons((uint16_t)(remotePort + 1));
+    } else {
+        memset(&mLocalAddr, 0, sizeof(mLocalAddr));
+        memset(&mRTPAddr, 0, sizeof(mRTPAddr));
+        memset(&mRTCPAddr, 0, sizeof(mRTCPAddr));
+
+        mLocalAddr.sin_family = AF_INET;
+        mLocalAddr.sin_addr.s_addr = inet_addr(localIp.string());
+        mLocalAddr.sin_port = htons((uint16_t)localPort);
+
+        mRTPAddr.sin_family = AF_INET;
+        mRTPAddr.sin_addr.s_addr = inet_addr(remoteIp.string());
+        mRTPAddr.sin_port = htons((uint16_t)remotePort);
+
+        mRTCPAddr = mRTPAddr;
+        mRTCPAddr.sin_port = htons((uint16_t)(remotePort + 1));
+    }
+
+    struct sockaddr *localAddr = mIsIPv6 ?
+        (struct sockaddr*)&mLocalAddr6 : (struct sockaddr*)&mLocalAddr;
+
+    int sizeSockSt = mIsIPv6 ? sizeof(mLocalAddr6) : sizeof(mLocalAddr);
+
+    if (bind(mRTPSocket, localAddr, sizeSockSt) == -1) {
+        ALOGE("failed to bind rtp %s:%d err=%s", localIp.string(), localPort, strerror(errno));
+    } else {
+        ALOGD("succeed to bind rtp %s:%d", localIp.string(), localPort);
+    }
+
+    if (mIsIPv6)
+        mLocalAddr6.sin6_port = htons((uint16_t)(localPort + 1));
+    else
+        mLocalAddr.sin_port = htons((uint16_t)(localPort + 1));
+
+    if (bind(mRTCPSocket, localAddr, sizeSockSt) == -1) {
+        ALOGE("failed to bind rtcp %s:%d err=%s", localIp.string(), localPort + 1, strerror(errno));
+    } else {
+        ALOGD("succeed to bind rtcp %s:%d", localIp.string(), localPort + 1);
+    }
+}
+
+}  // namespace android
diff --git a/media/libstagefright/rtsp/ARTPWriter.h b/media/libstagefright/rtsp/ARTPWriter.h
index 2f13486..f7e2204 100644
--- a/media/libstagefright/rtsp/ARTPWriter.h
+++ b/media/libstagefright/rtsp/ARTPWriter.h
@@ -27,6 +27,8 @@
 #include <arpa/inet.h>
 #include <sys/socket.h>
 
+#include <android/multinetwork.h>
+
 #define LOG_TO_FILES    0
 
 namespace android {
@@ -36,14 +38,23 @@
 
 struct ARTPWriter : public MediaWriter {
     explicit ARTPWriter(int fd);
+    explicit ARTPWriter(int fd, String8& localIp, int localPort,
+                                String8& remoteIp, int remotePort,
+                                uint32_t seqNo);
 
     virtual status_t addSource(const sp<MediaSource> &source);
     virtual bool reachedEOS();
     virtual status_t start(MetaData *params);
     virtual status_t stop();
     virtual status_t pause();
+    void updateCVODegrees(int32_t cvoDegrees);
+    void updatePayloadType(int32_t payloadType);
+    void updateSocketDscp(int32_t dscp);
+    void updateSocketNetwork(int64_t socketNetwork);
+    uint32_t getSequenceNum();
 
     virtual void onMessageReceived(const sp<AMessage> &msg);
+    virtual void setTMMBNInfo(uint32_t opponentID, uint32_t bitrate);
 
 protected:
     virtual ~ARTPWriter();
@@ -76,15 +87,26 @@
     sp<ALooper> mLooper;
     sp<AHandlerReflector<ARTPWriter> > mReflector;
 
-    int mSocket;
+    bool mIsIPv6;
+    int mRTPSocket, mRTCPSocket;
+    struct sockaddr_in mLocalAddr;
     struct sockaddr_in mRTPAddr;
     struct sockaddr_in mRTCPAddr;
+    struct sockaddr_in6 mLocalAddr6;
+    struct sockaddr_in6 mRTPAddr6;
+    struct sockaddr_in6 mRTCPAddr6;
+    int32_t mRtpLayer3Dscp;
+    net_handle_t mRTPSockNetwork;
 
     AString mProfileLevel;
     AString mSeqParamSet;
     AString mPicParamSet;
 
+    MediaBufferBase *mSPSBuf;
+    MediaBufferBase *mPPSBuf;
+
     uint32_t mSourceID;
+    uint32_t mPayloadType;
     uint32_t mSeqNo;
     uint32_t mRTPTimeBase;
     uint32_t mNumRTPSent;
@@ -92,10 +114,16 @@
     uint32_t mLastRTPTime;
     uint64_t mLastNTPTime;
 
+    uint32_t mOpponentID;
+    uint32_t mBitrate;
+
     int32_t mNumSRsSent;
+    int32_t mRTPCVOExtMap;
+    int32_t mRTPCVODegrees;
 
     enum {
         INVALID,
+        H265,
         H264,
         H263,
         AMR_NB,
@@ -109,16 +137,20 @@
 
     void addSR(const sp<ABuffer> &buffer);
     void addSDES(const sp<ABuffer> &buffer);
+    void addTMMBN(const sp<ABuffer> &buffer);
 
     void makeH264SPropParamSets(MediaBufferBase *buffer);
     void dumpSessionDesc();
 
     void sendBye();
+    void sendSPSPPSIfIFrame(MediaBufferBase *mediaBuf, int64_t timeUs);
+    void sendHEVCData(MediaBufferBase *mediaBuf);
     void sendAVCData(MediaBufferBase *mediaBuf);
     void sendH263Data(MediaBufferBase *mediaBuf);
     void sendAMRData(MediaBufferBase *mediaBuf);
 
     void send(const sp<ABuffer> &buffer, bool isRTCP);
+    void makeSocketPairAndBind(String8& localIp, int localPort, String8& remoteIp, int remotePort);
 
     DISALLOW_EVIL_CONSTRUCTORS(ARTPWriter);
 };
diff --git a/media/libstagefright/rtsp/ASessionDescription.cpp b/media/libstagefright/rtsp/ASessionDescription.cpp
index 2b42040..5b5b4b1 100644
--- a/media/libstagefright/rtsp/ASessionDescription.cpp
+++ b/media/libstagefright/rtsp/ASessionDescription.cpp
@@ -27,6 +27,8 @@
 
 namespace android {
 
+constexpr unsigned kDefaultAs = 960; // kbps?
+
 ASessionDescription::ASessionDescription()
     : mIsValid(false) {
 }
@@ -103,7 +105,7 @@
                     key.setTo(line, 0, colonPos);
 
                     if (key == "a=fmtp" || key == "a=rtpmap"
-                            || key == "a=framesize") {
+                            || key == "a=framesize" || key == "a=extmap") {
                         ssize_t spacePos = line.find(" ", colonPos + 1);
                         if (spacePos < 0) {
                             return false;
@@ -201,6 +203,33 @@
     return true;
 }
 
+bool ASessionDescription::getCvoExtMap(
+        size_t index, int32_t *cvoExtMap) const {
+    CHECK_GE(index, 0u);
+    CHECK_LT(index, mTracks.size());
+
+    AString key, value;
+    *cvoExtMap = 0;
+
+    const Attribs &track = mTracks.itemAt(index);
+    for (size_t i = 0; i < track.size(); i++) {
+        value = track.valueAt(i);
+        if (value.size() > 0 && strcmp(value.c_str(), "urn:3gpp:video-orientation") == 0) {
+            key = track.keyAt(i);
+            break;
+        }
+    }
+
+    if (key.size() > 0) {
+        const char *colonPos = strrchr(key.c_str(), ':');
+        colonPos++;
+        *cvoExtMap = atoi(colonPos);
+        return true;
+    }
+
+    return false;
+}
+
 void ASessionDescription::getFormatType(
         size_t index, unsigned long *PT,
         AString *desc, AString *params) const {
@@ -345,5 +374,74 @@
     return *npt2 > *npt1;
 }
 
+// static
+void ASessionDescription::SDPStringFactory(AString &sdp,
+        const char *ip, bool isAudio, unsigned port, unsigned payloadType,
+        unsigned as, const char *codec, const char *fmtp,
+        int32_t width, int32_t height, int32_t cvoExtMap)
+{
+    bool isIPv4 = (AString(ip).find("::") == -1) ? true : false;
+    sdp.clear();
+    sdp.append("v=0\r\n");
+
+    sdp.append("a=range:npt=now-\r\n");
+
+    sdp.append("m=");
+    sdp.append(isAudio ? "audio " : "video ");
+    sdp.append(port);
+    sdp.append(" RTP/AVP ");
+    sdp.append(payloadType);
+    sdp.append("\r\n");
+
+    sdp.append("c= IN IP");
+    if (isIPv4) {
+        sdp.append("4 ");
+    } else {
+        sdp.append("6 ");
+    }
+    sdp.append(ip);
+    sdp.append("\r\n");
+
+    sdp.append("b=AS:");
+    sdp.append(as > 0 ? as : kDefaultAs);
+    sdp.append("\r\n");
+
+    sdp.append("a=rtpmap:");
+    sdp.append(payloadType);
+    sdp.append(" ");
+    sdp.append(codec);
+    sdp.append("/");
+    sdp.append(isAudio ? "8000" : "90000");
+    sdp.append("\r\n");
+
+    if (fmtp != NULL) {
+        sdp.append("a=fmtp:");
+        sdp.append(payloadType);
+        sdp.append(" ");
+        sdp.append(fmtp);
+        sdp.append("\r\n");
+    }
+
+    if (!isAudio && width > 0 && height > 0) {
+        sdp.append("a=framesize:");
+        sdp.append(payloadType);
+        sdp.append(" ");
+        sdp.append(width);
+        sdp.append("-");
+        sdp.append(height);
+        sdp.append("\r\n");
+    }
+
+    if (cvoExtMap > 0) {
+        sdp.append("a=extmap:");
+        sdp.append(cvoExtMap);
+        sdp.append(" ");
+        sdp.append("urn:3gpp:video-orientation");
+        sdp.append("\r\n");
+    }
+
+    ALOGV("SDPStringFactory => %s", sdp.c_str());
+}
+
 }  // namespace android
 
diff --git a/media/libstagefright/rtsp/ASessionDescription.h b/media/libstagefright/rtsp/ASessionDescription.h
index b462983..91f5442 100644
--- a/media/libstagefright/rtsp/ASessionDescription.h
+++ b/media/libstagefright/rtsp/ASessionDescription.h
@@ -40,6 +40,8 @@
     size_t countTracks() const;
     void getFormat(size_t index, AString *value) const;
 
+    bool getCvoExtMap(size_t index, int32_t *cvoExtMap) const;
+
     void getFormatType(
             size_t index, unsigned long *PT,
             AString *desc, AString *params) const;
@@ -63,6 +65,9 @@
     // i.e. we have a fixed duration, otherwise this is live streaming.
     static bool parseNTPRange(const char *s, float *npt1, float *npt2);
 
+    static void SDPStringFactory(AString &sdp, const char *ip, bool isAudio, unsigned port,
+        unsigned payloadType, unsigned as, const char *codec, const char *fmtp = NULL,
+        int32_t width = 0, int32_t height = 0, int32_t cvoExtMap = 0);
 protected:
     virtual ~ASessionDescription();
 
diff --git a/media/libstagefright/rtsp/Android.bp b/media/libstagefright/rtsp/Android.bp
index a5a895e..6179142 100644
--- a/media/libstagefright/rtsp/Android.bp
+++ b/media/libstagefright/rtsp/Android.bp
@@ -4,6 +4,7 @@
     srcs: [
         "AAMRAssembler.cpp",
         "AAVCAssembler.cpp",
+        "AHEVCAssembler.cpp",
         "AH263Assembler.cpp",
         "AMPEG2TSAssembler.cpp",
         "AMPEG4AudioAssembler.cpp",
@@ -17,9 +18,11 @@
         "ARTSPConnection.cpp",
         "ASessionDescription.cpp",
         "SDPLoader.cpp",
+        "QualManager.cpp",
     ],
 
     shared_libs: [
+        "libandroid_net",
         "libcrypto",
         "libdatasource",
         "libmedia",
@@ -28,6 +31,7 @@
     include_dirs: [
         "frameworks/av/media/libstagefright",
         "frameworks/native/include/media/openmax",
+        "frameworks/native/include/android",
     ],
 
     arch: {
diff --git a/media/libstagefright/rtsp/MyHandler.h b/media/libstagefright/rtsp/MyHandler.h
index 7f025a5..0fdf431 100644
--- a/media/libstagefright/rtsp/MyHandler.h
+++ b/media/libstagefright/rtsp/MyHandler.h
@@ -1032,6 +1032,11 @@
                     break;
                 }
 
+                int32_t rtcpEvent;
+                if (msg->findInt32("rtcp-event", &rtcpEvent)) {
+                    break;
+                }
+
                 ++mNumAccessUnitsReceived;
                 postAccessUnitTimeoutCheck();
 
diff --git a/media/libstagefright/rtsp/QualManager.cpp b/media/libstagefright/rtsp/QualManager.cpp
new file mode 100644
index 0000000..37aa326
--- /dev/null
+++ b/media/libstagefright/rtsp/QualManager.cpp
@@ -0,0 +1,174 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "QualManager"
+
+#include <algorithm>
+
+#include <sys/prctl.h>
+#include <utils/Log.h>
+
+#include "QualManager.h"
+
+namespace android {
+
+QualManager::Watcher::Watcher(int32_t timeLimit)
+    : Thread(false), mWatching(false), mSwitch(false),
+      mTimeLimit(timeLimit * 1000000LL)     // timeLimit ms
+{
+}
+
+bool QualManager::Watcher::isExpired() const
+{
+    return mSwitch;
+}
+
+void QualManager::Watcher::setup() {
+    AutoMutex _l(mMyLock);
+    if (mWatching == false) {
+        mWatching = true;
+        mMyCond.signal();
+    }
+}
+
+void QualManager::Watcher::release() {
+    AutoMutex _l(mMyLock);
+    if (mSwitch) {
+        ALOGW("%s DISARMED", name);
+        mSwitch = false;
+    }
+    if (mWatching == true) {
+        ALOGW("%s DISARMED", name);
+        mWatching = false;
+        mMyCond.signal();
+    }
+}
+
+void QualManager::Watcher::exit() {
+    AutoMutex _l(mMyLock);
+    // The order is important to avoid dead lock.
+    Thread::requestExit();
+    mMyCond.signal();
+}
+
+QualManager::Watcher::~Watcher() {
+    ALOGI("%s thread dead", name);
+}
+
+bool QualManager::Watcher::threadLoop() {
+    AutoMutex _l(mMyLock);
+#if defined(__linux__)
+    prctl(PR_GET_NAME, name, 0, 0, 0);
+#endif
+    while (!exitPending()) {
+        ALOGW("%s Timer init", name);
+        mMyCond.wait(mMyLock);                      // waits as non-watching state
+        if (exitPending())
+            return false;
+        ALOGW("%s timer BOOM after %d msec", name, (int)(mTimeLimit / 1000000LL));
+        mMyCond.waitRelative(mMyLock, mTimeLimit);  // waits as watching satte
+        if (mWatching == true) {
+            mSwitch = true;
+            ALOGW("%s BOOM!!!!", name);
+        }
+        mWatching = false;
+    }
+    return false;
+}
+
+
+QualManager::QualManager()
+    : mMinBitrate(-1), mMaxBitrate(-1),
+      mTargetBitrate(512000), mLastTargetBitrate(-1),
+      mLastSetBitrateTime(0), mIsNewTargetBitrate(false)
+{
+    VFPWatcher = new Watcher(3000);     //Very Few Packet Watcher
+    VFPWatcher->run("VeryFewPtk");
+    LBRWatcher = new Watcher(10000);    //Low Bit Rate Watcher
+    LBRWatcher->run("LowBitRate");
+}
+
+QualManager::~QualManager() {
+    VFPWatcher->exit();
+    LBRWatcher->exit();
+}
+
+int32_t QualManager::getTargetBitrate() {
+    if (mIsNewTargetBitrate) {
+        mIsNewTargetBitrate = false;
+        mLastTargetBitrate = clampingBitrate(mTargetBitrate);
+        mTargetBitrate = mLastTargetBitrate;
+        return mTargetBitrate;
+    } else {
+        return -1;
+    }
+}
+
+bool QualManager::isNeedToDowngrade() {
+    return LBRWatcher->isExpired();
+}
+
+void QualManager::setTargetBitrate(uint8_t fraction, int64_t nowUs, bool isTooLowPkts) {
+    /* Too Low Packet. Maybe opponent is switching camera.
+     * If this condition goes longer, we should down bitrate.
+     */
+    if (isTooLowPkts) {
+        VFPWatcher->setup();
+    } else {
+        VFPWatcher->release();
+    }
+
+    if ((fraction > (256 * 5 / 100) && !isTooLowPkts) || VFPWatcher->isExpired()) {
+        // loss more than 5%                          or  VFPWatcher BOOMED
+        mTargetBitrate -= mBitrateStep * 3;
+    } else if (fraction <= (256 * 2 /100)) {
+        // loss less than 2%
+        mTargetBitrate += mBitrateStep;
+    }
+
+    if (mTargetBitrate > mMaxBitrate) {
+        mTargetBitrate = mMaxBitrate + mBitrateStep;
+    } else if (mTargetBitrate < mMinBitrate) {
+        LBRWatcher->setup();
+        mTargetBitrate = mMinBitrate - mBitrateStep;
+    }
+
+    if (mLastTargetBitrate != clampingBitrate(mTargetBitrate) ||
+        nowUs - mLastSetBitrateTime > 5000000ll) {
+        mIsNewTargetBitrate = true;
+        mLastSetBitrateTime = nowUs;
+    }
+}
+
+void QualManager::setMinMaxBitrate(int32_t min, int32_t max) {
+    mMinBitrate = min;
+    mMaxBitrate = max;
+    mBitrateStep = (max - min) / 8;
+}
+
+void QualManager::setBitrateData(int32_t bitrate, int64_t /*now*/) {
+    // A bitrate that is considered packetloss also should be good.
+    if (bitrate >= mMinBitrate && mTargetBitrate >= mMinBitrate) {
+        LBRWatcher->release();
+    } else if (bitrate < mMinBitrate){
+        LBRWatcher->setup();
+    }
+}
+
+int32_t QualManager::clampingBitrate(int32_t bitrate) {
+    return std::min(std::max(mMinBitrate, bitrate), mMaxBitrate);
+}
+} // namespace android
diff --git a/media/libstagefright/rtsp/QualManager.h b/media/libstagefright/rtsp/QualManager.h
new file mode 100644
index 0000000..a7dc921
--- /dev/null
+++ b/media/libstagefright/rtsp/QualManager.h
@@ -0,0 +1,75 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef QUAL_MANAGER_H_
+
+#define QUAL_MANAGER_H_
+
+#include <stdint.h>
+#include <utils/Thread.h>
+
+namespace android {
+class QualManager {
+public:
+    QualManager();
+    ~QualManager();
+
+    int32_t getTargetBitrate();
+    bool isNeedToDowngrade();
+
+    void setTargetBitrate(uint8_t fraction, int64_t nowUs, bool isTooLowPkts);
+    void setMinMaxBitrate(int32_t min, int32_t max);
+    void setBitrateData(int32_t bitrate, int64_t now);
+private:
+    class Watcher : public Thread
+    {
+    public:
+        Watcher(int32_t timeLimit);
+
+        void setup();
+        void release();
+        void exit();
+        bool isExpired() const;
+    private:
+        virtual ~Watcher();
+        virtual bool threadLoop();
+
+        char name[32] = {0,};
+
+        Condition mMyCond;
+        Mutex mMyLock;
+
+        bool mWatching;
+        bool mSwitch;
+        const nsecs_t mTimeLimit;
+    };
+    sp<Watcher> VFPWatcher;
+    sp<Watcher> LBRWatcher;
+    int32_t mMinBitrate;
+    int32_t mMaxBitrate;
+    int32_t mBitrateStep;
+
+    int32_t mTargetBitrate;
+    int32_t mLastTargetBitrate;
+    int64_t mLastSetBitrateTime;
+
+    bool mIsNewTargetBitrate;
+
+    int32_t clampingBitrate(int32_t bitrate);
+};
+} //namespace android
+
+#endif  // QUAL_MANAGER_H_
diff --git a/media/libstagefright/tests/extractorFactory/Android.bp b/media/libstagefright/tests/extractorFactory/Android.bp
index e3e61d7..26ec507 100644
--- a/media/libstagefright/tests/extractorFactory/Android.bp
+++ b/media/libstagefright/tests/extractorFactory/Android.bp
@@ -17,6 +17,7 @@
 cc_test {
     name: "ExtractorFactoryTest",
     gtest: true,
+    test_suites: ["device-tests"],
 
     srcs: [
         "ExtractorFactoryTest.cpp",
diff --git a/media/libstagefright/timedtext/TEST_MAPPING b/media/libstagefright/timedtext/TEST_MAPPING
new file mode 100644
index 0000000..185f824
--- /dev/null
+++ b/media/libstagefright/timedtext/TEST_MAPPING
@@ -0,0 +1,7 @@
+// mappings for frameworks/av/media/libstagefright/timedtext
+{
+  "presubmit": [
+    // TODO(b/148094059): unit tests not allowed to download content
+    //{ "name": "TimedTextUnitTest" }
+  ]
+}
diff --git a/media/libstagefright/xmlparser/TEST_MAPPING b/media/libstagefright/xmlparser/TEST_MAPPING
new file mode 100644
index 0000000..8626d72
--- /dev/null
+++ b/media/libstagefright/xmlparser/TEST_MAPPING
@@ -0,0 +1,6 @@
+// test mapping for frameworks/av/media/libstagefright/xmlparser
+{
+  "presubmit": [
+    { "name": "XMLParserTest" }
+  ]
+}
diff --git a/media/libstagefright/xmlparser/test/Android.bp b/media/libstagefright/xmlparser/test/Android.bp
index 6d97c96..ba02f84 100644
--- a/media/libstagefright/xmlparser/test/Android.bp
+++ b/media/libstagefright/xmlparser/test/Android.bp
@@ -16,6 +16,7 @@
 
 cc_test {
     name: "XMLParserTest",
+    test_suites: ["device-tests"],
     gtest: true,
 
     srcs: [
diff --git a/media/libwatchdog/Android.bp b/media/libwatchdog/Android.bp
index 1a87824..f7f0db7 100644
--- a/media/libwatchdog/Android.bp
+++ b/media/libwatchdog/Android.bp
@@ -14,6 +14,7 @@
 
 cc_library {
     name: "libwatchdog",
+    host_supported: true,
     srcs: [
         "Watchdog.cpp",
     ],
@@ -29,6 +30,11 @@
         darwin: {
             enabled: false,
         },
+        linux_glibc: {
+            cflags: [
+                "-Dsigev_notify_thread_id=_sigev_un._tid",
+            ],
+        },
     },
     apex_available: ["com.android.media"],
     min_sdk_version: "29",
diff --git a/media/ndk/Android.bp b/media/ndk/Android.bp
index 37598f8..d0e0cc7 100644
--- a/media/ndk/Android.bp
+++ b/media/ndk/Android.bp
@@ -181,6 +181,7 @@
 
 cc_test {
     name: "AImageReaderWindowHandleTest",
+    test_suites: ["device-tests"],
     srcs: ["tests/AImageReaderWindowHandleTest.cpp"],
     shared_libs: [
         "libbinder",
diff --git a/media/ndk/NdkImagePriv.h b/media/ndk/NdkImagePriv.h
index 0e8cbcb..b019448 100644
--- a/media/ndk/NdkImagePriv.h
+++ b/media/ndk/NdkImagePriv.h
@@ -30,6 +30,18 @@
 
 using namespace android;
 
+// Formats not listed in the public API, but still available to AImageReader
+enum AIMAGE_PRIVATE_FORMATS {
+    /**
+     * Unprocessed implementation-dependent raw
+     * depth measurements, opaque with 16 bit
+     * samples.
+     *
+     */
+
+    AIMAGE_FORMAT_RAW_DEPTH = 0x1002,
+};
+
 // TODO: this only supports ImageReader
 struct AImage {
     AImage(AImageReader* reader, int32_t format, uint64_t usage, BufferItem* buffer,
diff --git a/media/ndk/NdkImageReader.cpp b/media/ndk/NdkImageReader.cpp
index c0ceb3d..5d8f0b8 100644
--- a/media/ndk/NdkImageReader.cpp
+++ b/media/ndk/NdkImageReader.cpp
@@ -21,7 +21,6 @@
 
 #include "NdkImagePriv.h"
 #include "NdkImageReaderPriv.h"
-#include <private/media/NdkImage.h>
 
 #include <cutils/atomic.h>
 #include <utils/Log.h>
diff --git a/media/ndk/TEST_MAPPING b/media/ndk/TEST_MAPPING
new file mode 100644
index 0000000..1a81538
--- /dev/null
+++ b/media/ndk/TEST_MAPPING
@@ -0,0 +1,6 @@
+// mappings for frameworks/av/media/ndk
+{
+  "presubmit": [
+    { "name": "AImageReaderWindowHandleTest" }
+  ]
+}
diff --git a/media/ndk/include/private/media/NdkImage.h b/media/ndk/include/private/media/NdkImage.h
deleted file mode 100644
index 4368a56..0000000
--- a/media/ndk/include/private/media/NdkImage.h
+++ /dev/null
@@ -1,30 +0,0 @@
-/*
- * Copyright (C) 2019 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef _PRIVATE_MEDIA_NDKIMAGE_H_
-#define _PRIVATE_MEDIA_NDKIMAGE_H_
-// Formats not listed in the public API, but still available to AImageReader
-enum AIMAGE_PRIVATE_FORMATS {
-    /**
-     * Unprocessed implementation-dependent raw
-     * depth measurements, opaque with 16 bit
-     * samples.
-     *
-     */
-
-    AIMAGE_FORMAT_RAW_DEPTH = 0x1002,
-};
-#endif // _PRIVATE_MEDIA_NDKIMAGE
diff --git a/media/ndk/tests/AImageReaderWindowHandleTest.cpp b/media/ndk/tests/AImageReaderWindowHandleTest.cpp
index 5b65064..27864c2 100644
--- a/media/ndk/tests/AImageReaderWindowHandleTest.cpp
+++ b/media/ndk/tests/AImageReaderWindowHandleTest.cpp
@@ -17,10 +17,10 @@
 #include <gtest/gtest.h>
 #include <media/NdkImageReader.h>
 #include <media/NdkImage.h>
-#include <private/media/NdkImage.h>
 #include <mediautils/AImageReaderUtils.h>
 #include <gui/IGraphicBufferProducer.h>
 #include <gui/bufferqueue/1.0/H2BGraphicBufferProducer.h>
+#include <NdkImagePriv.h>
 #include <NdkImageReaderPriv.h>
 #include <vndk/hardware_buffer.h>
 #include <memory>
diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp
index f014209..136ae98 100644
--- a/services/audioflinger/AudioFlinger.cpp
+++ b/services/audioflinger/AudioFlinger.cpp
@@ -61,6 +61,7 @@
 #include <system/audio_effects/effect_visualizer.h>
 #include <system/audio_effects/effect_ns.h>
 #include <system/audio_effects/effect_aec.h>
+#include <system/audio_effects/effect_hapticgenerator.h>
 
 #include <audio_utils/primitives.h>
 
@@ -398,7 +399,7 @@
             return ret;
         }
     }
-    return AudioMixer::HAPTIC_SCALE_MUTE;
+    return static_cast<int>(os::HapticScale::MUTE);
 }
 
 /* static */
@@ -3297,6 +3298,16 @@
     return minThread;
 }
 
+AudioFlinger::ThreadBase *AudioFlinger::hapticPlaybackThread_l() const {
+    for (size_t i  = 0; i < mPlaybackThreads.size(); ++i) {
+        PlaybackThread *thread = mPlaybackThreads.valueAt(i).get();
+        if (thread->hapticChannelMask() != AUDIO_CHANNEL_NONE) {
+            return thread;
+        }
+    }
+    return nullptr;
+}
+
 sp<AudioFlinger::SyncEvent> AudioFlinger::createSyncEvent(AudioSystem::sync_event_t type,
                                     audio_session_t triggerSession,
                                     audio_session_t listenerSession,
@@ -3538,6 +3549,16 @@
             goto Exit;
         }
 
+        const bool hapticPlaybackRequired = EffectModule::isHapticGenerator(&desc.type);
+        if (hapticPlaybackRequired
+                && (sessionId == AUDIO_SESSION_DEVICE
+                        || sessionId == AUDIO_SESSION_OUTPUT_MIX
+                        || sessionId == AUDIO_SESSION_OUTPUT_STAGE)) {
+            // haptic-generating effect is only valid when the session id is a general session id
+            lStatus = INVALID_OPERATION;
+            goto Exit;
+        }
+
         // return effect descriptor
         *pDesc = desc;
         if (io == AUDIO_IO_HANDLE_NONE && sessionId == AUDIO_SESSION_OUTPUT_MIX) {
@@ -3612,7 +3633,17 @@
             // allow only one effect chain per sessionId on mPlaybackThreads.
             for (size_t i = 0; i < mPlaybackThreads.size(); i++) {
                 const audio_io_handle_t checkIo = mPlaybackThreads.keyAt(i);
-                if (io == checkIo) continue;
+                if (io == checkIo) {
+                    if (hapticPlaybackRequired
+                            && mPlaybackThreads.valueAt(i)
+                                    ->hapticChannelMask() == AUDIO_CHANNEL_NONE) {
+                        ALOGE("%s: haptic playback thread is required while the required playback "
+                              "thread(io=%d) doesn't support", __func__, (int)io);
+                        lStatus = BAD_VALUE;
+                        goto Exit;
+                    }
+                    continue;
+                }
                 const uint32_t sessionType =
                         mPlaybackThreads.valueAt(i)->hasAudioSession(sessionId);
                 if ((sessionType & ThreadBase::EFFECT_SESSION) != 0) {
@@ -3649,6 +3680,20 @@
 
         // create effect on selected output thread
         bool pinned = !audio_is_global_session(sessionId) && isSessionAcquired_l(sessionId);
+        ThreadBase *oriThread = nullptr;
+        if (hapticPlaybackRequired && thread->hapticChannelMask() == AUDIO_CHANNEL_NONE) {
+            ThreadBase *hapticThread = hapticPlaybackThread_l();
+            if (hapticThread == nullptr) {
+                ALOGE("%s haptic thread not found while it is required", __func__);
+                lStatus = INVALID_OPERATION;
+                goto Exit;
+            }
+            if (hapticThread != thread) {
+                // Force to use haptic thread for haptic-generating effect.
+                oriThread = thread;
+                thread = hapticThread;
+            }
+        }
         handle = thread->createEffect_l(client, effectClient, priority, sessionId,
                 &desc, enabled, &lStatus, pinned, probe);
         if (lStatus != NO_ERROR && lStatus != ALREADY_EXISTS) {
@@ -3658,6 +3703,11 @@
         } else {
             // handle must be valid here, but check again to be safe.
             if (handle.get() != nullptr && id != nullptr) *id = handle->id();
+            // Invalidate audio session when haptic playback is created.
+            if (hapticPlaybackRequired && oriThread != nullptr) {
+                // invalidateTracksForAudioSession will trigger locking the thread.
+                oriThread->invalidateTracksForAudioSession(sessionId);
+            }
         }
     }
 
diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h
index 20f561e..d3ad908 100644
--- a/services/audioflinger/AudioFlinger.h
+++ b/services/audioflinger/AudioFlinger.h
@@ -91,13 +91,14 @@
 #include "ThreadMetrics.h"
 #include "TrackMetrics.h"
 
-#include <powermanager/IPowerManager.h>
+#include <android/os/IPowerManager.h>
 
 #include <media/nblog/NBLog.h>
 #include <private/media/AudioEffectShared.h>
 #include <private/media/AudioTrackShared.h>
 
 #include <vibrator/ExternalVibration.h>
+#include <vibrator/ExternalVibrationUtils.h>
 
 #include "android/media/BnAudioRecord.h"
 
@@ -756,6 +757,8 @@
 
               sp<ThreadBase> getEffectThread_l(audio_session_t sessionId, int effectId);
 
+              ThreadBase *hapticPlaybackThread_l() const;
+
 
                 void        removeClient_l(pid_t pid);
                 void        removeNotificationClient(pid_t pid);
diff --git a/services/audioflinger/DeviceEffectManager.h b/services/audioflinger/DeviceEffectManager.h
index 81e6065..c6d2110 100644
--- a/services/audioflinger/DeviceEffectManager.h
+++ b/services/audioflinger/DeviceEffectManager.h
@@ -165,6 +165,7 @@
     uint32_t  sampleRate() const override { return 0; }
     audio_channel_mask_t channelMask() const override { return AUDIO_CHANNEL_NONE; }
     uint32_t channelCount() const override { return 0; }
+    audio_channel_mask_t hapticChannelMask() const override { return AUDIO_CHANNEL_NONE; }
     size_t    frameCount() const override  { return 0; }
     uint32_t  latency() const override  { return 0; }
 
diff --git a/services/audioflinger/Effects.cpp b/services/audioflinger/Effects.cpp
index 3dfeb83..9ee47c9 100644
--- a/services/audioflinger/Effects.cpp
+++ b/services/audioflinger/Effects.cpp
@@ -25,6 +25,7 @@
 #include <utils/Log.h>
 #include <system/audio_effects/effect_aec.h>
 #include <system/audio_effects/effect_dynamicsprocessing.h>
+#include <system/audio_effects/effect_hapticgenerator.h>
 #include <system/audio_effects/effect_ns.h>
 #include <system/audio_effects/effect_visualizer.h>
 #include <audio_utils/channels.h>
@@ -879,6 +880,11 @@
         }
 #endif
     }
+    if (isHapticGenerator()) {
+        audio_channel_mask_t hapticChannelMask = mCallback->hapticChannelMask();
+        mConfig.inputCfg.channels |= hapticChannelMask;
+        mConfig.outputCfg.channels |= hapticChannelMask;
+    }
     mInChannelCountRequested =
             audio_channel_count_from_out_mask(mConfig.inputCfg.channels);
     mOutChannelCountRequested =
@@ -1522,6 +1528,42 @@
     return mOffloaded;
 }
 
+/*static*/
+bool AudioFlinger::EffectModule::isHapticGenerator(const effect_uuid_t *type) {
+    return memcmp(type, FX_IID_HAPTICGENERATOR, sizeof(effect_uuid_t)) == 0;
+}
+
+bool AudioFlinger::EffectModule::isHapticGenerator() const {
+    return isHapticGenerator(&mDescriptor.type);
+}
+
+status_t AudioFlinger::EffectModule::setHapticIntensity(int id, int intensity)
+{
+    if (mStatus != NO_ERROR) {
+        return mStatus;
+    }
+    if (!isHapticGenerator()) {
+        ALOGW("Should not set haptic intensity for effects that are not HapticGenerator");
+        return INVALID_OPERATION;
+    }
+
+    uint32_t buf32[sizeof(effect_param_t) / sizeof(uint32_t) + 3];
+    effect_param_t *param = (effect_param_t*) buf32;
+    param->psize = sizeof(int32_t);
+    param->vsize = sizeof(int32_t) * 2;
+    *(int32_t*)param->data = HG_PARAM_HAPTIC_INTENSITY;
+    *((int32_t*)param->data + 1) = id;
+    *((int32_t*)param->data + 2) = intensity;
+    uint32_t size = sizeof(int32_t);
+    status_t status = command(
+            EFFECT_CMD_SET_PARAM, sizeof(effect_param_t) + param->psize + param->vsize,
+            param, &size, &param->status);
+    if (status == NO_ERROR) {
+        status = param->status;
+    }
+    return status;
+}
+
 static std::string dumpInOutBuffer(bool isInput, const sp<EffectBufferHalInterface> &buffer) {
     std::stringstream ss;
 
@@ -2385,6 +2427,25 @@
     }
 }
 
+// containsHapticGeneratingEffect_l must be called with ThreadBase::mLock or EffectChain::mLock held
+bool AudioFlinger::EffectChain::containsHapticGeneratingEffect_l()
+{
+    for (size_t i = 0; i < mEffects.size(); ++i) {
+        if (mEffects[i]->isHapticGenerator()) {
+            return true;
+        }
+    }
+    return false;
+}
+
+void AudioFlinger::EffectChain::setHapticIntensity_l(int id, int intensity)
+{
+    Mutex::Autolock _l(mLock);
+    for (size_t i = 0; i < mEffects.size(); ++i) {
+        mEffects[i]->setHapticIntensity(id, intensity);
+    }
+}
+
 void AudioFlinger::EffectChain::syncHalEffectsState()
 {
     Mutex::Autolock _l(mLock);
@@ -2839,6 +2900,14 @@
     return t->channelCount();
 }
 
+audio_channel_mask_t AudioFlinger::EffectChain::EffectCallback::hapticChannelMask() const {
+    sp<ThreadBase> t = mThread.promote();
+    if (t == nullptr) {
+        return AUDIO_CHANNEL_NONE;
+    }
+    return t->hapticChannelMask();
+}
+
 size_t AudioFlinger::EffectChain::EffectCallback::frameCount() const {
     sp<ThreadBase> t = mThread.promote();
     if (t == nullptr) {
diff --git a/services/audioflinger/Effects.h b/services/audioflinger/Effects.h
index 2826297..3cc5a44 100644
--- a/services/audioflinger/Effects.h
+++ b/services/audioflinger/Effects.h
@@ -36,6 +36,7 @@
     virtual uint32_t sampleRate() const = 0;
     virtual audio_channel_mask_t channelMask() const = 0;
     virtual uint32_t channelCount() const = 0;
+    virtual audio_channel_mask_t hapticChannelMask() const = 0;
     virtual size_t frameCount() const = 0;
 
     // Non trivial methods usually implemented with help from ThreadBase:
@@ -257,6 +258,11 @@
 
     sp<EffectModule> asEffectModule() override { return this; }
 
+    static bool      isHapticGenerator(const effect_uuid_t* type);
+    bool             isHapticGenerator() const;
+
+    status_t         setHapticIntensity(int id, int intensity);
+
     void             dump(int fd, const Vector<String16>& args);
 
 private:
@@ -503,6 +509,10 @@
     // isCompatibleWithThread_l() must be called with thread->mLock held
     bool isCompatibleWithThread_l(const sp<ThreadBase>& thread) const;
 
+    bool containsHapticGeneratingEffect_l();
+
+    void setHapticIntensity_l(int id, int intensity);
+
     sp<EffectCallbackInterface> effectCallback() const { return mEffectCallback; }
     wp<ThreadBase> thread() const { return mEffectCallback->thread(); }
 
@@ -534,6 +544,7 @@
         uint32_t sampleRate() const override;
         audio_channel_mask_t channelMask() const override;
         uint32_t channelCount() const override;
+        audio_channel_mask_t hapticChannelMask() const override;
         size_t frameCount() const override;
         uint32_t latency() const override;
 
@@ -685,6 +696,7 @@
         uint32_t sampleRate() const override;
         audio_channel_mask_t channelMask() const override;
         uint32_t channelCount() const override;
+        audio_channel_mask_t hapticChannelMask() const override { return AUDIO_CHANNEL_NONE; }
         size_t frameCount() const override  { return 0; }
         uint32_t latency() const override  { return 0; }
 
diff --git a/services/audioflinger/FastMixerState.h b/services/audioflinger/FastMixerState.h
index 396c797..857d3de 100644
--- a/services/audioflinger/FastMixerState.h
+++ b/services/audioflinger/FastMixerState.h
@@ -23,6 +23,7 @@
 #include <media/ExtendedAudioBufferProvider.h>
 #include <media/nbaio/NBAIO.h>
 #include <media/nblog/NBLog.h>
+#include <vibrator/ExternalVibrationUtils.h>
 #include "FastThreadState.h"
 
 namespace android {
@@ -49,8 +50,7 @@
     audio_format_t          mFormat;         // track format
     int                     mGeneration;     // increment when any field is assigned
     bool                    mHapticPlaybackEnabled = false; // haptic playback is enabled or not
-    AudioMixer::haptic_intensity_t mHapticIntensity = AudioMixer::HAPTIC_SCALE_MUTE; // intensity of
-                                                                                     // haptic data
+    os::HapticScale         mHapticIntensity = os::HapticScale::MUTE; // intensity of haptic data
 };
 
 // Represents a single state of the fast mixer
diff --git a/services/audioflinger/PlaybackTracks.h b/services/audioflinger/PlaybackTracks.h
index d8eebf3..d05c8b8 100644
--- a/services/audioflinger/PlaybackTracks.h
+++ b/services/audioflinger/PlaybackTracks.h
@@ -159,12 +159,12 @@
                 mHapticPlaybackEnabled = hapticPlaybackEnabled;
             }
             /** Return at what intensity to play haptics, used in mixer. */
-            AudioMixer::haptic_intensity_t getHapticIntensity() const { return mHapticIntensity; }
+            os::HapticScale getHapticIntensity() const { return mHapticIntensity; }
             /** Set intensity of haptic playback, should be set after querying vibrator service. */
-            void    setHapticIntensity(AudioMixer::haptic_intensity_t hapticIntensity) {
-                if (AudioMixer::isValidHapticIntensity(hapticIntensity)) {
+            void    setHapticIntensity(os::HapticScale hapticIntensity) {
+                if (os::isValidHapticScale(hapticIntensity)) {
                     mHapticIntensity = hapticIntensity;
-                    setHapticPlaybackEnabled(mHapticIntensity != AudioMixer::HAPTIC_SCALE_MUTE);
+                    setHapticPlaybackEnabled(mHapticIntensity != os::HapticScale::MUTE);
                 }
             }
             sp<os::ExternalVibration> getExternalVibration() const { return mExternalVibration; }
@@ -265,7 +265,7 @@
 
     bool                mHapticPlaybackEnabled = false; // indicates haptic playback enabled or not
     // intensity to play haptic data
-    AudioMixer::haptic_intensity_t mHapticIntensity = AudioMixer::HAPTIC_SCALE_MUTE;
+    os::HapticScale mHapticIntensity = os::HapticScale::MUTE;
     class AudioVibrationController : public os::BnExternalVibrationController {
     public:
         explicit AudioVibrationController(Track* track) : mTrack(track) {}
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index c4ef0fa..92f18c1 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -986,15 +986,16 @@
     if (mPowerManager != 0) {
         sp<IBinder> binder = new BBinder();
         // Uses AID_AUDIOSERVER for wakelock.  updateWakeLockUids_l() updates with client uids.
-        status_t status = mPowerManager->acquireWakeLock(POWERMANAGER_PARTIAL_WAKE_LOCK,
-                    binder,
+        binder::Status status = mPowerManager->acquireWakeLockAsync(binder,
+                    POWERMANAGER_PARTIAL_WAKE_LOCK,
                     getWakeLockTag(),
                     String16("audioserver"),
-                    true /* FIXME force oneway contrary to .aidl */);
-        if (status == NO_ERROR) {
+                    {} /* workSource */,
+                    {} /* historyTag */);
+        if (status.isOk()) {
             mWakeLockToken = binder;
         }
-        ALOGV("acquireWakeLock_l() %s status %d", mThreadName, status);
+        ALOGV("acquireWakeLock_l() %s status %d", mThreadName, status.exceptionCode());
     }
 
     gBoottime.acquire(mWakeLockToken);
@@ -1014,8 +1015,7 @@
     if (mWakeLockToken != 0) {
         ALOGV("releaseWakeLock_l() %s", mThreadName);
         if (mPowerManager != 0) {
-            mPowerManager->releaseWakeLock(mWakeLockToken, 0,
-                    true /* FIXME force oneway contrary to .aidl */);
+            mPowerManager->releaseWakeLockAsync(mWakeLockToken, 0);
         }
         mWakeLockToken.clear();
     }
@@ -1029,7 +1029,7 @@
         if (binder == 0) {
             ALOGW("Thread %s cannot connect to the power manager service", mThreadName);
         } else {
-            mPowerManager = interface_cast<IPowerManager>(binder);
+            mPowerManager = interface_cast<os::IPowerManager>(binder);
             binder->linkToDeath(mDeathRecipient);
         }
     }
@@ -1056,10 +1056,9 @@
     }
     if (mPowerManager != 0) {
         std::vector<int> uidsAsInt(uids.begin(), uids.end()); // powermanager expects uids as ints
-        status_t status = mPowerManager->updateWakeLockUids(
-                mWakeLockToken, uidsAsInt.size(), uidsAsInt.data(),
-                true /* FIXME force oneway contrary to .aidl */);
-        ALOGV("updateWakeLockUids_l() %s status %d", mThreadName, status);
+        binder::Status status = mPowerManager->updateWakeLockUidsAsync(
+                mWakeLockToken, uidsAsInt);
+        ALOGV("updateWakeLockUids_l() %s status %d", mThreadName, status.exceptionCode());
     }
 }
 
@@ -1244,6 +1243,11 @@
             return BAD_VALUE;
         }
     }
+
+    if (EffectModule::isHapticGenerator(&desc->type)) {
+        ALOGE("%s(): HapticGenerator is not supported in RecordThread", __func__);
+        return BAD_VALUE;
+    }
     return NO_ERROR;
 }
 
@@ -1263,6 +1267,12 @@
         return NO_ERROR;
     }
 
+    if (EffectModule::isHapticGenerator(&desc->type) && mHapticChannelCount == 0) {
+        ALOGW("%s: thread doesn't support haptic playback while the effect is HapticGenerator",
+                __func__);
+        return BAD_VALUE;
+    }
+
     switch (mType) {
     case MIXER: {
 #ifndef MULTICHANNEL_EFFECT_CHAIN
@@ -2528,15 +2538,17 @@
                     track->sharedBuffer() != 0 ? Track::FS_FILLED : Track::FS_FILLING;
         }
 
-        if ((track->channelMask() & AUDIO_CHANNEL_HAPTIC_ALL) != AUDIO_CHANNEL_NONE
-                && mHapticChannelMask != AUDIO_CHANNEL_NONE) {
+        sp<EffectChain> chain = getEffectChain_l(track->sessionId());
+        if (mHapticChannelMask != AUDIO_CHANNEL_NONE
+                && ((track->channelMask() & AUDIO_CHANNEL_HAPTIC_ALL) != AUDIO_CHANNEL_NONE
+                        || (chain != nullptr && chain->containsHapticGeneratingEffect_l()))) {
             // Unlock due to VibratorService will lock for this call and will
             // call Tracks.mute/unmute which also require thread's lock.
             mLock.unlock();
             const int intensity = AudioFlinger::onExternalVibrationStart(
                     track->getExternalVibration());
             mLock.lock();
-            track->setHapticIntensity(static_cast<AudioMixer::haptic_intensity_t>(intensity));
+            track->setHapticIntensity(static_cast<os::HapticScale>(intensity));
             // Haptic playback should be enabled by vibrator service.
             if (track->getHapticPlaybackEnabled()) {
                 // Disable haptic playback of all active track to ensure only
@@ -2545,12 +2557,16 @@
                     t->setHapticPlaybackEnabled(false);
                 }
             }
+
+            // Set haptic intensity for effect
+            if (chain != nullptr) {
+                chain->setHapticIntensity_l(track->id(), intensity);
+            }
         }
 
         track->mResetDone = false;
         track->mPresentationCompleteFrames = 0;
         mActiveTracks.add(track);
-        sp<EffectChain> chain = getEffectChain_l(track->sessionId());
         if (chain != 0) {
             ALOGV("addTrack_l() starting track on chain %p for session %d", chain.get(),
                     track->sessionId());
@@ -3733,9 +3749,15 @@
 
             // Determine which session to pick up haptic data.
             // This must be done under the same lock as prepareTracks_l().
+            // The haptic data from the effect is at a higher priority than the one from track.
             // TODO: Write haptic data directly to sink buffer when mixing.
             if (mHapticChannelCount > 0 && effectChains.size() > 0) {
                 for (const auto& track : mActiveTracks) {
+                    sp<EffectChain> effectChain = getEffectChain_l(track->sessionId());
+                    if (effectChain != nullptr && effectChain->containsHapticGeneratingEffect_l()) {
+                        activeHapticSessionId = track->sessionId();
+                        break;
+                    }
                     if (track->getHapticPlaybackEnabled()) {
                         activeHapticSessionId = track->sessionId();
                         break;
@@ -4105,13 +4127,20 @@
             // remove from our tracks vector
             removeTrack_l(track);
         }
-        if ((track->channelMask() & AUDIO_CHANNEL_HAPTIC_ALL) != AUDIO_CHANNEL_NONE
-                && mHapticChannelCount > 0) {
+        if (mHapticChannelCount > 0 &&
+                ((track->channelMask() & AUDIO_CHANNEL_HAPTIC_ALL) != AUDIO_CHANNEL_NONE
+                        || (chain != nullptr && chain->containsHapticGeneratingEffect_l()))) {
             mLock.unlock();
             // Unlock due to VibratorService will lock for this call and will
             // call Tracks.mute/unmute which also require thread's lock.
             AudioFlinger::onExternalVibrationStop(track->getExternalVibration());
             mLock.lock();
+
+            // When the track is stop, set the haptic intensity as MUTE
+            // for the HapticGenerator effect.
+            if (chain != nullptr) {
+                chain->setHapticIntensity_l(track->id(), static_cast<int>(os::HapticScale::MUTE));
+            }
         }
     }
 }
@@ -4454,7 +4483,7 @@
                                                                      // audio to FastMixer
         fastTrack->mFormat = mFormat; // mPipeSink format for audio to FastMixer
         fastTrack->mHapticPlaybackEnabled = mHapticChannelMask != AUDIO_CHANNEL_NONE;
-        fastTrack->mHapticIntensity = AudioMixer::HAPTIC_SCALE_NONE;
+        fastTrack->mHapticIntensity = os::HapticScale::NONE;
         fastTrack->mGeneration++;
         state->mFastTracksGen++;
         state->mTrackMask = 1;
@@ -9394,6 +9423,11 @@
         return BAD_VALUE;
     }
 
+    if (EffectModule::isHapticGenerator(&desc->type)) {
+        ALOGE("%s(): HapticGenerator is not supported for MmapThread", __func__);
+        return BAD_VALUE;
+    }
+
     return NO_ERROR;
 }
 
diff --git a/services/audioflinger/Threads.h b/services/audioflinger/Threads.h
index c1ac2e4..2e81ae7 100644
--- a/services/audioflinger/Threads.h
+++ b/services/audioflinger/Threads.h
@@ -272,6 +272,7 @@
                 // Called by AudioFlinger::frameCount(audio_io_handle_t output) and effects,
                 // and returns the [normal mix] buffer's frame count.
     virtual     size_t      frameCount() const = 0;
+    virtual     audio_channel_mask_t hapticChannelMask() const { return AUDIO_CHANNEL_NONE; }
     virtual     uint32_t    latency_l() const { return 0; }
     virtual     void        setVolumeForOutput_l(float left __unused, float right __unused) const {}
 
@@ -478,6 +479,25 @@
                 void onEffectEnable(const sp<EffectModule>& effect);
                 void onEffectDisable();
 
+                // invalidateTracksForAudioSession_l must be called with holding mLock.
+    virtual     void invalidateTracksForAudioSession_l(audio_session_t sessionId __unused) const { }
+                // Invalidate all the tracks with the given audio session.
+                void invalidateTracksForAudioSession(audio_session_t sessionId) const {
+                    Mutex::Autolock _l(mLock);
+                    invalidateTracksForAudioSession_l(sessionId);
+                }
+
+                template <typename T>
+                void invalidateTracksForAudioSession_l(audio_session_t sessionId,
+                                                       const T& tracks) const {
+                    for (size_t i = 0; i < tracks.size(); ++i) {
+                        const sp<TrackBase>& track = tracks[i];
+                        if (sessionId == track->sessionId()) {
+                            track->invalidate();
+                        }
+                    }
+                }
+
 protected:
 
                 // entry describing an effect being suspended in mSuspendedSessions keyed vector
@@ -575,7 +595,7 @@
 
                 static const int        kThreadNameLength = 16; // prctl(PR_SET_NAME) limit
                 char                    mThreadName[kThreadNameLength]; // guaranteed NUL-terminated
-                sp<IPowerManager>       mPowerManager;
+                sp<os::IPowerManager>   mPowerManager;
                 sp<IBinder>             mWakeLockToken;
                 const sp<PMDeathRecipient> mDeathRecipient;
                 // list of suspended effects per session and per type. The first (outer) vector is
@@ -939,6 +959,13 @@
                                         && outDeviceTypes().count(mTimestampCorrectedDevice) != 0;
                             }
 
+                audio_channel_mask_t hapticChannelMask() const override {
+                                         return mHapticChannelMask;
+                                     }
+                bool supportsHapticPlayback() const {
+                    return (mHapticChannelMask & AUDIO_CHANNEL_HAPTIC_ALL) != AUDIO_CHANNEL_NONE;
+                }
+
 protected:
     // updated by readOutputParameters_l()
     size_t                          mNormalFrameCount;  // normal mixer and effects
@@ -1061,6 +1088,11 @@
 
                 uint32_t    trackCountForUid_l(uid_t uid) const;
 
+                void        invalidateTracksForAudioSession_l(
+                                    audio_session_t sessionId) const override {
+                                ThreadBase::invalidateTracksForAudioSession_l(sessionId, mTracks);
+                            }
+
 private:
 
     friend class AudioFlinger;      // for numerous
diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp
index d366bb7..c92bce5 100644
--- a/services/audioflinger/Tracks.cpp
+++ b/services/audioflinger/Tracks.cpp
@@ -595,7 +595,10 @@
             + "_" + std::to_string(mId) + "_T");
 #endif
 
-    if (channelMask & AUDIO_CHANNEL_HAPTIC_ALL) {
+    if (thread->supportsHapticPlayback()) {
+        // If the track is attached to haptic playback thread, it is potentially to have
+        // HapticGenerator effect, which will generate haptic data, on the track. In that case,
+        // external vibration is always created for all tracks attached to haptic playback thread.
         mAudioVibrationController = new AudioVibrationController(this);
         mExternalVibration = new os::ExternalVibration(
                 mUid, "" /* pkg */, mAttr, mAudioVibrationController);
diff --git a/services/audiopolicy/common/managerdefinitions/include/EffectDescriptor.h b/services/audiopolicy/common/managerdefinitions/include/EffectDescriptor.h
index c4eab30..59eee52 100644
--- a/services/audiopolicy/common/managerdefinitions/include/EffectDescriptor.h
+++ b/services/audiopolicy/common/managerdefinitions/include/EffectDescriptor.h
@@ -72,6 +72,9 @@
                      audio_io_handle_t dstOutput);
     void moveEffects(const std::vector<int>& ids, audio_io_handle_t dstOutput);
 
+    audio_io_handle_t getIoForSession(audio_session_t sessionId,
+                                      const effect_uuid_t *effectType = nullptr);
+
     void dump(String8 *dst, int spaces = 0, bool verbose = true) const;
 
 private:
diff --git a/services/audiopolicy/common/managerdefinitions/src/EffectDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/EffectDescriptor.cpp
index 415962a..843f5da 100644
--- a/services/audiopolicy/common/managerdefinitions/src/EffectDescriptor.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/EffectDescriptor.cpp
@@ -202,6 +202,19 @@
     }
 }
 
+audio_io_handle_t EffectDescriptorCollection::getIoForSession(audio_session_t sessionId,
+                                                              const effect_uuid_t *effectType)
+{
+    for (size_t i = 0; i < size(); ++i) {
+        sp<EffectDescriptor> effect = valueAt(i);
+        if (effect->mSession == sessionId && (effectType == nullptr ||
+                memcmp(&effect->mDesc.type, effectType, sizeof(effect_uuid_t)) == 0)) {
+            return effect->mIo;
+        }
+    }
+    return AUDIO_IO_HANDLE_NONE;
+}
+
 EffectDescriptorCollection EffectDescriptorCollection::getEffectsForIo(audio_io_handle_t io) const
 {
     EffectDescriptorCollection effects;
diff --git a/services/audiopolicy/engine/common/src/VolumeCurve.cpp b/services/audiopolicy/engine/common/src/VolumeCurve.cpp
index c352578..8aa4b08 100644
--- a/services/audiopolicy/engine/common/src/VolumeCurve.cpp
+++ b/services/audiopolicy/engine/common/src/VolumeCurve.cpp
@@ -43,10 +43,24 @@
         indexInUi = volIndexMax;
     }
 
+    // Calculate the new volume index
     size_t nbCurvePoints = mCurvePoints.size();
-    // the volume index in the UI is relative to the min and max volume indices for this stream
-    int nbSteps = 1 + mCurvePoints[nbCurvePoints - 1].mIndex - mCurvePoints[0].mIndex;
-    int volIdx = (nbSteps * (indexInUi - volIndexMin)) / (volIndexMax - volIndexMin);
+
+    int volIdx;
+    if (volIndexMin == volIndexMax) {
+        if (indexInUi == volIndexMin) {
+            volIdx = volIndexMin;
+        } else {
+            // This would result in a divide-by-zero below
+            ALOG_ASSERT(volIndexmin != volIndexMax, "Invalid volume index range & value: 0");
+            return NAN;
+        }
+    } else {
+        // interpolaate
+        // the volume index in the UI is relative to the min and max volume indices for this stream
+        int nbSteps = 1 + mCurvePoints[nbCurvePoints - 1].mIndex - mCurvePoints[0].mIndex;
+        volIdx = (nbSteps * (indexInUi - volIndexMin)) / (volIndexMax - volIndexMin);
+    }
 
     // Where would this volume index been inserted in the curve point
     size_t indexInUiPosition = mCurvePoints.orderOf(CurvePoint(volIdx, 0));
diff --git a/services/audiopolicy/manager/Android.bp b/services/audiopolicy/manager/Android.bp
new file mode 100644
index 0000000..5bb432f
--- /dev/null
+++ b/services/audiopolicy/manager/Android.bp
@@ -0,0 +1,32 @@
+cc_library_shared {
+    name: "libaudiopolicymanager",
+
+    srcs: [
+        "AudioPolicyFactory.cpp",
+    ],
+
+    include_dirs: [
+        "frameworks/av/services/audioflinger"
+    ],
+
+    shared_libs: [
+        "libaudiopolicymanagerdefault",
+    ],
+
+    static_libs: [
+        "libaudiopolicycomponents",
+    ],
+
+    header_libs: [
+        "libaudiopolicycommon",
+        "libaudiopolicyengine_interface_headers",
+        "libaudiopolicymanager_interface_headers",
+        "libaudioutils_headers",
+    ],
+
+    cflags: [
+        "-Werror",
+        "-Wall",
+    ],
+
+}
diff --git a/services/audiopolicy/manager/Android.mk b/services/audiopolicy/manager/Android.mk
deleted file mode 100644
index cae6cfa..0000000
--- a/services/audiopolicy/manager/Android.mk
+++ /dev/null
@@ -1,30 +0,0 @@
-LOCAL_PATH:= $(call my-dir)
-
-ifneq ($(USE_CUSTOM_AUDIO_POLICY), 1)
-
-include $(CLEAR_VARS)
-
-LOCAL_SRC_FILES:= \
-    AudioPolicyFactory.cpp
-
-LOCAL_SHARED_LIBRARIES := \
-    libaudiopolicymanagerdefault
-
-LOCAL_STATIC_LIBRARIES := \
-    libaudiopolicycomponents
-
-LOCAL_C_INCLUDES += \
-    $(call include-path-for, audio-utils)
-
-LOCAL_HEADER_LIBRARIES := \
-    libaudiopolicycommon \
-    libaudiopolicyengine_interface_headers \
-    libaudiopolicymanager_interface_headers
-
-LOCAL_CFLAGS := -Wall -Werror
-
-LOCAL_MODULE:= libaudiopolicymanager
-
-include $(BUILD_SHARED_LIBRARY)
-
-endif #ifneq ($(USE_CUSTOM_AUDIO_POLICY), 1)
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
index 1fe340a..7492cd8 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
@@ -49,6 +49,7 @@
 #include <private/android_filesystem_config.h>
 #include <system/audio.h>
 #include <system/audio_config.h>
+#include <system/audio_effects/effect_hapticgenerator.h>
 #include "AudioPolicyManager.h"
 #include <Serializer.h>
 #include "TypeConverter.h"
@@ -1300,7 +1301,8 @@
 
         // at this stage we should ignore the DIRECT flag as no direct output could be found earlier
         *flags = (audio_output_flags_t)(*flags & ~AUDIO_OUTPUT_FLAG_DIRECT);
-        output = selectOutput(outputs, *flags, config->format, channelMask, config->sample_rate);
+        output = selectOutput(
+                outputs, *flags, config->format, channelMask, config->sample_rate, session);
     }
     ALOGW_IF((output == 0), "getOutputForDevices() could not find output for stream %d, "
             "sampling rate %d, format %#x, channels %#x, flags %#x",
@@ -1473,14 +1475,26 @@
 }
 
 audio_io_handle_t AudioPolicyManager::selectOutput(const SortedVector<audio_io_handle_t>& outputs,
-                                                       audio_output_flags_t flags,
-                                                       audio_format_t format,
-                                                       audio_channel_mask_t channelMask,
-                                                       uint32_t samplingRate)
+                                                   audio_output_flags_t flags,
+                                                   audio_format_t format,
+                                                   audio_channel_mask_t channelMask,
+                                                   uint32_t samplingRate,
+                                                   audio_session_t sessionId)
 {
     LOG_ALWAYS_FATAL_IF(!(format == AUDIO_FORMAT_INVALID || audio_is_linear_pcm(format)),
         "%s called with format %#x", __func__, format);
 
+    // Return the output that haptic-generating attached to when 1) session id is specified,
+    // 2) haptic-generating effect exists for given session id and 3) the output that
+    // haptic-generating effect attached to is in given outputs.
+    if (sessionId != AUDIO_SESSION_NONE) {
+        audio_io_handle_t hapticGeneratingOutput = mEffects.getIoForSession(
+                sessionId, FX_IID_HAPTICGENERATOR);
+        if (outputs.indexOf(hapticGeneratingOutput) >= 0) {
+            return hapticGeneratingOutput;
+        }
+    }
+
     // Flags disqualifying an output: the match must happen before calling selectOutput()
     static const audio_output_flags_t kExcludedFlags = (audio_output_flags_t)
         (AUDIO_OUTPUT_FLAG_HW_AV_SYNC | AUDIO_OUTPUT_FLAG_MMAP_NOIRQ | AUDIO_OUTPUT_FLAG_DIRECT);
@@ -5290,7 +5304,8 @@
                                                                      client->flags(),
                                                                      client->config().format,
                                                                      client->config().channel_mask,
-                                                                     client->config().sample_rate);
+                                                                     client->config().sample_rate,
+                                                                     client->session());
                     if (newOutput != srcOut) {
                         invalidate = true;
                         break;
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.h b/services/audiopolicy/managerdefault/AudioPolicyManager.h
index b588f89..201abc6 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.h
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.h
@@ -608,7 +608,8 @@
                                        audio_output_flags_t flags = AUDIO_OUTPUT_FLAG_NONE,
                                        audio_format_t format = AUDIO_FORMAT_INVALID,
                                        audio_channel_mask_t channelMask = AUDIO_CHANNEL_NONE,
-                                       uint32_t samplingRate = 0);
+                                       uint32_t samplingRate = 0,
+                                       audio_session_t sessionId = AUDIO_SESSION_NONE);
         // samplingRate, format, channelMask are in/out and so may be modified
         sp<IOProfile> getInputProfile(const sp<DeviceDescriptor> & device,
                                       uint32_t& samplingRate,
diff --git a/services/audiopolicy/service/Android.bp b/services/audiopolicy/service/Android.bp
new file mode 100644
index 0000000..8a7a1b2
--- /dev/null
+++ b/services/audiopolicy/service/Android.bp
@@ -0,0 +1,55 @@
+cc_library_shared {
+    name: "libaudiopolicyservice",
+
+    srcs: [
+        "AudioPolicyClientImpl.cpp",
+        "AudioPolicyEffects.cpp",
+        "AudioPolicyInterfaceImpl.cpp",
+        "AudioPolicyService.cpp",
+        "CaptureStateNotifier.cpp",
+    ],
+
+    include_dirs: [
+        "frameworks/av/services/audioflinger"
+    ],
+
+    shared_libs: [
+        "libaudioclient",
+        "libaudiofoundation",
+        "libaudiopolicymanager",
+        "libaudioutils",
+        "libbinder",
+        "libcutils",
+        "libeffectsconfig",
+        "libhardware_legacy",
+        "liblog",
+        "libmedia_helper",
+        "libmediametrics",
+        "libmediautils",
+        "libsensorprivacy",
+        "libutils",
+        "capture_state_listener-aidl-cpp",
+    ],
+
+    static_libs: [
+        "libaudiopolicycomponents",
+    ],
+
+    header_libs: [
+        "libaudiopolicycommon",
+        "libaudiopolicyengine_interface_headers",
+        "libaudiopolicymanager_interface_headers",
+        "libaudioutils_headers",
+    ],
+
+    cflags: [
+        "-fvisibility=hidden",
+        "-Werror",
+        "-Wall",
+        "-Wthread-safety",
+    ],
+
+    export_shared_lib_headers: [
+        "libsensorprivacy",
+    ],
+}
diff --git a/services/audiopolicy/service/Android.mk b/services/audiopolicy/service/Android.mk
deleted file mode 100644
index 680b077..0000000
--- a/services/audiopolicy/service/Android.mk
+++ /dev/null
@@ -1,50 +0,0 @@
-LOCAL_PATH:= $(call my-dir)
-
-include $(CLEAR_VARS)
-
-LOCAL_SRC_FILES:= \
-    AudioPolicyService.cpp \
-    AudioPolicyEffects.cpp \
-    AudioPolicyInterfaceImpl.cpp \
-    AudioPolicyClientImpl.cpp \
-    CaptureStateNotifier.cpp
-
-LOCAL_C_INCLUDES := \
-    frameworks/av/services/audioflinger \
-    $(call include-path-for, audio-utils)
-
-LOCAL_HEADER_LIBRARIES := \
-    libaudiopolicycommon \
-    libaudiopolicyengine_interface_headers \
-    libaudiopolicymanager_interface_headers
-
-LOCAL_SHARED_LIBRARIES := \
-    libcutils \
-    libutils \
-    liblog \
-    libbinder \
-    libaudioclient \
-    libaudioutils \
-    libaudiofoundation \
-    libhardware_legacy \
-    libaudiopolicymanager \
-    libmedia_helper \
-    libmediametrics \
-    libmediautils \
-    libeffectsconfig \
-    libsensorprivacy \
-    capture_state_listener-aidl-cpp
-
-LOCAL_EXPORT_SHARED_LIBRARY_HEADERS := \
-    libsensorprivacy
-
-LOCAL_STATIC_LIBRARIES := \
-    libaudiopolicycomponents
-
-LOCAL_MODULE:= libaudiopolicyservice
-
-LOCAL_CFLAGS += -fvisibility=hidden
-LOCAL_CFLAGS += -Wall -Werror -Wthread-safety
-
-include $(BUILD_SHARED_LIBRARY)
-
diff --git a/services/audiopolicy/service/AudioPolicyService.cpp b/services/audiopolicy/service/AudioPolicyService.cpp
index e847f9f..a6e8989 100644
--- a/services/audiopolicy/service/AudioPolicyService.cpp
+++ b/services/audiopolicy/service/AudioPolicyService.cpp
@@ -488,9 +488,9 @@
         }
 
         bool isAccessibility = mUidPolicy->isA11yUid(current->uid);
-        // Clients capturing for Accessibility services are not considered
+        // Clients capturing for Accessibility services or virtual sources are not considered
         // for top or latest active to avoid masking regular clients started before
-        if (!isAccessibility) {
+        if (!isAccessibility && !isVirtualSource(current->attributes.source)) {
             bool isAssistant = mUidPolicy->isAssistantUid(current->uid);
             bool isPrivacySensitive =
                     (current->attributes.flags & AUDIO_FLAG_CAPTURE_PRIVATE) != 0;
diff --git a/services/camera/libcameraservice/Android.bp b/services/camera/libcameraservice/Android.bp
index 501d922..bdbcdb0 100644
--- a/services/camera/libcameraservice/Android.bp
+++ b/services/camera/libcameraservice/Android.bp
@@ -30,7 +30,6 @@
         "common/CameraProviderManager.cpp",
         "common/DepthPhotoProcessor.cpp",
         "common/FrameProcessorBase.cpp",
-        "api1/CameraClient.cpp",
         "api1/Camera2Client.cpp",
         "api1/client2/Parameters.cpp",
         "api1/client2/FrameProcessor.cpp",
@@ -46,7 +45,6 @@
         "api2/DepthCompositeStream.cpp",
         "api2/HeicEncoderInfoManager.cpp",
         "api2/HeicCompositeStream.cpp",
-        "device1/CameraHardwareInterface.cpp",
         "device3/BufferUtils.cpp",
         "device3/Camera3Device.cpp",
         "device3/Camera3OfflineSession.cpp",
@@ -122,7 +120,6 @@
         "android.hardware.camera.provider@2.4",
         "android.hardware.camera.provider@2.5",
         "android.hardware.camera.provider@2.6",
-        "android.hardware.camera.device@1.0",
         "android.hardware.camera.device@3.2",
         "android.hardware.camera.device@3.3",
         "android.hardware.camera.device@3.4",
diff --git a/services/camera/libcameraservice/CameraFlashlight.cpp b/services/camera/libcameraservice/CameraFlashlight.cpp
index e629cdd..ccdd9e5 100644
--- a/services/camera/libcameraservice/CameraFlashlight.cpp
+++ b/services/camera/libcameraservice/CameraFlashlight.cpp
@@ -59,9 +59,8 @@
     if (mProviderManager->supportSetTorchMode(cameraId.string())) {
         mFlashControl = new ProviderFlashControl(mProviderManager);
     } else {
-        // Only HAL1 devices do not support setTorchMode
-        mFlashControl =
-                new CameraHardwareInterfaceFlashControl(mProviderManager, mCallbacks);
+        ALOGE("Flashlight control not supported by this device!");
+        return NO_INIT;
     }
 
     return OK;
@@ -309,271 +308,4 @@
 }
 // ProviderFlashControl implementation ends
 
-/////////////////////////////////////////////////////////////////////
-// CameraHardwareInterfaceFlashControl implementation begins
-// Flash control for camera module <= v2.3 and camera HAL v1
-/////////////////////////////////////////////////////////////////////
-
-CameraHardwareInterfaceFlashControl::CameraHardwareInterfaceFlashControl(
-        sp<CameraProviderManager> manager,
-        CameraProviderManager::StatusListener* callbacks) :
-        mProviderManager(manager),
-        mCallbacks(callbacks),
-        mTorchEnabled(false) {
-}
-
-CameraHardwareInterfaceFlashControl::~CameraHardwareInterfaceFlashControl() {
-    disconnectCameraDevice();
-
-    mSurface.clear();
-    mSurfaceTexture.clear();
-    mProducer.clear();
-    mConsumer.clear();
-
-    if (mTorchEnabled) {
-        if (mCallbacks) {
-            ALOGV("%s: notify the framework that torch was turned off",
-                    __FUNCTION__);
-            mCallbacks->onTorchStatusChanged(mCameraId, TorchModeStatus::AVAILABLE_OFF);
-        }
-    }
-}
-
-status_t CameraHardwareInterfaceFlashControl::setTorchMode(
-        const String8& cameraId, bool enabled) {
-    Mutex::Autolock l(mLock);
-
-    // pre-check
-    status_t res;
-    if (enabled) {
-        bool hasFlash = false;
-        // Check if it has a flash unit and leave camera device open.
-        res = hasFlashUnitLocked(cameraId, &hasFlash, /*keepDeviceOpen*/true);
-        // invalid camera?
-        if (res) {
-            // hasFlashUnitLocked() returns BAD_INDEX if mDevice is connected to
-            // another camera device.
-            return res == BAD_INDEX ? BAD_INDEX : -EINVAL;
-        }
-        // no flash unit?
-        if (!hasFlash) {
-            // Disconnect camera device if it has no flash.
-            disconnectCameraDevice();
-            return -ENOSYS;
-        }
-    } else if (mDevice == NULL || cameraId != mCameraId) {
-        // disabling the torch mode of an un-opened or different device.
-        return OK;
-    } else {
-        // disabling the torch mode of currently opened device
-        disconnectCameraDevice();
-        mTorchEnabled = false;
-        mCallbacks->onTorchStatusChanged(cameraId, TorchModeStatus::AVAILABLE_OFF);
-        return OK;
-    }
-
-    res = startPreviewAndTorch();
-    if (res) {
-        return res;
-    }
-
-    mTorchEnabled = true;
-    mCallbacks->onTorchStatusChanged(cameraId, TorchModeStatus::AVAILABLE_ON);
-    return OK;
-}
-
-status_t CameraHardwareInterfaceFlashControl::hasFlashUnit(
-        const String8& cameraId, bool *hasFlash) {
-    Mutex::Autolock l(mLock);
-    // Close device after checking if it has a flash unit.
-    return hasFlashUnitLocked(cameraId, hasFlash, /*keepDeviceOpen*/false);
-}
-
-status_t CameraHardwareInterfaceFlashControl::hasFlashUnitLocked(
-        const String8& cameraId, bool *hasFlash, bool keepDeviceOpen) {
-    bool closeCameraDevice = false;
-
-    if (!hasFlash) {
-        return BAD_VALUE;
-    }
-
-    status_t res;
-    if (mDevice == NULL) {
-        // Connect to camera device to query if it has a flash unit.
-        res = connectCameraDevice(cameraId);
-        if (res) {
-            return res;
-        }
-        // Close camera device only when it is just opened and the caller doesn't want to keep
-        // the camera device open.
-        closeCameraDevice = !keepDeviceOpen;
-    }
-
-    if (cameraId != mCameraId) {
-        return BAD_INDEX;
-    }
-
-    const char *flashMode =
-            mParameters.get(CameraParameters::KEY_SUPPORTED_FLASH_MODES);
-    if (flashMode && strstr(flashMode, CameraParameters::FLASH_MODE_TORCH)) {
-        *hasFlash = true;
-    } else {
-        *hasFlash = false;
-    }
-
-    if (closeCameraDevice) {
-        res = disconnectCameraDevice();
-        if (res != OK) {
-            ALOGE("%s: Failed to disconnect camera device. %s (%d)", __FUNCTION__,
-                    strerror(-res), res);
-            return res;
-        }
-    }
-
-    return OK;
-}
-
-status_t CameraHardwareInterfaceFlashControl::startPreviewAndTorch() {
-    status_t res = OK;
-    res = mDevice->startPreview();
-    if (res) {
-        ALOGE("%s: start preview failed. %s (%d)", __FUNCTION__,
-                strerror(-res), res);
-        return res;
-    }
-
-    mParameters.set(CameraParameters::KEY_FLASH_MODE,
-            CameraParameters::FLASH_MODE_TORCH);
-
-    return mDevice->setParameters(mParameters);
-}
-
-status_t CameraHardwareInterfaceFlashControl::getSmallestSurfaceSize(
-        int32_t *width, int32_t *height) {
-    if (!width || !height) {
-        return BAD_VALUE;
-    }
-
-    int32_t w = INT32_MAX;
-    int32_t h = 1;
-    Vector<Size> sizes;
-
-    mParameters.getSupportedPreviewSizes(sizes);
-    for (size_t i = 0; i < sizes.size(); i++) {
-        Size s = sizes[i];
-        if (w * h > s.width * s.height) {
-            w = s.width;
-            h = s.height;
-        }
-    }
-
-    if (w == INT32_MAX) {
-        return NAME_NOT_FOUND;
-    }
-
-    *width = w;
-    *height = h;
-
-    return OK;
-}
-
-status_t CameraHardwareInterfaceFlashControl::initializePreviewWindow(
-        const sp<CameraHardwareInterface>& device, int32_t width, int32_t height) {
-    status_t res;
-    BufferQueue::createBufferQueue(&mProducer, &mConsumer);
-
-    mSurfaceTexture = new GLConsumer(mConsumer, 0, GLConsumer::TEXTURE_EXTERNAL,
-            true, true);
-    if (mSurfaceTexture == NULL) {
-        return NO_MEMORY;
-    }
-
-    int32_t format = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
-    res = mSurfaceTexture->setDefaultBufferSize(width, height);
-    if (res) {
-        return res;
-    }
-    res = mSurfaceTexture->setDefaultBufferFormat(format);
-    if (res) {
-        return res;
-    }
-
-    mSurface = new Surface(mProducer, /*useAsync*/ true);
-    if (mSurface == NULL) {
-        return NO_MEMORY;
-    }
-
-    res = native_window_api_connect(mSurface.get(), NATIVE_WINDOW_API_CAMERA);
-    if (res) {
-        ALOGE("%s: Unable to connect to native window", __FUNCTION__);
-        return res;
-    }
-
-    return device->setPreviewWindow(mSurface);
-}
-
-status_t CameraHardwareInterfaceFlashControl::connectCameraDevice(
-        const String8& cameraId) {
-    sp<CameraHardwareInterface> device =
-            new CameraHardwareInterface(cameraId.string());
-
-    status_t res = device->initialize(mProviderManager);
-    if (res) {
-        ALOGE("%s: initializing camera %s failed", __FUNCTION__,
-                cameraId.string());
-        return res;
-    }
-
-    // need to set __get_memory in set_callbacks().
-    device->setCallbacks(NULL, NULL, NULL, NULL, NULL);
-
-    mParameters = device->getParameters();
-
-    int32_t width, height;
-    res = getSmallestSurfaceSize(&width, &height);
-    if (res) {
-        ALOGE("%s: failed to get smallest surface size for camera %s",
-                __FUNCTION__, cameraId.string());
-        return res;
-    }
-
-    res = initializePreviewWindow(device, width, height);
-    if (res) {
-        ALOGE("%s: failed to initialize preview window for camera %s",
-                __FUNCTION__, cameraId.string());
-        return res;
-    }
-
-    mCameraId = cameraId;
-    mDevice = device;
-    return OK;
-}
-
-status_t CameraHardwareInterfaceFlashControl::disconnectCameraDevice() {
-    if (mDevice == NULL) {
-        return OK;
-    }
-
-    if (mParameters.get(CameraParameters::KEY_FLASH_MODE)) {
-        // There is a flash, turn if off.
-        // (If there isn't one, leave the parameter null)
-        mParameters.set(CameraParameters::KEY_FLASH_MODE,
-                CameraParameters::FLASH_MODE_OFF);
-        mDevice->setParameters(mParameters);
-    }
-    mDevice->stopPreview();
-    status_t res = native_window_api_disconnect(mSurface.get(),
-            NATIVE_WINDOW_API_CAMERA);
-    if (res) {
-        ALOGW("%s: native_window_api_disconnect failed: %s (%d)",
-                __FUNCTION__, strerror(-res), res);
-    }
-    mDevice->setPreviewWindow(NULL);
-    mDevice->release();
-    mDevice = NULL;
-
-    return OK;
-}
-// CameraHardwareInterfaceFlashControl implementation ends
-
 }
diff --git a/services/camera/libcameraservice/CameraFlashlight.h b/services/camera/libcameraservice/CameraFlashlight.h
index 1baaba2..b97fa5f 100644
--- a/services/camera/libcameraservice/CameraFlashlight.h
+++ b/services/camera/libcameraservice/CameraFlashlight.h
@@ -23,8 +23,6 @@
 #include <utils/SortedVector.h>
 #include "common/CameraProviderManager.h"
 #include "common/CameraDeviceBase.h"
-#include "device1/CameraHardwareInterface.h"
-
 
 namespace android {
 
@@ -124,59 +122,6 @@
         Mutex mLock;
 };
 
-/**
- * Flash control for camera module <= v2.3 and camera HAL v1
- */
-class CameraHardwareInterfaceFlashControl : public FlashControlBase {
-    public:
-        CameraHardwareInterfaceFlashControl(
-                sp<CameraProviderManager> manager,
-                CameraProviderManager::StatusListener* callbacks);
-        virtual ~CameraHardwareInterfaceFlashControl();
-
-        // FlashControlBase
-        status_t setTorchMode(const String8& cameraId, bool enabled);
-        status_t hasFlashUnit(const String8& cameraId, bool *hasFlash);
-
-    private:
-        // connect to a camera device
-        status_t connectCameraDevice(const String8& cameraId);
-
-        // disconnect and free mDevice
-        status_t disconnectCameraDevice();
-
-        // initialize the preview window
-        status_t initializePreviewWindow(const sp<CameraHardwareInterface>& device,
-                int32_t width, int32_t height);
-
-        // start preview and enable torch
-        status_t startPreviewAndTorch();
-
-        // get the smallest surface
-        status_t getSmallestSurfaceSize(int32_t *width, int32_t *height);
-
-        // protected by mLock
-        // If this function opens camera device in order to check if it has a flash unit, the
-        // camera device will remain open if keepDeviceOpen is true and the camera device will be
-        // closed if keepDeviceOpen is false. If camera device is already open when calling this
-        // function, keepDeviceOpen is ignored.
-        status_t hasFlashUnitLocked(const String8& cameraId, bool *hasFlash, bool keepDeviceOpen);
-
-        sp<CameraProviderManager> mProviderManager;
-        CameraProviderManager::StatusListener* mCallbacks;
-        sp<CameraHardwareInterface> mDevice;
-        String8 mCameraId;
-        CameraParameters mParameters;
-        bool mTorchEnabled;
-
-        sp<IGraphicBufferProducer> mProducer;
-        sp<IGraphicBufferConsumer>  mConsumer;
-        sp<GLConsumer> mSurfaceTexture;
-        sp<Surface> mSurface;
-
-        Mutex mLock;
-};
-
 } // namespace android
 
 #endif
diff --git a/services/camera/libcameraservice/CameraService.cpp b/services/camera/libcameraservice/CameraService.cpp
index b5de1b7..adafbda 100644
--- a/services/camera/libcameraservice/CameraService.cpp
+++ b/services/camera/libcameraservice/CameraService.cpp
@@ -70,7 +70,6 @@
 #include <system/camera.h>
 
 #include "CameraService.h"
-#include "api1/CameraClient.h"
 #include "api1/Camera2Client.h"
 #include "api2/CameraDeviceClient.h"
 #include "utils/CameraTraces.h"
@@ -679,9 +678,15 @@
     status_t res = mCameraProviderManager->getCameraCharacteristics(
             String8(cameraId).string(), cameraInfo);
     if (res != OK) {
-        return STATUS_ERROR_FMT(ERROR_INVALID_OPERATION, "Unable to retrieve camera "
-                "characteristics for device %s: %s (%d)", String8(cameraId).string(),
-                strerror(-res), res);
+        if (res == NAME_NOT_FOUND) {
+            return STATUS_ERROR_FMT(ERROR_ILLEGAL_ARGUMENT, "Unable to retrieve camera "
+                    "characteristics for unknown device %s: %s (%d)", String8(cameraId).string(),
+                    strerror(-res), res);
+        } else {
+            return STATUS_ERROR_FMT(ERROR_INVALID_OPERATION, "Unable to retrieve camera "
+                    "characteristics for device %s: %s (%d)", String8(cameraId).string(),
+                    strerror(-res), res);
+        }
     }
     SystemCameraKind deviceKind = SystemCameraKind::PUBLIC;
     if (getSystemCameraKind(String8(cameraId), &deviceKind) != OK) {
@@ -802,35 +807,26 @@
 
 Status CameraService::makeClient(const sp<CameraService>& cameraService,
         const sp<IInterface>& cameraCb, const String16& packageName,
-        const std::optional<String16>& featureId, const String8& cameraId, int api1CameraId,
-        int facing, int clientPid, uid_t clientUid, int servicePid, int halVersion,
+        const std::optional<String16>& featureId,  const String8& cameraId,
+        int api1CameraId, int facing, int clientPid, uid_t clientUid, int servicePid,
         int deviceVersion, apiLevel effectiveApiLevel,
         /*out*/sp<BasicClient>* client) {
 
-    if (halVersion < 0 || halVersion == deviceVersion) {
-        // Default path: HAL version is unspecified by caller, create CameraClient
-        // based on device version reported by the HAL.
-        switch(deviceVersion) {
-          case CAMERA_DEVICE_API_VERSION_1_0:
-            if (effectiveApiLevel == API_1) {  // Camera1 API route
-                sp<ICameraClient> tmp = static_cast<ICameraClient*>(cameraCb.get());
-                *client = new CameraClient(cameraService, tmp, packageName, featureId,
-                        api1CameraId, facing, clientPid, clientUid,
-                        getpid());
-            } else { // Camera2 API route
-                ALOGW("Camera using old HAL version: %d", deviceVersion);
-                return STATUS_ERROR_FMT(ERROR_DEPRECATED_HAL,
-                        "Camera device \"%s\" HAL version %d does not support camera2 API",
-                        cameraId.string(), deviceVersion);
-            }
+    // Create CameraClient based on device version reported by the HAL.
+    switch(deviceVersion) {
+        case CAMERA_DEVICE_API_VERSION_1_0:
+            ALOGE("Camera using old HAL version: %d", deviceVersion);
+            return STATUS_ERROR_FMT(ERROR_DEPRECATED_HAL,
+                    "Camera device \"%s\" HAL version %d no longer supported",
+                    cameraId.string(), deviceVersion);
             break;
-          case CAMERA_DEVICE_API_VERSION_3_0:
-          case CAMERA_DEVICE_API_VERSION_3_1:
-          case CAMERA_DEVICE_API_VERSION_3_2:
-          case CAMERA_DEVICE_API_VERSION_3_3:
-          case CAMERA_DEVICE_API_VERSION_3_4:
-          case CAMERA_DEVICE_API_VERSION_3_5:
-          case CAMERA_DEVICE_API_VERSION_3_6:
+        case CAMERA_DEVICE_API_VERSION_3_0:
+        case CAMERA_DEVICE_API_VERSION_3_1:
+        case CAMERA_DEVICE_API_VERSION_3_2:
+        case CAMERA_DEVICE_API_VERSION_3_3:
+        case CAMERA_DEVICE_API_VERSION_3_4:
+        case CAMERA_DEVICE_API_VERSION_3_5:
+        case CAMERA_DEVICE_API_VERSION_3_6:
             if (effectiveApiLevel == API_1) { // Camera1 API route
                 sp<ICameraClient> tmp = static_cast<ICameraClient*>(cameraCb.get());
                 *client = new Camera2Client(cameraService, tmp, packageName, featureId,
@@ -844,32 +840,12 @@
                         cameraId, facing, clientPid, clientUid, servicePid);
             }
             break;
-          default:
+        default:
             // Should not be reachable
             ALOGE("Unknown camera device HAL version: %d", deviceVersion);
             return STATUS_ERROR_FMT(ERROR_INVALID_OPERATION,
                     "Camera device \"%s\" has unknown HAL version %d",
                     cameraId.string(), deviceVersion);
-        }
-    } else {
-        // A particular HAL version is requested by caller. Create CameraClient
-        // based on the requested HAL version.
-        if (deviceVersion > CAMERA_DEVICE_API_VERSION_1_0 &&
-            halVersion == CAMERA_DEVICE_API_VERSION_1_0) {
-            // Only support higher HAL version device opened as HAL1.0 device.
-            sp<ICameraClient> tmp = static_cast<ICameraClient*>(cameraCb.get());
-            *client = new CameraClient(cameraService, tmp, packageName, featureId,
-                    api1CameraId, facing, clientPid, clientUid,
-                    servicePid);
-        } else {
-            // Other combinations (e.g. HAL3.x open as HAL2.x) are not supported yet.
-            ALOGE("Invalid camera HAL version %x: HAL %x device can only be"
-                    " opened as HAL %x device", halVersion, deviceVersion,
-                    CAMERA_DEVICE_API_VERSION_1_0);
-            return STATUS_ERROR_FMT(ERROR_ILLEGAL_ARGUMENT,
-                    "Camera device \"%s\" (HAL version %d) cannot be opened as HAL version %d",
-                    cameraId.string(), deviceVersion, halVersion);
-        }
     }
     return Status::ok();
 }
@@ -957,7 +933,6 @@
     sp<Client> tmp = nullptr;
     if (!(ret = connectHelper<ICameraClient,Client>(
             sp<ICameraClient>{nullptr}, id, cameraId,
-            static_cast<int>(CAMERA_HAL_API_VERSION_UNSPECIFIED),
             internalPackageName, {}, uid, USE_CALLING_PID,
             API_1, /*shimUpdateOnly*/ true, /*out*/ tmp)
             ).isOk()) {
@@ -1476,34 +1451,7 @@
     String8 id = cameraIdIntToStr(api1CameraId);
     sp<Client> client = nullptr;
     ret = connectHelper<ICameraClient,Client>(cameraClient, id, api1CameraId,
-            CAMERA_HAL_API_VERSION_UNSPECIFIED, clientPackageName, {},
-            clientUid, clientPid, API_1, /*shimUpdateOnly*/ false, /*out*/client);
-
-    if(!ret.isOk()) {
-        logRejected(id, CameraThreadState::getCallingPid(), String8(clientPackageName),
-                ret.toString8());
-        return ret;
-    }
-
-    *device = client;
-    return ret;
-}
-
-Status CameraService::connectLegacy(
-        const sp<ICameraClient>& cameraClient,
-        int api1CameraId, int halVersion,
-        const String16& clientPackageName,
-        int clientUid,
-        /*out*/
-        sp<ICamera>* device) {
-
-    ATRACE_CALL();
-    String8 id = cameraIdIntToStr(api1CameraId);
-
-    Status ret = Status::ok();
-    sp<Client> client = nullptr;
-    ret = connectHelper<ICameraClient,Client>(cameraClient, id, api1CameraId, halVersion,
-            clientPackageName, {}, clientUid, USE_CALLING_PID, API_1,
+            clientPackageName, {}, clientUid, clientPid, API_1,
             /*shimUpdateOnly*/ false, /*out*/client);
 
     if(!ret.isOk()) {
@@ -1545,8 +1493,9 @@
     int cUid = CameraThreadState::getCallingUid();
     SystemCameraKind systemCameraKind = SystemCameraKind::PUBLIC;
     if (getSystemCameraKind(cameraId, &systemCameraKind) != OK) {
-        ALOGE("%s: Invalid camera id %s, ", __FUNCTION__, cameraId.c_str());
-        return true;
+        // This isn't a known camera ID, so it's not a system camera
+        ALOGV("%s: Unknown camera id %s, ", __FUNCTION__, cameraId.c_str());
+        return false;
     }
 
     // (1) Cameraserver trying to connect, accept.
@@ -1595,8 +1544,7 @@
         clientPackageNameAdj = String16(vendorClient.c_str());
     }
     ret = connectHelper<hardware::camera2::ICameraDeviceCallbacks,CameraDeviceClient>(cameraCb, id,
-            /*api1CameraId*/-1,
-            CAMERA_HAL_API_VERSION_UNSPECIFIED, clientPackageNameAdj, clientFeatureId,
+            /*api1CameraId*/-1, clientPackageNameAdj, clientFeatureId,
             clientUid, USE_CALLING_PID, API_2, /*shimUpdateOnly*/ false, /*out*/client);
 
     if(!ret.isOk()) {
@@ -1611,7 +1559,7 @@
 
 template<class CALLBACK, class CLIENT>
 Status CameraService::connectHelper(const sp<CALLBACK>& cameraCb, const String8& cameraId,
-        int api1CameraId, int halVersion, const String16& clientPackageName,
+        int api1CameraId, const String16& clientPackageName,
         const std::optional<String16>& clientFeatureId, int clientUid, int clientPid,
         apiLevel effectiveApiLevel, bool shimUpdateOnly,
         /*out*/sp<CLIENT>& device) {
@@ -1621,9 +1569,8 @@
 
     int originalClientPid = 0;
 
-    ALOGI("CameraService::connect call (PID %d \"%s\", camera ID %s) for HAL version %s and "
+    ALOGI("CameraService::connect call (PID %d \"%s\", camera ID %s) and "
             "Camera API version %d", clientPid, clientName8.string(), cameraId.string(),
-            (halVersion == -1) ? "default" : std::to_string(halVersion).c_str(),
             static_cast<int>(effectiveApiLevel));
 
     sp<CLIENT> client = nullptr;
@@ -1703,7 +1650,7 @@
         if(!(ret = makeClient(this, cameraCb, clientPackageName, clientFeatureId,
                 cameraId, api1CameraId, facing,
                 clientPid, clientUid, getpid(),
-                halVersion, deviceVersion, effectiveApiLevel,
+                deviceVersion, effectiveApiLevel,
                 /*out*/&tmp)).isOk()) {
             return ret;
         }
diff --git a/services/camera/libcameraservice/CameraService.h b/services/camera/libcameraservice/CameraService.h
index 35e13e7..3d3b7dd 100644
--- a/services/camera/libcameraservice/CameraService.h
+++ b/services/camera/libcameraservice/CameraService.h
@@ -70,7 +70,6 @@
     public virtual CameraProviderManager::StatusListener
 {
     friend class BinderService<CameraService>;
-    friend class CameraClient;
     friend class CameraOfflineSessionClient;
 public:
     class Client;
@@ -134,12 +133,6 @@
             /*out*/
             sp<hardware::ICamera>* device);
 
-    virtual binder::Status     connectLegacy(const sp<hardware::ICameraClient>& cameraClient,
-            int32_t cameraId, int32_t halVersion,
-            const String16& clientPackageName, int32_t clientUid,
-            /*out*/
-            sp<hardware::ICamera>* device);
-
     virtual binder::Status     connectDevice(
             const sp<hardware::camera2::ICameraDeviceCallbacks>& cameraCb, const String16& cameraId,
             const String16& clientPackageName, const std::optional<String16>& clientFeatureId,
@@ -728,7 +721,7 @@
     // Single implementation shared between the various connect calls
     template<class CALLBACK, class CLIENT>
     binder::Status connectHelper(const sp<CALLBACK>& cameraCb, const String8& cameraId,
-            int api1CameraId, int halVersion, const String16& clientPackageName,
+            int api1CameraId, const String16& clientPackageName,
             const std::optional<String16>& clientFeatureId, int clientUid, int clientPid,
             apiLevel effectiveApiLevel, bool shimUpdateOnly, /*out*/sp<CLIENT>& device);
 
@@ -1059,7 +1052,7 @@
     static binder::Status makeClient(const sp<CameraService>& cameraService,
             const sp<IInterface>& cameraCb, const String16& packageName,
             const std::optional<String16>& featureId, const String8& cameraId, int api1CameraId,
-            int facing, int clientPid, uid_t clientUid, int servicePid, int halVersion,
+            int facing, int clientPid, uid_t clientUid, int servicePid,
             int deviceVersion, apiLevel effectiveApiLevel,
             /*out*/sp<BasicClient>* client);
 
diff --git a/services/camera/libcameraservice/TEST_MAPPING b/services/camera/libcameraservice/TEST_MAPPING
index 6fdac68..ca6cc58 100644
--- a/services/camera/libcameraservice/TEST_MAPPING
+++ b/services/camera/libcameraservice/TEST_MAPPING
@@ -1,7 +1,12 @@
 {
   "presubmit": [
     {
-       "name": "cameraservice_test"
+      "name": "cameraservice_test"
+    }
+  ],
+  "imports": [
+    {
+      "path": "frameworks/av/camera"
     }
   ]
 }
diff --git a/services/camera/libcameraservice/api1/CameraClient.cpp b/services/camera/libcameraservice/api1/CameraClient.cpp
deleted file mode 100644
index b860ceb..0000000
--- a/services/camera/libcameraservice/api1/CameraClient.cpp
+++ /dev/null
@@ -1,1208 +0,0 @@
-/*
- * Copyright (C) 2012 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#define LOG_TAG "CameraClient"
-//#define LOG_NDEBUG 0
-
-#include <cutils/atomic.h>
-#include <cutils/properties.h>
-#include <gui/Surface.h>
-#include <media/hardware/HardwareAPI.h>
-
-#include "api1/CameraClient.h"
-#include "device1/CameraHardwareInterface.h"
-#include "CameraService.h"
-#include "utils/CameraThreadState.h"
-
-namespace android {
-
-#define LOG1(...) ALOGD_IF(gLogLevel >= 1, __VA_ARGS__);
-#define LOG2(...) ALOGD_IF(gLogLevel >= 2, __VA_ARGS__);
-
-CameraClient::CameraClient(const sp<CameraService>& cameraService,
-        const sp<hardware::ICameraClient>& cameraClient,
-        const String16& clientPackageName, const std::optional<String16>& clientFeatureId,
-        int cameraId, int cameraFacing,
-        int clientPid, int clientUid,
-        int servicePid):
-        Client(cameraService, cameraClient, clientPackageName, clientFeatureId,
-                String8::format("%d", cameraId), cameraId, cameraFacing, clientPid,
-                clientUid, servicePid)
-{
-    int callingPid = CameraThreadState::getCallingPid();
-    LOG1("CameraClient::CameraClient E (pid %d, id %d)", callingPid, cameraId);
-
-    mHardware = NULL;
-    mMsgEnabled = 0;
-    mSurface = 0;
-    mPreviewWindow = 0;
-    mDestructionStarted = false;
-
-    // Callback is disabled by default
-    mPreviewCallbackFlag = CAMERA_FRAME_CALLBACK_FLAG_NOOP;
-    mOrientation = getOrientation(0, mCameraFacing == CAMERA_FACING_FRONT);
-    mPlayShutterSound = true;
-    LOG1("CameraClient::CameraClient X (pid %d, id %d)", callingPid, cameraId);
-}
-
-status_t CameraClient::initialize(sp<CameraProviderManager> manager,
-        const String8& /*monitorTags*/) {
-    int callingPid = CameraThreadState::getCallingPid();
-    status_t res;
-
-    LOG1("CameraClient::initialize E (pid %d, id %d)", callingPid, mCameraId);
-
-    // Verify ops permissions
-    res = startCameraOps();
-    if (res != OK) {
-        return res;
-    }
-
-    char camera_device_name[10];
-    snprintf(camera_device_name, sizeof(camera_device_name), "%d", mCameraId);
-
-    mHardware = new CameraHardwareInterface(camera_device_name);
-    res = mHardware->initialize(manager);
-    if (res != OK) {
-        ALOGE("%s: Camera %d: unable to initialize device: %s (%d)",
-                __FUNCTION__, mCameraId, strerror(-res), res);
-        mHardware.clear();
-        return res;
-    }
-
-    mHardware->setCallbacks(notifyCallback,
-            dataCallback,
-            dataCallbackTimestamp,
-            handleCallbackTimestampBatch,
-            (void *)(uintptr_t)mCameraId);
-
-    // Enable zoom, error, focus, and metadata messages by default
-    enableMsgType(CAMERA_MSG_ERROR | CAMERA_MSG_ZOOM | CAMERA_MSG_FOCUS |
-                  CAMERA_MSG_PREVIEW_METADATA | CAMERA_MSG_FOCUS_MOVE);
-
-    LOG1("CameraClient::initialize X (pid %d, id %d)", callingPid, mCameraId);
-    return OK;
-}
-
-
-// tear down the client
-CameraClient::~CameraClient() {
-    mDestructionStarted = true;
-    int callingPid = CameraThreadState::getCallingPid();
-    LOG1("CameraClient::~CameraClient E (pid %d, this %p)", callingPid, this);
-
-    disconnect();
-    LOG1("CameraClient::~CameraClient X (pid %d, this %p)", callingPid, this);
-}
-
-status_t CameraClient::dump(int fd, const Vector<String16>& args) {
-    return BasicClient::dump(fd, args);
-}
-
-status_t CameraClient::dumpClient(int fd, const Vector<String16>& args) {
-    const size_t SIZE = 256;
-    char buffer[SIZE];
-
-    size_t len = snprintf(buffer, SIZE, "Client[%d] (%p) with UID %d\n",
-            mCameraId,
-            (getRemoteCallback() != NULL ?
-                    IInterface::asBinder(getRemoteCallback()).get() : NULL),
-            mClientUid);
-    len = (len > SIZE - 1) ? SIZE - 1 : len;
-    write(fd, buffer, len);
-
-    len = snprintf(buffer, SIZE, "Latest set parameters:\n");
-    len = (len > SIZE - 1) ? SIZE - 1 : len;
-    write(fd, buffer, len);
-
-    mLatestSetParameters.dump(fd, args);
-
-    const char *enddump = "\n\n";
-    write(fd, enddump, strlen(enddump));
-
-    sp<CameraHardwareInterface> hardware = mHardware;
-    if (hardware != nullptr) {
-        return hardware->dump(fd, args);
-    }
-    ALOGI("%s: camera device closed already, skip dumping", __FUNCTION__);
-    return OK;
-}
-
-// ----------------------------------------------------------------------------
-
-status_t CameraClient::checkPid() const {
-    int callingPid = CameraThreadState::getCallingPid();
-    if (callingPid == mClientPid) return NO_ERROR;
-
-    ALOGW("attempt to use a locked camera from a different process"
-         " (old pid %d, new pid %d)", mClientPid, callingPid);
-    return EBUSY;
-}
-
-status_t CameraClient::checkPidAndHardware() const {
-    if (mHardware == 0) {
-        ALOGE("attempt to use a camera after disconnect() (pid %d)",
-              CameraThreadState::getCallingPid());
-        return INVALID_OPERATION;
-    }
-    status_t result = checkPid();
-    if (result != NO_ERROR) return result;
-    return NO_ERROR;
-}
-
-status_t CameraClient::lock() {
-    int callingPid = CameraThreadState::getCallingPid();
-    LOG1("lock (pid %d)", callingPid);
-    Mutex::Autolock lock(mLock);
-
-    // lock camera to this client if the the camera is unlocked
-    if (mClientPid == 0) {
-        mClientPid = callingPid;
-        return NO_ERROR;
-    }
-
-    // returns NO_ERROR if the client already owns the camera, EBUSY otherwise
-    return checkPid();
-}
-
-status_t CameraClient::unlock() {
-    int callingPid = CameraThreadState::getCallingPid();
-    LOG1("unlock (pid %d)", callingPid);
-    Mutex::Autolock lock(mLock);
-
-    // allow anyone to use camera (after they lock the camera)
-    status_t result = checkPid();
-    if (result == NO_ERROR) {
-        if (mHardware->recordingEnabled()) {
-            ALOGE("Not allowed to unlock camera during recording.");
-            return INVALID_OPERATION;
-        }
-        mClientPid = 0;
-        LOG1("clear mRemoteCallback (pid %d)", callingPid);
-        // we need to remove the reference to ICameraClient so that when the app
-        // goes away, the reference count goes to 0.
-        mRemoteCallback.clear();
-    }
-    return result;
-}
-
-// connect a new client to the camera
-status_t CameraClient::connect(const sp<hardware::ICameraClient>& client) {
-    int callingPid = CameraThreadState::getCallingPid();
-    LOG1("connect E (pid %d)", callingPid);
-    Mutex::Autolock lock(mLock);
-
-    if (mClientPid != 0 && checkPid() != NO_ERROR) {
-        ALOGW("Tried to connect to a locked camera (old pid %d, new pid %d)",
-                mClientPid, callingPid);
-        return EBUSY;
-    }
-
-    if (mRemoteCallback != 0 &&
-        (IInterface::asBinder(client) == IInterface::asBinder(mRemoteCallback))) {
-        LOG1("Connect to the same client");
-        return NO_ERROR;
-    }
-
-    mPreviewCallbackFlag = CAMERA_FRAME_CALLBACK_FLAG_NOOP;
-    mClientPid = callingPid;
-    mRemoteCallback = client;
-
-    LOG1("connect X (pid %d)", callingPid);
-    return NO_ERROR;
-}
-
-static void disconnectWindow(const sp<ANativeWindow>& window) {
-    if (window != 0) {
-        status_t result = native_window_api_disconnect(window.get(),
-                NATIVE_WINDOW_API_CAMERA);
-        if (result != NO_ERROR) {
-            ALOGW("native_window_api_disconnect failed: %s (%d)", strerror(-result),
-                    result);
-        }
-    }
-}
-
-binder::Status CameraClient::disconnect() {
-    int callingPid = CameraThreadState::getCallingPid();
-    LOG1("disconnect E (pid %d)", callingPid);
-    Mutex::Autolock lock(mLock);
-
-    binder::Status res = binder::Status::ok();
-    // Allow both client and the cameraserver to disconnect at all times
-    if (callingPid != mClientPid && callingPid != mServicePid) {
-        ALOGW("different client - don't disconnect");
-        return res;
-    }
-
-    // Make sure disconnect() is done once and once only, whether it is called
-    // from the user directly, or called by the destructor.
-    if (mHardware == 0) return res;
-
-    LOG1("hardware teardown");
-    // Before destroying mHardware, we must make sure it's in the
-    // idle state.
-    // Turn off all messages.
-    disableMsgType(CAMERA_MSG_ALL_MSGS);
-    mHardware->stopPreview();
-    sCameraService->updateProxyDeviceState(
-            hardware::ICameraServiceProxy::CAMERA_STATE_IDLE,
-            mCameraIdStr, mCameraFacing, mClientPackageName,
-            hardware::ICameraServiceProxy::CAMERA_API_LEVEL_1);
-    mHardware->cancelPicture();
-    // Release the hardware resources.
-    mHardware->release();
-
-    // Release the held ANativeWindow resources.
-    if (mPreviewWindow != 0) {
-        disconnectWindow(mPreviewWindow);
-        mPreviewWindow = 0;
-        mHardware->setPreviewWindow(mPreviewWindow);
-    }
-    mHardware.clear();
-
-    CameraService::Client::disconnect();
-
-    LOG1("disconnect X (pid %d)", callingPid);
-
-    return res;
-}
-
-// ----------------------------------------------------------------------------
-
-status_t CameraClient::setPreviewWindow(const sp<IBinder>& binder,
-        const sp<ANativeWindow>& window) {
-    Mutex::Autolock lock(mLock);
-    status_t result = checkPidAndHardware();
-    if (result != NO_ERROR) return result;
-
-    // return if no change in surface.
-    if (binder == mSurface) {
-        return NO_ERROR;
-    }
-
-    if (window != 0) {
-        result = native_window_api_connect(window.get(), NATIVE_WINDOW_API_CAMERA);
-        if (result != NO_ERROR) {
-            ALOGE("native_window_api_connect failed: %s (%d)", strerror(-result),
-                    result);
-            return result;
-        }
-    }
-
-    // If preview has been already started, register preview buffers now.
-    if (mHardware->previewEnabled()) {
-        if (window != 0) {
-            mHardware->setPreviewScalingMode(NATIVE_WINDOW_SCALING_MODE_SCALE_TO_WINDOW);
-            mHardware->setPreviewTransform(mOrientation);
-            result = mHardware->setPreviewWindow(window);
-        }
-    }
-
-    if (result == NO_ERROR) {
-        // Everything has succeeded.  Disconnect the old window and remember the
-        // new window.
-        disconnectWindow(mPreviewWindow);
-        mSurface = binder;
-        mPreviewWindow = window;
-    } else {
-        // Something went wrong after we connected to the new window, so
-        // disconnect here.
-        disconnectWindow(window);
-    }
-
-    return result;
-}
-
-// set the buffer consumer that the preview will use
-status_t CameraClient::setPreviewTarget(
-        const sp<IGraphicBufferProducer>& bufferProducer) {
-    LOG1("setPreviewTarget(%p) (pid %d)", bufferProducer.get(),
-            CameraThreadState::getCallingPid());
-
-    sp<IBinder> binder;
-    sp<ANativeWindow> window;
-    if (bufferProducer != 0) {
-        binder = IInterface::asBinder(bufferProducer);
-        // Using controlledByApp flag to ensure that the buffer queue remains in
-        // async mode for the old camera API, where many applications depend
-        // on that behavior.
-        window = new Surface(bufferProducer, /*controlledByApp*/ true);
-    }
-    return setPreviewWindow(binder, window);
-}
-
-// set the preview callback flag to affect how the received frames from
-// preview are handled.
-void CameraClient::setPreviewCallbackFlag(int callback_flag) {
-    LOG1("setPreviewCallbackFlag(%d) (pid %d)", callback_flag, CameraThreadState::getCallingPid());
-    Mutex::Autolock lock(mLock);
-    if (checkPidAndHardware() != NO_ERROR) return;
-
-    mPreviewCallbackFlag = callback_flag;
-    if (mPreviewCallbackFlag & CAMERA_FRAME_CALLBACK_FLAG_ENABLE_MASK) {
-        enableMsgType(CAMERA_MSG_PREVIEW_FRAME);
-    } else {
-        disableMsgType(CAMERA_MSG_PREVIEW_FRAME);
-    }
-}
-
-status_t CameraClient::setPreviewCallbackTarget(
-        const sp<IGraphicBufferProducer>& callbackProducer) {
-    (void)callbackProducer;
-    ALOGE("%s: Unimplemented!", __FUNCTION__);
-    return INVALID_OPERATION;
-}
-
-// start preview mode
-status_t CameraClient::startPreview() {
-    LOG1("startPreview (pid %d)", CameraThreadState::getCallingPid());
-    return startCameraMode(CAMERA_PREVIEW_MODE);
-}
-
-// start recording mode
-status_t CameraClient::startRecording() {
-    LOG1("startRecording (pid %d)", CameraThreadState::getCallingPid());
-    return startCameraMode(CAMERA_RECORDING_MODE);
-}
-
-// start preview or recording
-status_t CameraClient::startCameraMode(camera_mode mode) {
-    LOG1("startCameraMode(%d)", mode);
-    Mutex::Autolock lock(mLock);
-    status_t result = checkPidAndHardware();
-    if (result != NO_ERROR) return result;
-
-    switch(mode) {
-        case CAMERA_PREVIEW_MODE:
-            if (mSurface == 0 && mPreviewWindow == 0) {
-                LOG1("mSurface is not set yet.");
-                // still able to start preview in this case.
-            }
-            return startPreviewMode();
-        case CAMERA_RECORDING_MODE:
-            if (mSurface == 0 && mPreviewWindow == 0) {
-                ALOGE("mSurface or mPreviewWindow must be set before startRecordingMode.");
-                return INVALID_OPERATION;
-            }
-            return startRecordingMode();
-        default:
-            return UNKNOWN_ERROR;
-    }
-}
-
-status_t CameraClient::startPreviewMode() {
-    LOG1("startPreviewMode");
-    status_t result = NO_ERROR;
-
-    // if preview has been enabled, nothing needs to be done
-    if (mHardware->previewEnabled()) {
-        return NO_ERROR;
-    }
-
-    if (mPreviewWindow != 0) {
-        mHardware->setPreviewScalingMode(
-            NATIVE_WINDOW_SCALING_MODE_SCALE_TO_WINDOW);
-        mHardware->setPreviewTransform(mOrientation);
-    }
-    mHardware->setPreviewWindow(mPreviewWindow);
-    result = mHardware->startPreview();
-    if (result == NO_ERROR) {
-        sCameraService->updateProxyDeviceState(
-            hardware::ICameraServiceProxy::CAMERA_STATE_ACTIVE,
-            mCameraIdStr, mCameraFacing, mClientPackageName,
-            hardware::ICameraServiceProxy::CAMERA_API_LEVEL_1);
-    }
-    return result;
-}
-
-status_t CameraClient::startRecordingMode() {
-    LOG1("startRecordingMode");
-    status_t result = NO_ERROR;
-
-    // if recording has been enabled, nothing needs to be done
-    if (mHardware->recordingEnabled()) {
-        return NO_ERROR;
-    }
-
-    // if preview has not been started, start preview first
-    if (!mHardware->previewEnabled()) {
-        result = startPreviewMode();
-        if (result != NO_ERROR) {
-            return result;
-        }
-    }
-
-    // start recording mode
-    enableMsgType(CAMERA_MSG_VIDEO_FRAME);
-    sCameraService->playSound(CameraService::SOUND_RECORDING_START);
-    result = mHardware->startRecording();
-    if (result != NO_ERROR) {
-        ALOGE("mHardware->startRecording() failed with status %d", result);
-    }
-    return result;
-}
-
-// stop preview mode
-void CameraClient::stopPreview() {
-    LOG1("stopPreview (pid %d)", CameraThreadState::getCallingPid());
-    Mutex::Autolock lock(mLock);
-    if (checkPidAndHardware() != NO_ERROR) return;
-
-
-    disableMsgType(CAMERA_MSG_PREVIEW_FRAME);
-    mHardware->stopPreview();
-    sCameraService->updateProxyDeviceState(
-        hardware::ICameraServiceProxy::CAMERA_STATE_IDLE,
-        mCameraIdStr, mCameraFacing, mClientPackageName,
-        hardware::ICameraServiceProxy::CAMERA_API_LEVEL_1);
-    mPreviewBuffer.clear();
-}
-
-// stop recording mode
-void CameraClient::stopRecording() {
-    LOG1("stopRecording (pid %d)", CameraThreadState::getCallingPid());
-    {
-        Mutex::Autolock lock(mLock);
-        if (checkPidAndHardware() != NO_ERROR) return;
-
-        disableMsgType(CAMERA_MSG_VIDEO_FRAME);
-        mHardware->stopRecording();
-        sCameraService->playSound(CameraService::SOUND_RECORDING_STOP);
-
-        mPreviewBuffer.clear();
-    }
-
-    {
-        Mutex::Autolock l(mAvailableCallbackBuffersLock);
-        if (!mAvailableCallbackBuffers.empty()) {
-            mAvailableCallbackBuffers.clear();
-        }
-    }
-}
-
-// release a recording frame
-void CameraClient::releaseRecordingFrame(const sp<IMemory>& mem) {
-    Mutex::Autolock lock(mLock);
-    if (checkPidAndHardware() != NO_ERROR) return;
-    if (mem == nullptr) {
-        android_errorWriteWithInfoLog(CameraService::SN_EVENT_LOG_ID, "26164272",
-                CameraThreadState::getCallingUid(), nullptr, 0);
-        return;
-    }
-
-    mHardware->releaseRecordingFrame(mem);
-}
-
-void CameraClient::releaseRecordingFrameHandle(native_handle_t *handle) {
-    if (handle == nullptr) return;
-    Mutex::Autolock lock(mLock);
-    sp<IMemory> dataPtr;
-    {
-        Mutex::Autolock l(mAvailableCallbackBuffersLock);
-        if (!mAvailableCallbackBuffers.empty()) {
-            dataPtr = mAvailableCallbackBuffers.back();
-            mAvailableCallbackBuffers.pop_back();
-        }
-    }
-
-    if (dataPtr == nullptr) {
-        ALOGE("%s: %d: No callback buffer available. Dropping a native handle.", __FUNCTION__,
-                __LINE__);
-        native_handle_close(handle);
-        native_handle_delete(handle);
-        return;
-    } else if (dataPtr->size() != sizeof(VideoNativeHandleMetadata)) {
-        ALOGE("%s: %d: Callback buffer size doesn't match VideoNativeHandleMetadata", __FUNCTION__,
-                __LINE__);
-        native_handle_close(handle);
-        native_handle_delete(handle);
-        return;
-    }
-
-    if (mHardware != nullptr) {
-        VideoNativeHandleMetadata *metadata = (VideoNativeHandleMetadata*)(dataPtr->unsecurePointer());
-        metadata->eType = kMetadataBufferTypeNativeHandleSource;
-        metadata->pHandle = handle;
-        mHardware->releaseRecordingFrame(dataPtr);
-    }
-}
-
-void CameraClient::releaseRecordingFrameHandleBatch(const std::vector<native_handle_t*>& handles) {
-    Mutex::Autolock lock(mLock);
-    bool disconnected = (mHardware == nullptr);
-    size_t n = handles.size();
-    std::vector<sp<IMemory>> frames;
-    if (!disconnected) {
-        frames.reserve(n);
-    }
-    bool error = false;
-    for (auto& handle : handles) {
-        sp<IMemory> dataPtr;
-        {
-            Mutex::Autolock l(mAvailableCallbackBuffersLock);
-            if (!mAvailableCallbackBuffers.empty()) {
-                dataPtr = mAvailableCallbackBuffers.back();
-                mAvailableCallbackBuffers.pop_back();
-            }
-        }
-
-        if (dataPtr == nullptr) {
-            ALOGE("%s: %d: No callback buffer available. Dropping frames.", __FUNCTION__,
-                    __LINE__);
-            error = true;
-            break;
-        } else if (dataPtr->size() != sizeof(VideoNativeHandleMetadata)) {
-            ALOGE("%s: %d: Callback buffer must be VideoNativeHandleMetadata", __FUNCTION__,
-                    __LINE__);
-            error = true;
-            break;
-        }
-
-        if (!disconnected) {
-            VideoNativeHandleMetadata *metadata = (VideoNativeHandleMetadata*)(dataPtr->unsecurePointer());
-            metadata->eType = kMetadataBufferTypeNativeHandleSource;
-            metadata->pHandle = handle;
-            frames.push_back(dataPtr);
-        }
-    }
-
-    if (error) {
-        for (auto& handle : handles) {
-            native_handle_close(handle);
-            native_handle_delete(handle);
-        }
-    } else if (!disconnected) {
-        mHardware->releaseRecordingFrameBatch(frames);
-    }
-    return;
-}
-
-status_t CameraClient::setVideoBufferMode(int32_t videoBufferMode) {
-    LOG1("setVideoBufferMode: %d", videoBufferMode);
-    bool enableMetadataInBuffers = false;
-
-    if (videoBufferMode == VIDEO_BUFFER_MODE_DATA_CALLBACK_METADATA) {
-        enableMetadataInBuffers = true;
-    } else if (videoBufferMode != VIDEO_BUFFER_MODE_DATA_CALLBACK_YUV) {
-        ALOGE("%s: %d: videoBufferMode %d is not supported.", __FUNCTION__, __LINE__,
-                videoBufferMode);
-        return BAD_VALUE;
-    }
-
-    Mutex::Autolock lock(mLock);
-    if (checkPidAndHardware() != NO_ERROR) {
-        return UNKNOWN_ERROR;
-    }
-
-    return mHardware->storeMetaDataInBuffers(enableMetadataInBuffers);
-}
-
-bool CameraClient::previewEnabled() {
-    LOG1("previewEnabled (pid %d)", CameraThreadState::getCallingPid());
-
-    Mutex::Autolock lock(mLock);
-    if (checkPidAndHardware() != NO_ERROR) return false;
-    return mHardware->previewEnabled();
-}
-
-bool CameraClient::recordingEnabled() {
-    LOG1("recordingEnabled (pid %d)", CameraThreadState::getCallingPid());
-
-    Mutex::Autolock lock(mLock);
-    if (checkPidAndHardware() != NO_ERROR) return false;
-    return mHardware->recordingEnabled();
-}
-
-status_t CameraClient::autoFocus() {
-    LOG1("autoFocus (pid %d)", CameraThreadState::getCallingPid());
-
-    Mutex::Autolock lock(mLock);
-    status_t result = checkPidAndHardware();
-    if (result != NO_ERROR) return result;
-
-    return mHardware->autoFocus();
-}
-
-status_t CameraClient::cancelAutoFocus() {
-    LOG1("cancelAutoFocus (pid %d)", CameraThreadState::getCallingPid());
-
-    Mutex::Autolock lock(mLock);
-    status_t result = checkPidAndHardware();
-    if (result != NO_ERROR) return result;
-
-    return mHardware->cancelAutoFocus();
-}
-
-// take a picture - image is returned in callback
-status_t CameraClient::takePicture(int msgType) {
-    LOG1("takePicture (pid %d): 0x%x", CameraThreadState::getCallingPid(), msgType);
-
-    Mutex::Autolock lock(mLock);
-    status_t result = checkPidAndHardware();
-    if (result != NO_ERROR) return result;
-
-    if ((msgType & CAMERA_MSG_RAW_IMAGE) &&
-        (msgType & CAMERA_MSG_RAW_IMAGE_NOTIFY)) {
-        ALOGE("CAMERA_MSG_RAW_IMAGE and CAMERA_MSG_RAW_IMAGE_NOTIFY"
-                " cannot be both enabled");
-        return BAD_VALUE;
-    }
-
-    // We only accept picture related message types
-    // and ignore other types of messages for takePicture().
-    int picMsgType = msgType
-                        & (CAMERA_MSG_SHUTTER |
-                           CAMERA_MSG_POSTVIEW_FRAME |
-                           CAMERA_MSG_RAW_IMAGE |
-                           CAMERA_MSG_RAW_IMAGE_NOTIFY |
-                           CAMERA_MSG_COMPRESSED_IMAGE);
-
-    enableMsgType(picMsgType);
-
-    return mHardware->takePicture();
-}
-
-// set preview/capture parameters - key/value pairs
-status_t CameraClient::setParameters(const String8& params) {
-    LOG1("setParameters (pid %d) (%s)", CameraThreadState::getCallingPid(), params.string());
-
-    Mutex::Autolock lock(mLock);
-    status_t result = checkPidAndHardware();
-    if (result != NO_ERROR) return result;
-
-    mLatestSetParameters = CameraParameters(params);
-    CameraParameters p(params);
-    return mHardware->setParameters(p);
-}
-
-// get preview/capture parameters - key/value pairs
-String8 CameraClient::getParameters() const {
-    Mutex::Autolock lock(mLock);
-    // The camera service can unconditionally get the parameters at all times
-    if (CameraThreadState::getCallingPid() != mServicePid && checkPidAndHardware() != NO_ERROR) {
-        return String8();
-    }
-
-    String8 params(mHardware->getParameters().flatten());
-    LOG1("getParameters (pid %d) (%s)", CameraThreadState::getCallingPid(), params.string());
-    return params;
-}
-
-// enable shutter sound
-status_t CameraClient::enableShutterSound(bool enable) {
-    LOG1("enableShutterSound (pid %d)", CameraThreadState::getCallingPid());
-
-    status_t result = checkPidAndHardware();
-    if (result != NO_ERROR) return result;
-
-    if (enable) {
-        mPlayShutterSound = true;
-        return OK;
-    }
-
-    mPlayShutterSound = false;
-    return OK;
-}
-
-status_t CameraClient::sendCommand(int32_t cmd, int32_t arg1, int32_t arg2) {
-    LOG1("sendCommand (pid %d)", CameraThreadState::getCallingPid());
-    int orientation;
-    Mutex::Autolock lock(mLock);
-    status_t result = checkPidAndHardware();
-    if (result != NO_ERROR) return result;
-
-    if (cmd == CAMERA_CMD_SET_DISPLAY_ORIENTATION) {
-        // Mirror the preview if the camera is front-facing.
-        orientation = getOrientation(arg1, mCameraFacing == CAMERA_FACING_FRONT);
-        if (orientation == -1) return BAD_VALUE;
-
-        if (mOrientation != orientation) {
-            mOrientation = orientation;
-            if (mPreviewWindow != 0) {
-                mHardware->setPreviewTransform(mOrientation);
-            }
-        }
-        return OK;
-    } else if (cmd == CAMERA_CMD_ENABLE_SHUTTER_SOUND) {
-        switch (arg1) {
-            case 0:
-                return enableShutterSound(false);
-            case 1:
-                return enableShutterSound(true);
-            default:
-                return BAD_VALUE;
-        }
-        return OK;
-    } else if (cmd == CAMERA_CMD_PLAY_RECORDING_SOUND) {
-        sCameraService->playSound(CameraService::SOUND_RECORDING_START);
-    } else if (cmd == CAMERA_CMD_SET_VIDEO_BUFFER_COUNT) {
-        // Silently ignore this command
-        return INVALID_OPERATION;
-    } else if (cmd == CAMERA_CMD_PING) {
-        // If mHardware is 0, checkPidAndHardware will return error.
-        return OK;
-    }
-
-    return mHardware->sendCommand(cmd, arg1, arg2);
-}
-
-// ----------------------------------------------------------------------------
-
-void CameraClient::enableMsgType(int32_t msgType) {
-    android_atomic_or(msgType, &mMsgEnabled);
-    mHardware->enableMsgType(msgType);
-}
-
-void CameraClient::disableMsgType(int32_t msgType) {
-    android_atomic_and(~msgType, &mMsgEnabled);
-    mHardware->disableMsgType(msgType);
-}
-
-#define CHECK_MESSAGE_INTERVAL 10 // 10ms
-bool CameraClient::lockIfMessageWanted(int32_t msgType) {
-    int sleepCount = 0;
-    while (mMsgEnabled & msgType) {
-        if (mLock.tryLock() == NO_ERROR) {
-            if (sleepCount > 0) {
-                LOG1("lockIfMessageWanted(%d): waited for %d ms",
-                    msgType, sleepCount * CHECK_MESSAGE_INTERVAL);
-            }
-
-            // If messages are no longer enabled after acquiring lock, release and drop message
-            if ((mMsgEnabled & msgType) == 0) {
-                mLock.unlock();
-                break;
-            }
-
-            return true;
-        }
-        if (sleepCount++ == 0) {
-            LOG1("lockIfMessageWanted(%d): enter sleep", msgType);
-        }
-        usleep(CHECK_MESSAGE_INTERVAL * 1000);
-    }
-    ALOGW("lockIfMessageWanted(%d): dropped unwanted message", msgType);
-    return false;
-}
-
-sp<CameraClient> CameraClient::getClientFromCookie(void* user) {
-    String8 cameraId = String8::format("%d", (int)(intptr_t) user);
-    auto clientDescriptor = sCameraService->mActiveClientManager.get(cameraId);
-    if (clientDescriptor != nullptr) {
-        return sp<CameraClient>{
-                static_cast<CameraClient*>(clientDescriptor->getValue().get())};
-    }
-    return sp<CameraClient>{nullptr};
-}
-
-// Callback messages can be dispatched to internal handlers or pass to our
-// client's callback functions, depending on the message type.
-//
-// notifyCallback:
-//      CAMERA_MSG_SHUTTER              handleShutter
-//      (others)                        c->notifyCallback
-// dataCallback:
-//      CAMERA_MSG_PREVIEW_FRAME        handlePreviewData
-//      CAMERA_MSG_POSTVIEW_FRAME       handlePostview
-//      CAMERA_MSG_RAW_IMAGE            handleRawPicture
-//      CAMERA_MSG_COMPRESSED_IMAGE     handleCompressedPicture
-//      (others)                        c->dataCallback
-// dataCallbackTimestamp
-//      (others)                        c->dataCallbackTimestamp
-
-void CameraClient::notifyCallback(int32_t msgType, int32_t ext1,
-        int32_t ext2, void* user) {
-    LOG2("notifyCallback(%d)", msgType);
-
-    sp<CameraClient> client = getClientFromCookie(user);
-    if (client.get() == nullptr) return;
-
-    if (!client->lockIfMessageWanted(msgType)) return;
-
-    switch (msgType) {
-        case CAMERA_MSG_SHUTTER:
-            // ext1 is the dimension of the yuv picture.
-            client->handleShutter();
-            break;
-        default:
-            client->handleGenericNotify(msgType, ext1, ext2);
-            break;
-    }
-}
-
-void CameraClient::dataCallback(int32_t msgType,
-        const sp<IMemory>& dataPtr, camera_frame_metadata_t *metadata, void* user) {
-    LOG2("dataCallback(%d)", msgType);
-
-    sp<CameraClient> client = getClientFromCookie(user);
-    if (client.get() == nullptr) return;
-
-    if (!client->lockIfMessageWanted(msgType)) return;
-    if (dataPtr == 0 && metadata == NULL) {
-        ALOGE("Null data returned in data callback");
-        client->handleGenericNotify(CAMERA_MSG_ERROR, UNKNOWN_ERROR, 0);
-        return;
-    }
-
-    switch (msgType & ~CAMERA_MSG_PREVIEW_METADATA) {
-        case CAMERA_MSG_PREVIEW_FRAME:
-            client->handlePreviewData(msgType, dataPtr, metadata);
-            break;
-        case CAMERA_MSG_POSTVIEW_FRAME:
-            client->handlePostview(dataPtr);
-            break;
-        case CAMERA_MSG_RAW_IMAGE:
-            client->handleRawPicture(dataPtr);
-            break;
-        case CAMERA_MSG_COMPRESSED_IMAGE:
-            client->handleCompressedPicture(dataPtr);
-            break;
-        default:
-            client->handleGenericData(msgType, dataPtr, metadata);
-            break;
-    }
-}
-
-void CameraClient::dataCallbackTimestamp(nsecs_t timestamp,
-        int32_t msgType, const sp<IMemory>& dataPtr, void* user) {
-    LOG2("dataCallbackTimestamp(%d)", msgType);
-
-    sp<CameraClient> client = getClientFromCookie(user);
-    if (client.get() == nullptr) return;
-
-    if (!client->lockIfMessageWanted(msgType)) return;
-
-    if (dataPtr == 0) {
-        ALOGE("Null data returned in data with timestamp callback");
-        client->handleGenericNotify(CAMERA_MSG_ERROR, UNKNOWN_ERROR, 0);
-        return;
-    }
-
-    client->handleGenericDataTimestamp(timestamp, msgType, dataPtr);
-}
-
-void CameraClient::handleCallbackTimestampBatch(
-        int32_t msgType, const std::vector<HandleTimestampMessage>& msgs, void* user) {
-    LOG2("dataCallbackTimestampBatch");
-    sp<CameraClient> client = getClientFromCookie(user);
-    if (client.get() == nullptr) return;
-    if (!client->lockIfMessageWanted(msgType)) return;
-
-    sp<hardware::ICameraClient> c = client->mRemoteCallback;
-    client->mLock.unlock();
-    if (c != 0 && msgs.size() > 0) {
-        size_t n = msgs.size();
-        std::vector<nsecs_t> timestamps;
-        std::vector<native_handle_t*> handles;
-        timestamps.reserve(n);
-        handles.reserve(n);
-        for (auto& msg : msgs) {
-            native_handle_t* handle = nullptr;
-            if (msg.dataPtr->size() != sizeof(VideoNativeHandleMetadata)) {
-                ALOGE("%s: dataPtr does not contain VideoNativeHandleMetadata!", __FUNCTION__);
-                return;
-            }
-            // TODO: Using unsecurePointer() has some associated security pitfalls
-            //       (see declaration for details).
-            //       Either document why it is safe in this case or address the
-            //       issue (e.g. by copying).
-            VideoNativeHandleMetadata *metadata =
-                (VideoNativeHandleMetadata*)(msg.dataPtr->unsecurePointer());
-            if (metadata->eType == kMetadataBufferTypeNativeHandleSource) {
-                handle = metadata->pHandle;
-            }
-
-            if (handle == nullptr) {
-                ALOGE("%s: VideoNativeHandleMetadata type mismatch or null handle passed!",
-                        __FUNCTION__);
-                return;
-            }
-            {
-                Mutex::Autolock l(client->mAvailableCallbackBuffersLock);
-                client->mAvailableCallbackBuffers.push_back(msg.dataPtr);
-            }
-            timestamps.push_back(msg.timestamp);
-            handles.push_back(handle);
-        }
-        c->recordingFrameHandleCallbackTimestampBatch(timestamps, handles);
-    }
-}
-
-// snapshot taken callback
-void CameraClient::handleShutter(void) {
-    if (mPlayShutterSound) {
-        sCameraService->playSound(CameraService::SOUND_SHUTTER);
-    }
-
-    sp<hardware::ICameraClient> c = mRemoteCallback;
-    if (c != 0) {
-        mLock.unlock();
-        c->notifyCallback(CAMERA_MSG_SHUTTER, 0, 0);
-        if (!lockIfMessageWanted(CAMERA_MSG_SHUTTER)) return;
-    }
-    disableMsgType(CAMERA_MSG_SHUTTER);
-
-    // Shutters only happen in response to takePicture, so mark device as
-    // idle now, until preview is restarted
-    sCameraService->updateProxyDeviceState(
-        hardware::ICameraServiceProxy::CAMERA_STATE_IDLE,
-        mCameraIdStr, mCameraFacing, mClientPackageName,
-        hardware::ICameraServiceProxy::CAMERA_API_LEVEL_1);
-
-    mLock.unlock();
-}
-
-// preview callback - frame buffer update
-void CameraClient::handlePreviewData(int32_t msgType,
-                                              const sp<IMemory>& mem,
-                                              camera_frame_metadata_t *metadata) {
-    ssize_t offset;
-    size_t size;
-    sp<IMemoryHeap> heap = mem->getMemory(&offset, &size);
-
-    // local copy of the callback flags
-    int flags = mPreviewCallbackFlag;
-
-    // is callback enabled?
-    if (!(flags & CAMERA_FRAME_CALLBACK_FLAG_ENABLE_MASK)) {
-        // If the enable bit is off, the copy-out and one-shot bits are ignored
-        LOG2("frame callback is disabled");
-        mLock.unlock();
-        return;
-    }
-
-    // hold a strong pointer to the client
-    sp<hardware::ICameraClient> c = mRemoteCallback;
-
-    // clear callback flags if no client or one-shot mode
-    if (c == 0 || (mPreviewCallbackFlag & CAMERA_FRAME_CALLBACK_FLAG_ONE_SHOT_MASK)) {
-        LOG2("Disable preview callback");
-        mPreviewCallbackFlag &= ~(CAMERA_FRAME_CALLBACK_FLAG_ONE_SHOT_MASK |
-                                  CAMERA_FRAME_CALLBACK_FLAG_COPY_OUT_MASK |
-                                  CAMERA_FRAME_CALLBACK_FLAG_ENABLE_MASK);
-        disableMsgType(CAMERA_MSG_PREVIEW_FRAME);
-    }
-
-    if (c != 0) {
-        // Is the received frame copied out or not?
-        if (flags & CAMERA_FRAME_CALLBACK_FLAG_COPY_OUT_MASK) {
-            LOG2("frame is copied");
-            copyFrameAndPostCopiedFrame(msgType, c, heap, offset, size, metadata);
-        } else {
-            LOG2("frame is forwarded");
-            mLock.unlock();
-            c->dataCallback(msgType, mem, metadata);
-        }
-    } else {
-        mLock.unlock();
-    }
-}
-
-// picture callback - postview image ready
-void CameraClient::handlePostview(const sp<IMemory>& mem) {
-    disableMsgType(CAMERA_MSG_POSTVIEW_FRAME);
-
-    sp<hardware::ICameraClient> c = mRemoteCallback;
-    mLock.unlock();
-    if (c != 0) {
-        c->dataCallback(CAMERA_MSG_POSTVIEW_FRAME, mem, NULL);
-    }
-}
-
-// picture callback - raw image ready
-void CameraClient::handleRawPicture(const sp<IMemory>& mem) {
-    disableMsgType(CAMERA_MSG_RAW_IMAGE);
-
-    ssize_t offset;
-    size_t size;
-    sp<IMemoryHeap> heap = mem->getMemory(&offset, &size);
-
-    sp<hardware::ICameraClient> c = mRemoteCallback;
-    mLock.unlock();
-    if (c != 0) {
-        c->dataCallback(CAMERA_MSG_RAW_IMAGE, mem, NULL);
-    }
-}
-
-// picture callback - compressed picture ready
-void CameraClient::handleCompressedPicture(const sp<IMemory>& mem) {
-    disableMsgType(CAMERA_MSG_COMPRESSED_IMAGE);
-
-    sp<hardware::ICameraClient> c = mRemoteCallback;
-    mLock.unlock();
-    if (c != 0) {
-        c->dataCallback(CAMERA_MSG_COMPRESSED_IMAGE, mem, NULL);
-    }
-}
-
-
-void CameraClient::handleGenericNotify(int32_t msgType,
-    int32_t ext1, int32_t ext2) {
-    sp<hardware::ICameraClient> c = mRemoteCallback;
-    mLock.unlock();
-    if (c != 0) {
-        c->notifyCallback(msgType, ext1, ext2);
-    }
-}
-
-void CameraClient::handleGenericData(int32_t msgType,
-    const sp<IMemory>& dataPtr, camera_frame_metadata_t *metadata) {
-    sp<hardware::ICameraClient> c = mRemoteCallback;
-    mLock.unlock();
-    if (c != 0) {
-        c->dataCallback(msgType, dataPtr, metadata);
-    }
-}
-
-void CameraClient::handleGenericDataTimestamp(nsecs_t timestamp,
-    int32_t msgType, const sp<IMemory>& dataPtr) {
-    sp<hardware::ICameraClient> c = mRemoteCallback;
-    mLock.unlock();
-    if (c != 0 && dataPtr != nullptr) {
-        native_handle_t* handle = nullptr;
-
-        // Check if dataPtr contains a VideoNativeHandleMetadata.
-        if (dataPtr->size() == sizeof(VideoNativeHandleMetadata)) {
-            // TODO: Using unsecurePointer() has some associated security pitfalls
-            //       (see declaration for details).
-            //       Either document why it is safe in this case or address the
-            //       issue (e.g. by copying).
-            VideoNativeHandleMetadata *metadata =
-                (VideoNativeHandleMetadata*)(dataPtr->unsecurePointer());
-            if (metadata->eType == kMetadataBufferTypeNativeHandleSource) {
-                handle = metadata->pHandle;
-            }
-        }
-
-        // If dataPtr contains a native handle, send it via recordingFrameHandleCallbackTimestamp.
-        if (handle != nullptr) {
-            {
-                Mutex::Autolock l(mAvailableCallbackBuffersLock);
-                mAvailableCallbackBuffers.push_back(dataPtr);
-            }
-            c->recordingFrameHandleCallbackTimestamp(timestamp, handle);
-        } else {
-            c->dataCallbackTimestamp(timestamp, msgType, dataPtr);
-        }
-    }
-}
-
-void CameraClient::copyFrameAndPostCopiedFrame(
-        int32_t msgType, const sp<hardware::ICameraClient>& client,
-        const sp<IMemoryHeap>& heap, size_t offset, size_t size,
-        camera_frame_metadata_t *metadata) {
-    LOG2("copyFrameAndPostCopiedFrame");
-    // It is necessary to copy out of pmem before sending this to
-    // the callback. For efficiency, reuse the same MemoryHeapBase
-    // provided it's big enough. Don't allocate the memory or
-    // perform the copy if there's no callback.
-    // hold the preview lock while we grab a reference to the preview buffer
-    sp<MemoryHeapBase> previewBuffer;
-
-    if (mPreviewBuffer == 0) {
-        mPreviewBuffer = new MemoryHeapBase(size, 0, NULL);
-    } else if (size > mPreviewBuffer->virtualSize()) {
-        mPreviewBuffer.clear();
-        mPreviewBuffer = new MemoryHeapBase(size, 0, NULL);
-    }
-    if (mPreviewBuffer == 0) {
-        ALOGE("failed to allocate space for preview buffer");
-        mLock.unlock();
-        return;
-    }
-    previewBuffer = mPreviewBuffer;
-
-    void* previewBufferBase = previewBuffer->base();
-    void* heapBase = heap->base();
-
-    if (heapBase == MAP_FAILED) {
-        ALOGE("%s: Failed to mmap heap for preview frame.", __FUNCTION__);
-        mLock.unlock();
-        return;
-    } else if (previewBufferBase == MAP_FAILED) {
-        ALOGE("%s: Failed to mmap preview buffer for preview frame.", __FUNCTION__);
-        mLock.unlock();
-        return;
-    }
-
-    memcpy(previewBufferBase, (uint8_t *) heapBase + offset, size);
-
-    sp<MemoryBase> frame = new MemoryBase(previewBuffer, 0, size);
-    if (frame == 0) {
-        ALOGE("failed to allocate space for frame callback");
-        mLock.unlock();
-        return;
-    }
-
-    mLock.unlock();
-    client->dataCallback(msgType, frame, metadata);
-}
-
-int CameraClient::getOrientation(int degrees, bool mirror) {
-    if (!mirror) {
-        if (degrees == 0) return 0;
-        else if (degrees == 90) return HAL_TRANSFORM_ROT_90;
-        else if (degrees == 180) return HAL_TRANSFORM_ROT_180;
-        else if (degrees == 270) return HAL_TRANSFORM_ROT_270;
-    } else {  // Do mirror (horizontal flip)
-        if (degrees == 0) {           // FLIP_H and ROT_0
-            return HAL_TRANSFORM_FLIP_H;
-        } else if (degrees == 90) {   // FLIP_H and ROT_90
-            return HAL_TRANSFORM_FLIP_H | HAL_TRANSFORM_ROT_90;
-        } else if (degrees == 180) {  // FLIP_H and ROT_180
-            return HAL_TRANSFORM_FLIP_V;
-        } else if (degrees == 270) {  // FLIP_H and ROT_270
-            return HAL_TRANSFORM_FLIP_V | HAL_TRANSFORM_ROT_90;
-        }
-    }
-    ALOGE("Invalid setDisplayOrientation degrees=%d", degrees);
-    return -1;
-}
-
-status_t CameraClient::setVideoTarget(const sp<IGraphicBufferProducer>& bufferProducer) {
-    (void)bufferProducer;
-    ALOGE("%s: %d: CameraClient doesn't support setting a video target.", __FUNCTION__, __LINE__);
-    return INVALID_OPERATION;
-}
-
-status_t CameraClient::setAudioRestriction(int mode) {
-    if (!isValidAudioRestriction(mode)) {
-        ALOGE("%s: invalid audio restriction mode %d", __FUNCTION__, mode);
-        return BAD_VALUE;
-    }
-
-    Mutex::Autolock lock(mLock);
-    if (checkPidAndHardware() != NO_ERROR) {
-        return INVALID_OPERATION;
-    }
-    return BasicClient::setAudioRestriction(mode);
-}
-
-int32_t CameraClient::getGlobalAudioRestriction() {
-    Mutex::Autolock lock(mLock);
-    if (checkPidAndHardware() != NO_ERROR) {
-        return INVALID_OPERATION;
-    }
-    return BasicClient::getServiceAudioRestriction();
-}
-
-// API1->Device1 does not support this feature
-status_t CameraClient::setRotateAndCropOverride(uint8_t /*rotateAndCrop*/) {
-    return OK;
-}
-
-}; // namespace android
diff --git a/services/camera/libcameraservice/api1/CameraClient.h b/services/camera/libcameraservice/api1/CameraClient.h
deleted file mode 100644
index aacb00e..0000000
--- a/services/camera/libcameraservice/api1/CameraClient.h
+++ /dev/null
@@ -1,190 +0,0 @@
-/*
- * Copyright (C) 2012 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ANDROID_SERVERS_CAMERA_CAMERACLIENT_H
-#define ANDROID_SERVERS_CAMERA_CAMERACLIENT_H
-
-#include "CameraService.h"
-
-namespace android {
-
-class MemoryHeapBase;
-class CameraHardwareInterface;
-
-/**
- * Interface between android.hardware.Camera API and Camera HAL device for version
- * CAMERA_DEVICE_API_VERSION_1_0.
- */
-
-class CameraClient : public CameraService::Client
-{
-public:
-    // ICamera interface (see ICamera for details)
-    virtual binder::Status  disconnect();
-    virtual status_t        connect(const sp<hardware::ICameraClient>& client);
-    virtual status_t        lock();
-    virtual status_t        unlock();
-    virtual status_t        setPreviewTarget(const sp<IGraphicBufferProducer>& bufferProducer);
-    virtual void            setPreviewCallbackFlag(int flag);
-    virtual status_t        setPreviewCallbackTarget(
-            const sp<IGraphicBufferProducer>& callbackProducer);
-    virtual status_t        startPreview();
-    virtual void            stopPreview();
-    virtual bool            previewEnabled();
-    virtual status_t        setVideoBufferMode(int32_t videoBufferMode);
-    virtual status_t        startRecording();
-    virtual void            stopRecording();
-    virtual bool            recordingEnabled();
-    virtual void            releaseRecordingFrame(const sp<IMemory>& mem);
-    virtual void            releaseRecordingFrameHandle(native_handle_t *handle);
-    virtual void            releaseRecordingFrameHandleBatch(
-                                    const std::vector<native_handle_t*>& handles);
-    virtual status_t        autoFocus();
-    virtual status_t        cancelAutoFocus();
-    virtual status_t        takePicture(int msgType);
-    virtual status_t        setParameters(const String8& params);
-    virtual String8         getParameters() const;
-    virtual status_t        sendCommand(int32_t cmd, int32_t arg1, int32_t arg2);
-    virtual status_t        setVideoTarget(const sp<IGraphicBufferProducer>& bufferProducer);
-    virtual status_t        setAudioRestriction(int mode);
-    virtual int32_t         getGlobalAudioRestriction();
-
-    virtual status_t        setRotateAndCropOverride(uint8_t override);
-
-    // Interface used by CameraService
-    CameraClient(const sp<CameraService>& cameraService,
-            const sp<hardware::ICameraClient>& cameraClient,
-            const String16& clientPackageName,
-            const std::optional<String16>& clientFeatureId,
-            int cameraId,
-            int cameraFacing,
-            int clientPid,
-            int clientUid,
-            int servicePid);
-    ~CameraClient();
-
-    virtual status_t initialize(sp<CameraProviderManager> manager,
-            const String8& monitorTags) override;
-
-    virtual status_t dump(int fd, const Vector<String16>& args);
-
-    virtual status_t dumpClient(int fd, const Vector<String16>& args);
-
-private:
-
-    // check whether the calling process matches mClientPid.
-    status_t                checkPid() const;
-    status_t                checkPidAndHardware() const;  // also check mHardware != 0
-
-    // these are internal functions used to set up preview buffers
-    status_t                registerPreviewBuffers();
-
-    // camera operation mode
-    enum camera_mode {
-        CAMERA_PREVIEW_MODE   = 0,  // frame automatically released
-        CAMERA_RECORDING_MODE = 1,  // frame has to be explicitly released by releaseRecordingFrame()
-    };
-    // these are internal functions used for preview/recording
-    status_t                startCameraMode(camera_mode mode);
-    status_t                startPreviewMode();
-    status_t                startRecordingMode();
-
-    // internal function used by sendCommand to enable/disable shutter sound.
-    status_t                enableShutterSound(bool enable);
-
-    static sp<CameraClient>        getClientFromCookie(void* user);
-
-    // these are static callback functions
-    static void             notifyCallback(int32_t msgType, int32_t ext1, int32_t ext2, void* user);
-    static void             dataCallback(int32_t msgType, const sp<IMemory>& dataPtr,
-            camera_frame_metadata_t *metadata, void* user);
-    static void             dataCallbackTimestamp(nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr, void* user);
-    static void             handleCallbackTimestampBatch(
-                                    int32_t msgType, const std::vector<HandleTimestampMessage>&, void* user);
-    // handlers for messages
-    void                    handleShutter(void);
-    void                    handlePreviewData(int32_t msgType, const sp<IMemory>& mem,
-            camera_frame_metadata_t *metadata);
-    void                    handlePostview(const sp<IMemory>& mem);
-    void                    handleRawPicture(const sp<IMemory>& mem);
-    void                    handleCompressedPicture(const sp<IMemory>& mem);
-    void                    handleGenericNotify(int32_t msgType, int32_t ext1, int32_t ext2);
-    void                    handleGenericData(int32_t msgType, const sp<IMemory>& dataPtr,
-            camera_frame_metadata_t *metadata);
-    void                    handleGenericDataTimestamp(nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr);
-
-    void                    copyFrameAndPostCopiedFrame(
-        int32_t msgType,
-        const sp<hardware::ICameraClient>& client,
-        const sp<IMemoryHeap>& heap,
-        size_t offset, size_t size,
-        camera_frame_metadata_t *metadata);
-
-    int                     getOrientation(int orientation, bool mirror);
-
-    status_t                setPreviewWindow(
-        const sp<IBinder>& binder,
-        const sp<ANativeWindow>& window);
-
-
-    // these are initialized in the constructor.
-    sp<CameraHardwareInterface>     mHardware;       // cleared after disconnect()
-    int                             mPreviewCallbackFlag;
-    int                             mOrientation;     // Current display orientation
-    bool                            mPlayShutterSound;
-    bool                            mLegacyMode; // camera2 api legacy mode?
-
-    // Ensures atomicity among the public methods
-    mutable Mutex                   mLock;
-    // This is a binder of Surface or Surface.
-    sp<IBinder>                     mSurface;
-    sp<ANativeWindow>               mPreviewWindow;
-
-    // If the user want us to return a copy of the preview frame (instead
-    // of the original one), we allocate mPreviewBuffer and reuse it if possible.
-    sp<MemoryHeapBase>              mPreviewBuffer;
-
-    // Debugging information
-    CameraParameters                mLatestSetParameters;
-
-    // mAvailableCallbackBuffers stores sp<IMemory> that HAL uses to send VideoNativeHandleMetadata.
-    // It will be used to send VideoNativeHandleMetadata back to HAL when camera receives the
-    // native handle from releaseRecordingFrameHandle.
-    Mutex                           mAvailableCallbackBuffersLock;
-    std::vector<sp<IMemory>>        mAvailableCallbackBuffers;
-
-    // We need to avoid the deadlock when the incoming command thread and
-    // the CameraHardwareInterface callback thread both want to grab mLock.
-    // An extra flag is used to tell the callback thread that it should stop
-    // trying to deliver the callback messages if the client is not
-    // interested in it anymore. For example, if the client is calling
-    // stopPreview(), the preview frame messages do not need to be delivered
-    // anymore.
-
-    // This function takes the same parameter as the enableMsgType() and
-    // disableMsgType() functions in CameraHardwareInterface.
-    void                    enableMsgType(int32_t msgType);
-    void                    disableMsgType(int32_t msgType);
-    volatile int32_t        mMsgEnabled;
-
-    // This function keeps trying to grab mLock, or give up if the message
-    // is found to be disabled. It returns true if mLock is grabbed.
-    bool                    lockIfMessageWanted(int32_t msgType);
-};
-
-}
-
-#endif
diff --git a/services/camera/libcameraservice/common/CameraProviderManager.cpp b/services/camera/libcameraservice/common/CameraProviderManager.cpp
index f115fd8..d11d470 100644
--- a/services/camera/libcameraservice/common/CameraProviderManager.cpp
+++ b/services/camera/libcameraservice/common/CameraProviderManager.cpp
@@ -417,46 +417,6 @@
     return mapToStatusT(status);
 }
 
-status_t CameraProviderManager::openSession(const std::string &id,
-        const sp<device::V1_0::ICameraDeviceCallback>& callback,
-        /*out*/
-        sp<device::V1_0::ICameraDevice> *session) {
-
-    std::lock_guard<std::mutex> lock(mInterfaceMutex);
-
-    auto deviceInfo = findDeviceInfoLocked(id,
-            /*minVersion*/ {1,0}, /*maxVersion*/ {2,0});
-    if (deviceInfo == nullptr) return NAME_NOT_FOUND;
-
-    auto *deviceInfo1 = static_cast<ProviderInfo::DeviceInfo1*>(deviceInfo);
-    sp<ProviderInfo> parentProvider = deviceInfo->mParentProvider.promote();
-    if (parentProvider == nullptr) {
-        return DEAD_OBJECT;
-    }
-    const sp<provider::V2_4::ICameraProvider> provider = parentProvider->startProviderInterface();
-    if (provider == nullptr) {
-        return DEAD_OBJECT;
-    }
-    saveRef(DeviceMode::CAMERA, id, provider);
-
-    auto interface = deviceInfo1->startDeviceInterface<
-            CameraProviderManager::ProviderInfo::DeviceInfo1::InterfaceT>();
-    if (interface == nullptr) {
-        return DEAD_OBJECT;
-    }
-    hardware::Return<Status> status = interface->open(callback);
-    if (!status.isOk()) {
-        removeRef(DeviceMode::CAMERA, id);
-        ALOGE("%s: Transaction error opening a session for camera device %s: %s",
-                __FUNCTION__, id.c_str(), status.description().c_str());
-        return DEAD_OBJECT;
-    }
-    if (status == Status::OK) {
-        *session = interface;
-    }
-    return mapToStatusT(status);
-}
-
 void CameraProviderManager::saveRef(DeviceMode usageType, const std::string &cameraId,
         sp<provider::V2_4::ICameraProvider> provider) {
     if (!kEnableLazyHal) {
@@ -1534,9 +1494,9 @@
     std::unique_ptr<DeviceInfo> deviceInfo;
     switch (major) {
         case 1:
-            deviceInfo = initializeDeviceInfo<DeviceInfo1>(name, mProviderTagid,
-                    id, minor);
-            break;
+            ALOGE("%s: Device %s: Unsupported HIDL device HAL major version %d:", __FUNCTION__,
+                    name.c_str(), major);
+            return BAD_VALUE;
         case 3:
             deviceInfo = initializeDeviceInfo<DeviceInfo3>(name, mProviderTagid,
                     id, minor);
@@ -2030,35 +1990,6 @@
 }
 
 template<>
-sp<device::V1_0::ICameraDevice>
-CameraProviderManager::ProviderInfo::startDeviceInterface
-        <device::V1_0::ICameraDevice>(const std::string &name) {
-    Status status;
-    sp<device::V1_0::ICameraDevice> cameraInterface;
-    hardware::Return<void> ret;
-    const sp<provider::V2_4::ICameraProvider> interface = startProviderInterface();
-    if (interface == nullptr) {
-        return nullptr;
-    }
-    ret = interface->getCameraDeviceInterface_V1_x(name, [&status, &cameraInterface](
-        Status s, sp<device::V1_0::ICameraDevice> interface) {
-                status = s;
-                cameraInterface = interface;
-            });
-    if (!ret.isOk()) {
-        ALOGE("%s: Transaction error trying to obtain interface for camera device %s: %s",
-                __FUNCTION__, name.c_str(), ret.description().c_str());
-        return nullptr;
-    }
-    if (status != Status::OK) {
-        ALOGE("%s: Unable to obtain interface for camera device %s: %s", __FUNCTION__,
-                name.c_str(), statusToString(status));
-        return nullptr;
-    }
-    return cameraInterface;
-}
-
-template<>
 sp<device::V3_2::ICameraDevice>
 CameraProviderManager::ProviderInfo::startDeviceInterface
         <device::V3_2::ICameraDevice>(const std::string &name) {
@@ -2111,126 +2042,6 @@
     return mapToStatusT(s);
 }
 
-CameraProviderManager::ProviderInfo::DeviceInfo1::DeviceInfo1(const std::string& name,
-        const metadata_vendor_id_t tagId, const std::string &id,
-        uint16_t minorVersion,
-        const CameraResourceCost& resourceCost,
-        sp<ProviderInfo> parentProvider,
-        const std::vector<std::string>& publicCameraIds,
-        sp<InterfaceT> interface) :
-        DeviceInfo(name, tagId, id, hardware::hidl_version{1, minorVersion},
-                   publicCameraIds, resourceCost, parentProvider) {
-    // Get default parameters and initialize flash unit availability
-    // Requires powering on the camera device
-    hardware::Return<Status> status = interface->open(nullptr);
-    if (!status.isOk()) {
-        ALOGE("%s: Transaction error opening camera device %s to check for a flash unit: %s",
-                __FUNCTION__, id.c_str(), status.description().c_str());
-        return;
-    }
-    if (status != Status::OK) {
-        ALOGE("%s: Unable to open camera device %s to check for a flash unit: %s", __FUNCTION__,
-                id.c_str(), CameraProviderManager::statusToString(status));
-        return;
-    }
-    hardware::Return<void> ret;
-    ret = interface->getParameters([this](const hardware::hidl_string& parms) {
-                mDefaultParameters.unflatten(String8(parms.c_str()));
-            });
-    if (!ret.isOk()) {
-        ALOGE("%s: Transaction error reading camera device %s params to check for a flash unit: %s",
-                __FUNCTION__, id.c_str(), status.description().c_str());
-        return;
-    }
-    const char *flashMode =
-            mDefaultParameters.get(CameraParameters::KEY_SUPPORTED_FLASH_MODES);
-    if (flashMode && strstr(flashMode, CameraParameters::FLASH_MODE_TORCH)) {
-        mHasFlashUnit = true;
-    }
-
-    status_t res = cacheCameraInfo(interface);
-    if (res != OK) {
-        ALOGE("%s: Could not cache CameraInfo", __FUNCTION__);
-        return;
-    }
-
-    ret = interface->close();
-    if (!ret.isOk()) {
-        ALOGE("%s: Transaction error closing camera device %s after check for a flash unit: %s",
-                __FUNCTION__, id.c_str(), status.description().c_str());
-    }
-
-    if (!kEnableLazyHal) {
-        // Save HAL reference indefinitely
-        mSavedInterface = interface;
-    }
-}
-
-CameraProviderManager::ProviderInfo::DeviceInfo1::~DeviceInfo1() {}
-
-status_t CameraProviderManager::ProviderInfo::DeviceInfo1::setTorchMode(bool enabled) {
-    return setTorchModeForDevice<InterfaceT>(enabled);
-}
-
-status_t CameraProviderManager::ProviderInfo::DeviceInfo1::getCameraInfo(
-        hardware::CameraInfo *info) const {
-    if (info == nullptr) return BAD_VALUE;
-    *info = mInfo;
-    return OK;
-}
-
-status_t CameraProviderManager::ProviderInfo::DeviceInfo1::cacheCameraInfo(
-        sp<CameraProviderManager::ProviderInfo::DeviceInfo1::InterfaceT> interface) {
-    Status status;
-    device::V1_0::CameraInfo cInfo;
-    hardware::Return<void> ret;
-    ret = interface->getCameraInfo([&status, &cInfo](Status s, device::V1_0::CameraInfo camInfo) {
-                status = s;
-                cInfo = camInfo;
-            });
-    if (!ret.isOk()) {
-        ALOGE("%s: Transaction error reading camera info from device %s: %s",
-                __FUNCTION__, mId.c_str(), ret.description().c_str());
-        return DEAD_OBJECT;
-    }
-    if (status != Status::OK) {
-        return mapToStatusT(status);
-    }
-
-    switch(cInfo.facing) {
-        case device::V1_0::CameraFacing::BACK:
-            mInfo.facing = hardware::CAMERA_FACING_BACK;
-            break;
-        case device::V1_0::CameraFacing::EXTERNAL:
-            // Map external to front for legacy API
-        case device::V1_0::CameraFacing::FRONT:
-            mInfo.facing = hardware::CAMERA_FACING_FRONT;
-            break;
-        default:
-            ALOGW("%s: Device %s: Unknown camera facing: %d",
-                    __FUNCTION__, mId.c_str(), cInfo.facing);
-            mInfo.facing = hardware::CAMERA_FACING_BACK;
-    }
-    mInfo.orientation = cInfo.orientation;
-
-    return OK;
-}
-
-status_t CameraProviderManager::ProviderInfo::DeviceInfo1::dumpState(int fd) {
-    native_handle_t* handle = native_handle_create(1,0);
-    handle->data[0] = fd;
-    const sp<InterfaceT> interface = startDeviceInterface<InterfaceT>();
-    if (interface == nullptr) {
-        return DEAD_OBJECT;
-    }
-    hardware::Return<Status> s = interface->dumpState(handle);
-    native_handle_delete(handle);
-    if (!s.isOk()) {
-        return INVALID_OPERATION;
-    }
-    return mapToStatusT(s);
-}
-
 CameraProviderManager::ProviderInfo::DeviceInfo3::DeviceInfo3(const std::string& name,
         const metadata_vendor_id_t tagId, const std::string &id,
         uint16_t minorVersion,
diff --git a/services/camera/libcameraservice/common/CameraProviderManager.h b/services/camera/libcameraservice/common/CameraProviderManager.h
index 281cb3d..12da5e6 100644
--- a/services/camera/libcameraservice/common/CameraProviderManager.h
+++ b/services/camera/libcameraservice/common/CameraProviderManager.h
@@ -269,11 +269,6 @@
             /*out*/
             sp<hardware::camera::device::V3_2::ICameraDeviceSession> *session);
 
-    status_t openSession(const std::string &id,
-            const sp<hardware::camera::device::V1_0::ICameraDeviceCallback>& callback,
-            /*out*/
-            sp<hardware::camera::device::V1_0::ICameraDevice> *session);
-
     /**
      * Save the ICameraProvider while it is being used by a camera or torch client
      */
@@ -511,27 +506,6 @@
         // physical camera IDs.
         std::vector<std::string> mProviderPublicCameraIds;
 
-        // HALv1-specific camera fields, including the actual device interface
-        struct DeviceInfo1 : public DeviceInfo {
-            typedef hardware::camera::device::V1_0::ICameraDevice InterfaceT;
-
-            virtual status_t setTorchMode(bool enabled) override;
-            virtual status_t getCameraInfo(hardware::CameraInfo *info) const override;
-            //In case of Device1Info assume that we are always API1 compatible
-            virtual bool isAPI1Compatible() const override { return true; }
-            virtual status_t dumpState(int fd) override;
-            DeviceInfo1(const std::string& name, const metadata_vendor_id_t tagId,
-                    const std::string &id, uint16_t minorVersion,
-                    const hardware::camera::common::V1_0::CameraResourceCost& resourceCost,
-                    sp<ProviderInfo> parentProvider,
-                    const std::vector<std::string>& publicCameraIds,
-                    sp<InterfaceT> interface);
-            virtual ~DeviceInfo1();
-        private:
-            CameraParameters2 mDefaultParameters;
-            status_t cacheCameraInfo(sp<InterfaceT> interface);
-        };
-
         // HALv3-specific camera fields, including the actual device interface
         struct DeviceInfo3 : public DeviceInfo {
             typedef hardware::camera::device::V3_2::ICameraDevice InterfaceT;
diff --git a/services/camera/libcameraservice/device1/CameraHardwareInterface.cpp b/services/camera/libcameraservice/device1/CameraHardwareInterface.cpp
deleted file mode 100644
index 62ef681..0000000
--- a/services/camera/libcameraservice/device1/CameraHardwareInterface.cpp
+++ /dev/null
@@ -1,818 +0,0 @@
-/*
- * Copyright (C) 2017 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#define LOG_TAG "CameraHardwareInterface"
-//#define LOG_NDEBUG 0
-
-#include <inttypes.h>
-#include <media/hardware/HardwareAPI.h> // For VideoNativeHandleMetadata
-#include "CameraHardwareInterface.h"
-
-namespace android {
-
-using namespace hardware::camera::device::V1_0;
-using namespace hardware::camera::common::V1_0;
-using hardware::hidl_handle;
-
-CameraHardwareInterface::~CameraHardwareInterface()
-{
-    ALOGI("Destroying camera %s", mName.string());
-    if (mHidlDevice != nullptr) {
-        mHidlDevice->close();
-        mHidlDevice.clear();
-        cleanupCirculatingBuffers();
-    }
-}
-
-status_t CameraHardwareInterface::initialize(sp<CameraProviderManager> manager) {
-    ALOGI("Opening camera %s", mName.string());
-
-    status_t ret = manager->openSession(mName.string(), this, &mHidlDevice);
-    if (ret != OK) {
-        ALOGE("%s: openSession failed! %s (%d)", __FUNCTION__, strerror(-ret), ret);
-    }
-    return ret;
-}
-
-status_t CameraHardwareInterface::setPreviewScalingMode(int scalingMode)
-{
-    int rc = OK;
-    mPreviewScalingMode = scalingMode;
-    if (mPreviewWindow != nullptr) {
-        rc = native_window_set_scaling_mode(mPreviewWindow.get(),
-                scalingMode);
-    }
-    return rc;
-}
-
-status_t CameraHardwareInterface::setPreviewTransform(int transform) {
-    int rc = OK;
-    mPreviewTransform = transform;
-    if (mPreviewWindow != nullptr) {
-        rc = native_window_set_buffers_transform(mPreviewWindow.get(),
-                mPreviewTransform);
-    }
-    return rc;
-}
-
-/**
- * Implementation of android::hardware::camera::device::V1_0::ICameraDeviceCallback
- */
-hardware::Return<void> CameraHardwareInterface::notifyCallback(
-        NotifyCallbackMsg msgType, int32_t ext1, int32_t ext2) {
-    sNotifyCb((int32_t) msgType, ext1, ext2, (void*) this);
-    return hardware::Void();
-}
-
-hardware::Return<uint32_t> CameraHardwareInterface::registerMemory(
-        const hardware::hidl_handle& descriptor,
-        uint32_t bufferSize, uint32_t bufferCount) {
-    if (descriptor->numFds != 1) {
-        ALOGE("%s: camera memory descriptor has numFds %d (expect 1)",
-                __FUNCTION__, descriptor->numFds);
-        return 0;
-    }
-    if (descriptor->data[0] < 0) {
-        ALOGE("%s: camera memory descriptor has FD %d (expect >= 0)",
-                __FUNCTION__, descriptor->data[0]);
-        return 0;
-    }
-
-    camera_memory_t* mem = sGetMemory(descriptor->data[0], bufferSize, bufferCount, this);
-    sp<CameraHeapMemory> camMem(static_cast<CameraHeapMemory *>(mem->handle));
-    int memPoolId = camMem->mHeap->getHeapID();
-    if (memPoolId < 0) {
-        ALOGE("%s: CameraHeapMemory has FD %d (expect >= 0)", __FUNCTION__, memPoolId);
-        return 0;
-    }
-    std::lock_guard<std::mutex> lock(mHidlMemPoolMapLock);
-    mHidlMemPoolMap.insert(std::make_pair(memPoolId, mem));
-    return memPoolId;
-}
-
-hardware::Return<void> CameraHardwareInterface::unregisterMemory(uint32_t memId) {
-    camera_memory_t* mem = nullptr;
-    {
-        std::lock_guard<std::mutex> lock(mHidlMemPoolMapLock);
-        if (mHidlMemPoolMap.count(memId) == 0) {
-            ALOGE("%s: memory pool ID %d not found", __FUNCTION__, memId);
-            return hardware::Void();
-        }
-        mem = mHidlMemPoolMap.at(memId);
-        mHidlMemPoolMap.erase(memId);
-    }
-    sPutMemory(mem);
-    return hardware::Void();
-}
-
-hardware::Return<void> CameraHardwareInterface::dataCallback(
-        DataCallbackMsg msgType, uint32_t data, uint32_t bufferIndex,
-        const hardware::camera::device::V1_0::CameraFrameMetadata& metadata) {
-    camera_memory_t* mem = nullptr;
-    {
-        std::lock_guard<std::mutex> lock(mHidlMemPoolMapLock);
-        if (mHidlMemPoolMap.count(data) == 0) {
-            ALOGE("%s: memory pool ID %d not found", __FUNCTION__, data);
-            return hardware::Void();
-        }
-        mem = mHidlMemPoolMap.at(data);
-    }
-    camera_frame_metadata_t md;
-    md.number_of_faces = metadata.faces.size();
-    md.faces = (camera_face_t*) metadata.faces.data();
-    sDataCb((int32_t) msgType, mem, bufferIndex, &md, this);
-    return hardware::Void();
-}
-
-hardware::Return<void> CameraHardwareInterface::dataCallbackTimestamp(
-        DataCallbackMsg msgType, uint32_t data,
-        uint32_t bufferIndex, int64_t timestamp) {
-    camera_memory_t* mem = nullptr;
-    {
-        std::lock_guard<std::mutex> lock(mHidlMemPoolMapLock);
-        if (mHidlMemPoolMap.count(data) == 0) {
-            ALOGE("%s: memory pool ID %d not found", __FUNCTION__, data);
-            return hardware::Void();
-        }
-        mem = mHidlMemPoolMap.at(data);
-    }
-    sDataCbTimestamp(timestamp, (int32_t) msgType, mem, bufferIndex, this);
-    return hardware::Void();
-}
-
-hardware::Return<void> CameraHardwareInterface::handleCallbackTimestamp(
-        DataCallbackMsg msgType, const hidl_handle& frameData, uint32_t data,
-        uint32_t bufferIndex, int64_t timestamp) {
-    camera_memory_t* mem = nullptr;
-    {
-        std::lock_guard<std::mutex> lock(mHidlMemPoolMapLock);
-        if (mHidlMemPoolMap.count(data) == 0) {
-            ALOGE("%s: memory pool ID %d not found", __FUNCTION__, data);
-            return hardware::Void();
-        }
-        mem = mHidlMemPoolMap.at(data);
-    }
-    sp<CameraHeapMemory> heapMem(static_cast<CameraHeapMemory *>(mem->handle));
-    // TODO: Using unsecurePointer() has some associated security pitfalls
-    //       (see declaration for details).
-    //       Either document why it is safe in this case or address the
-    //       issue (e.g. by copying).
-    VideoNativeHandleMetadata* md = (VideoNativeHandleMetadata*)
-            heapMem->mBuffers[bufferIndex]->unsecurePointer();
-    md->pHandle = const_cast<native_handle_t*>(frameData.getNativeHandle());
-    sDataCbTimestamp(timestamp, (int32_t) msgType, mem, bufferIndex, this);
-    return hardware::Void();
-}
-
-hardware::Return<void> CameraHardwareInterface::handleCallbackTimestampBatch(
-        DataCallbackMsg msgType,
-        const hardware::hidl_vec<hardware::camera::device::V1_0::HandleTimestampMessage>& messages) {
-    std::vector<android::HandleTimestampMessage> msgs;
-    msgs.reserve(messages.size());
-    {
-        std::lock_guard<std::mutex> lock(mHidlMemPoolMapLock);
-        for (const auto& hidl_msg : messages) {
-            if (mHidlMemPoolMap.count(hidl_msg.data) == 0) {
-                ALOGE("%s: memory pool ID %d not found", __FUNCTION__, hidl_msg.data);
-                return hardware::Void();
-            }
-            sp<CameraHeapMemory> mem(
-                    static_cast<CameraHeapMemory *>(mHidlMemPoolMap.at(hidl_msg.data)->handle));
-
-            if (hidl_msg.bufferIndex >= mem->mNumBufs) {
-                ALOGE("%s: invalid buffer index %d, max allowed is %d", __FUNCTION__,
-                     hidl_msg.bufferIndex, mem->mNumBufs);
-                return hardware::Void();
-            }
-            // TODO: Using unsecurePointer() has some associated security pitfalls
-            //       (see declaration for details).
-            //       Either document why it is safe in this case or address the
-            //       issue (e.g. by copying).
-            VideoNativeHandleMetadata* md = (VideoNativeHandleMetadata*)
-                    mem->mBuffers[hidl_msg.bufferIndex]->unsecurePointer();
-            md->pHandle = const_cast<native_handle_t*>(hidl_msg.frameData.getNativeHandle());
-
-            msgs.push_back({hidl_msg.timestamp, mem->mBuffers[hidl_msg.bufferIndex]});
-        }
-    }
-    mDataCbTimestampBatch((int32_t) msgType, msgs, mCbUser);
-    return hardware::Void();
-}
-
-std::pair<bool, uint64_t> CameraHardwareInterface::getBufferId(
-        ANativeWindowBuffer* anb) {
-    std::lock_guard<std::mutex> lock(mBufferIdMapLock);
-
-    buffer_handle_t& buf = anb->handle;
-    auto it = mBufferIdMap.find(buf);
-    if (it == mBufferIdMap.end()) {
-        uint64_t bufId = mNextBufferId++;
-        mBufferIdMap[buf] = bufId;
-        mReversedBufMap[bufId] = anb;
-        return std::make_pair(true, bufId);
-    } else {
-        return std::make_pair(false, it->second);
-    }
-}
-
-void CameraHardwareInterface::cleanupCirculatingBuffers() {
-    std::lock_guard<std::mutex> lock(mBufferIdMapLock);
-    mBufferIdMap.clear();
-    mReversedBufMap.clear();
-}
-
-hardware::Return<void>
-CameraHardwareInterface::dequeueBuffer(dequeueBuffer_cb _hidl_cb) {
-    ANativeWindow *a = mPreviewWindow.get();
-    if (a == nullptr) {
-        ALOGE("%s: preview window is null", __FUNCTION__);
-        return hardware::Void();
-    }
-    ANativeWindowBuffer* anb;
-    int rc = native_window_dequeue_buffer_and_wait(a, &anb);
-    Status s = Status::INTERNAL_ERROR;
-    uint64_t bufferId = 0;
-    uint32_t stride = 0;
-    hidl_handle buf = nullptr;
-    if (rc == OK) {
-        s = Status::OK;
-        auto pair = getBufferId(anb);
-        buf = (pair.first) ? anb->handle : nullptr;
-        bufferId = pair.second;
-        stride = anb->stride;
-    }
-
-    _hidl_cb(s, bufferId, buf, stride);
-    return hardware::Void();
-}
-
-hardware::Return<Status>
-CameraHardwareInterface::enqueueBuffer(uint64_t bufferId) {
-    ANativeWindow *a = mPreviewWindow.get();
-    if (a == nullptr) {
-        ALOGE("%s: preview window is null", __FUNCTION__);
-        return Status::INTERNAL_ERROR;
-    }
-    if (mReversedBufMap.count(bufferId) == 0) {
-        ALOGE("%s: bufferId %" PRIu64 " not found", __FUNCTION__, bufferId);
-        return Status::ILLEGAL_ARGUMENT;
-    }
-    int rc = a->queueBuffer(a, mReversedBufMap.at(bufferId), -1);
-    if (rc == 0) {
-        return Status::OK;
-    }
-    return Status::INTERNAL_ERROR;
-}
-
-hardware::Return<Status>
-CameraHardwareInterface::cancelBuffer(uint64_t bufferId) {
-    ANativeWindow *a = mPreviewWindow.get();
-    if (a == nullptr) {
-        ALOGE("%s: preview window is null", __FUNCTION__);
-        return Status::INTERNAL_ERROR;
-    }
-    if (mReversedBufMap.count(bufferId) == 0) {
-        ALOGE("%s: bufferId %" PRIu64 " not found", __FUNCTION__, bufferId);
-        return Status::ILLEGAL_ARGUMENT;
-    }
-    int rc = a->cancelBuffer(a, mReversedBufMap.at(bufferId), -1);
-    if (rc == 0) {
-        return Status::OK;
-    }
-    return Status::INTERNAL_ERROR;
-}
-
-hardware::Return<Status>
-CameraHardwareInterface::setBufferCount(uint32_t count) {
-    ANativeWindow *a = mPreviewWindow.get();
-    if (a != nullptr) {
-        // Workaround for b/27039775
-        // Previously, setting the buffer count would reset the buffer
-        // queue's flag that allows for all buffers to be dequeued on the
-        // producer side, instead of just the producer's declared max count,
-        // if no filled buffers have yet been queued by the producer.  This
-        // reset no longer happens, but some HALs depend on this behavior,
-        // so it needs to be maintained for HAL backwards compatibility.
-        // Simulate the prior behavior by disconnecting/reconnecting to the
-        // window and setting the values again.  This has the drawback of
-        // actually causing memory reallocation, which may not have happened
-        // in the past.
-        native_window_api_disconnect(a, NATIVE_WINDOW_API_CAMERA);
-        native_window_api_connect(a, NATIVE_WINDOW_API_CAMERA);
-        if (mPreviewScalingMode != NOT_SET) {
-            native_window_set_scaling_mode(a, mPreviewScalingMode);
-        }
-        if (mPreviewTransform != NOT_SET) {
-            native_window_set_buffers_transform(a, mPreviewTransform);
-        }
-        if (mPreviewWidth != NOT_SET) {
-            native_window_set_buffers_dimensions(a,
-                    mPreviewWidth, mPreviewHeight);
-            native_window_set_buffers_format(a, mPreviewFormat);
-        }
-        if (mPreviewUsage != 0) {
-            native_window_set_usage(a, mPreviewUsage);
-        }
-        if (mPreviewSwapInterval != NOT_SET) {
-            a->setSwapInterval(a, mPreviewSwapInterval);
-        }
-        if (mPreviewCrop.left != NOT_SET) {
-            native_window_set_crop(a, &(mPreviewCrop));
-        }
-    }
-    int rc = native_window_set_buffer_count(a, count);
-    if (rc == OK) {
-        cleanupCirculatingBuffers();
-        return Status::OK;
-    }
-    return Status::INTERNAL_ERROR;
-}
-
-hardware::Return<Status>
-CameraHardwareInterface::setBuffersGeometry(
-        uint32_t w, uint32_t h, hardware::graphics::common::V1_0::PixelFormat format) {
-    Status s = Status::INTERNAL_ERROR;
-    ANativeWindow *a = mPreviewWindow.get();
-    if (a == nullptr) {
-        ALOGE("%s: preview window is null", __FUNCTION__);
-        return s;
-    }
-    mPreviewWidth = w;
-    mPreviewHeight = h;
-    mPreviewFormat = (int) format;
-    int rc = native_window_set_buffers_dimensions(a, w, h);
-    if (rc == OK) {
-        rc = native_window_set_buffers_format(a, mPreviewFormat);
-    }
-    if (rc == OK) {
-        cleanupCirculatingBuffers();
-        s = Status::OK;
-    }
-    return s;
-}
-
-hardware::Return<Status>
-CameraHardwareInterface::setCrop(int32_t left, int32_t top, int32_t right, int32_t bottom) {
-    Status s = Status::INTERNAL_ERROR;
-    ANativeWindow *a = mPreviewWindow.get();
-    if (a == nullptr) {
-        ALOGE("%s: preview window is null", __FUNCTION__);
-        return s;
-    }
-    mPreviewCrop.left = left;
-    mPreviewCrop.top = top;
-    mPreviewCrop.right = right;
-    mPreviewCrop.bottom = bottom;
-    int rc = native_window_set_crop(a, &mPreviewCrop);
-    if (rc == OK) {
-        s = Status::OK;
-    }
-    return s;
-}
-
-hardware::Return<Status>
-CameraHardwareInterface::setUsage(hardware::graphics::common::V1_0::BufferUsage usage) {
-    Status s = Status::INTERNAL_ERROR;
-    ANativeWindow *a = mPreviewWindow.get();
-    if (a == nullptr) {
-        ALOGE("%s: preview window is null", __FUNCTION__);
-        return s;
-    }
-    mPreviewUsage = static_cast<uint64_t> (usage);
-    int rc = native_window_set_usage(a, mPreviewUsage);
-    if (rc == OK) {
-        cleanupCirculatingBuffers();
-        s = Status::OK;
-    }
-    return s;
-}
-
-hardware::Return<Status>
-CameraHardwareInterface::setSwapInterval(int32_t interval) {
-    Status s = Status::INTERNAL_ERROR;
-    ANativeWindow *a = mPreviewWindow.get();
-    if (a == nullptr) {
-        ALOGE("%s: preview window is null", __FUNCTION__);
-        return s;
-    }
-    mPreviewSwapInterval = interval;
-    int rc = a->setSwapInterval(a, interval);
-    if (rc == OK) {
-        s = Status::OK;
-    }
-    return s;
-}
-
-hardware::Return<void>
-CameraHardwareInterface::getMinUndequeuedBufferCount(getMinUndequeuedBufferCount_cb _hidl_cb) {
-    ANativeWindow *a = mPreviewWindow.get();
-    if (a == nullptr) {
-        ALOGE("%s: preview window is null", __FUNCTION__);
-        return hardware::Void();
-    }
-    int count = 0;
-    int rc = a->query(a, NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS, &count);
-    Status s = Status::INTERNAL_ERROR;
-    if (rc == OK) {
-        s = Status::OK;
-    }
-    _hidl_cb(s, count);
-    return hardware::Void();
-}
-
-hardware::Return<Status>
-CameraHardwareInterface::setTimestamp(int64_t timestamp) {
-    Status s = Status::INTERNAL_ERROR;
-    ANativeWindow *a = mPreviewWindow.get();
-    if (a == nullptr) {
-        ALOGE("%s: preview window is null", __FUNCTION__);
-        return s;
-    }
-    int rc = native_window_set_buffers_timestamp(a, timestamp);
-    if (rc == OK) {
-        s = Status::OK;
-    }
-    return s;
-}
-
-status_t CameraHardwareInterface::setPreviewWindow(const sp<ANativeWindow>& buf)
-{
-    ALOGV("%s(%s) buf %p", __FUNCTION__, mName.string(), buf.get());
-    if (CC_LIKELY(mHidlDevice != nullptr)) {
-        mPreviewWindow = buf;
-        if (buf != nullptr) {
-            if (mPreviewScalingMode != NOT_SET) {
-                setPreviewScalingMode(mPreviewScalingMode);
-            }
-            if (mPreviewTransform != NOT_SET) {
-                setPreviewTransform(mPreviewTransform);
-            }
-        }
-        return CameraProviderManager::mapToStatusT(
-                mHidlDevice->setPreviewWindow(buf.get() ? this : nullptr));
-    }
-    return INVALID_OPERATION;
-}
-
-void CameraHardwareInterface::setCallbacks(notify_callback notify_cb,
-        data_callback data_cb,
-        data_callback_timestamp data_cb_timestamp,
-        data_callback_timestamp_batch data_cb_timestamp_batch,
-        void* user)
-{
-    mNotifyCb = notify_cb;
-    mDataCb = data_cb;
-    mDataCbTimestamp = data_cb_timestamp;
-    mDataCbTimestampBatch = data_cb_timestamp_batch;
-    mCbUser = user;
-
-    ALOGV("%s(%s)", __FUNCTION__, mName.string());
-}
-
-void CameraHardwareInterface::enableMsgType(int32_t msgType)
-{
-    ALOGV("%s(%s)", __FUNCTION__, mName.string());
-    if (CC_LIKELY(mHidlDevice != nullptr)) {
-        mHidlDevice->enableMsgType(msgType);
-    }
-}
-
-void CameraHardwareInterface::disableMsgType(int32_t msgType)
-{
-    ALOGV("%s(%s)", __FUNCTION__, mName.string());
-    if (CC_LIKELY(mHidlDevice != nullptr)) {
-        mHidlDevice->disableMsgType(msgType);
-    }
-}
-
-int CameraHardwareInterface::msgTypeEnabled(int32_t msgType)
-{
-    ALOGV("%s(%s)", __FUNCTION__, mName.string());
-    if (CC_LIKELY(mHidlDevice != nullptr)) {
-        return mHidlDevice->msgTypeEnabled(msgType);
-    }
-    return false;
-}
-
-status_t CameraHardwareInterface::startPreview()
-{
-    ALOGV("%s(%s)", __FUNCTION__, mName.string());
-    if (CC_LIKELY(mHidlDevice != nullptr)) {
-        return CameraProviderManager::mapToStatusT(
-                mHidlDevice->startPreview());
-    }
-    return INVALID_OPERATION;
-}
-
-void CameraHardwareInterface::stopPreview()
-{
-    ALOGV("%s(%s)", __FUNCTION__, mName.string());
-    if (CC_LIKELY(mHidlDevice != nullptr)) {
-        mHidlDevice->stopPreview();
-    }
-}
-
-int CameraHardwareInterface::previewEnabled()
-{
-    ALOGV("%s(%s)", __FUNCTION__, mName.string());
-    if (CC_LIKELY(mHidlDevice != nullptr)) {
-        return mHidlDevice->previewEnabled();
-    }
-    return false;
-}
-
-status_t CameraHardwareInterface::storeMetaDataInBuffers(int enable)
-{
-    ALOGV("%s(%s)", __FUNCTION__, mName.string());
-    if (CC_LIKELY(mHidlDevice != nullptr)) {
-        return CameraProviderManager::mapToStatusT(
-                mHidlDevice->storeMetaDataInBuffers(enable));
-    }
-    return enable ? INVALID_OPERATION: OK;
-}
-
-status_t CameraHardwareInterface::startRecording()
-{
-    ALOGV("%s(%s)", __FUNCTION__, mName.string());
-    if (CC_LIKELY(mHidlDevice != nullptr)) {
-        return CameraProviderManager::mapToStatusT(
-                mHidlDevice->startRecording());
-    }
-    return INVALID_OPERATION;
-}
-
-/**
- * Stop a previously started recording.
- */
-void CameraHardwareInterface::stopRecording()
-{
-    ALOGV("%s(%s)", __FUNCTION__, mName.string());
-    if (CC_LIKELY(mHidlDevice != nullptr)) {
-        mHidlDevice->stopRecording();
-    }
-}
-
-/**
- * Returns true if recording is enabled.
- */
-int CameraHardwareInterface::recordingEnabled()
-{
-    ALOGV("%s(%s)", __FUNCTION__, mName.string());
-    if (CC_LIKELY(mHidlDevice != nullptr)) {
-        return mHidlDevice->recordingEnabled();
-    }
-    return false;
-}
-
-void CameraHardwareInterface::releaseRecordingFrame(const sp<IMemory>& mem)
-{
-    ALOGV("%s(%s)", __FUNCTION__, mName.string());
-    ssize_t offset;
-    size_t size;
-    sp<IMemoryHeap> heap = mem->getMemory(&offset, &size);
-    int heapId = heap->getHeapID();
-    int bufferIndex = offset / size;
-    if (CC_LIKELY(mHidlDevice != nullptr)) {
-        if (size == sizeof(VideoNativeHandleMetadata)) {
-            // TODO: Using unsecurePointer() has some associated security pitfalls
-            //       (see declaration for details).
-            //       Either document why it is safe in this case or address the
-            //       issue (e.g. by copying).
-            VideoNativeHandleMetadata* md = (VideoNativeHandleMetadata*) mem->unsecurePointer();
-            // Caching the handle here because md->pHandle will be subject to HAL's edit
-            native_handle_t* nh = md->pHandle;
-            hidl_handle frame = nh;
-            mHidlDevice->releaseRecordingFrameHandle(heapId, bufferIndex, frame);
-            native_handle_close(nh);
-            native_handle_delete(nh);
-        } else {
-            mHidlDevice->releaseRecordingFrame(heapId, bufferIndex);
-        }
-    }
-}
-
-void CameraHardwareInterface::releaseRecordingFrameBatch(const std::vector<sp<IMemory>>& frames)
-{
-    ALOGV("%s(%s)", __FUNCTION__, mName.string());
-    size_t n = frames.size();
-    std::vector<VideoFrameMessage> msgs;
-    msgs.reserve(n);
-    for (auto& mem : frames) {
-        if (CC_LIKELY(mHidlDevice != nullptr)) {
-            ssize_t offset;
-            size_t size;
-            sp<IMemoryHeap> heap = mem->getMemory(&offset, &size);
-            if (size == sizeof(VideoNativeHandleMetadata)) {
-                uint32_t heapId = heap->getHeapID();
-                uint32_t bufferIndex = offset / size;
-                // TODO: Using unsecurePointer() has some associated security pitfalls
-                //       (see declaration for details).
-                //       Either document why it is safe in this case or address the
-                //       issue (e.g. by copying).
-                VideoNativeHandleMetadata* md = (VideoNativeHandleMetadata*) mem->unsecurePointer();
-                // Caching the handle here because md->pHandle will be subject to HAL's edit
-                native_handle_t* nh = md->pHandle;
-                VideoFrameMessage msg;
-                msgs.push_back({nh, heapId, bufferIndex});
-            } else {
-                ALOGE("%s only supports VideoNativeHandleMetadata mode", __FUNCTION__);
-                return;
-            }
-        }
-    }
-
-    mHidlDevice->releaseRecordingFrameHandleBatch(msgs);
-
-    for (auto& msg : msgs) {
-        native_handle_t* nh = const_cast<native_handle_t*>(msg.frameData.getNativeHandle());
-        native_handle_close(nh);
-        native_handle_delete(nh);
-    }
-}
-
-status_t CameraHardwareInterface::autoFocus()
-{
-    ALOGV("%s(%s)", __FUNCTION__, mName.string());
-    if (CC_LIKELY(mHidlDevice != nullptr)) {
-        return CameraProviderManager::mapToStatusT(
-                mHidlDevice->autoFocus());
-    }
-    return INVALID_OPERATION;
-}
-
-status_t CameraHardwareInterface::cancelAutoFocus()
-{
-    ALOGV("%s(%s)", __FUNCTION__, mName.string());
-    if (CC_LIKELY(mHidlDevice != nullptr)) {
-        return CameraProviderManager::mapToStatusT(
-                mHidlDevice->cancelAutoFocus());
-    }
-    return INVALID_OPERATION;
-}
-
-status_t CameraHardwareInterface::takePicture()
-{
-    ALOGV("%s(%s)", __FUNCTION__, mName.string());
-    if (CC_LIKELY(mHidlDevice != nullptr)) {
-        return CameraProviderManager::mapToStatusT(
-                mHidlDevice->takePicture());
-    }
-    return INVALID_OPERATION;
-}
-
-status_t CameraHardwareInterface::cancelPicture()
-{
-    ALOGV("%s(%s)", __FUNCTION__, mName.string());
-    if (CC_LIKELY(mHidlDevice != nullptr)) {
-        return CameraProviderManager::mapToStatusT(
-                mHidlDevice->cancelPicture());
-    }
-    return INVALID_OPERATION;
-}
-
-status_t CameraHardwareInterface::setParameters(const CameraParameters &params)
-{
-    ALOGV("%s(%s)", __FUNCTION__, mName.string());
-    if (CC_LIKELY(mHidlDevice != nullptr)) {
-        return CameraProviderManager::mapToStatusT(
-                mHidlDevice->setParameters(params.flatten().string()));
-    }
-    return INVALID_OPERATION;
-}
-
-CameraParameters CameraHardwareInterface::getParameters() const
-{
-    ALOGV("%s(%s)", __FUNCTION__, mName.string());
-    CameraParameters parms;
-    if (CC_LIKELY(mHidlDevice != nullptr)) {
-        hardware::hidl_string outParam;
-        mHidlDevice->getParameters(
-                [&outParam](const auto& outStr) {
-                    outParam = outStr;
-                });
-        String8 tmp(outParam.c_str());
-        parms.unflatten(tmp);
-    }
-    return parms;
-}
-
-status_t CameraHardwareInterface::sendCommand(int32_t cmd, int32_t arg1, int32_t arg2)
-{
-    ALOGV("%s(%s)", __FUNCTION__, mName.string());
-    if (CC_LIKELY(mHidlDevice != nullptr)) {
-        return CameraProviderManager::mapToStatusT(
-                mHidlDevice->sendCommand((CommandType) cmd, arg1, arg2));
-    }
-    return INVALID_OPERATION;
-}
-
-/**
- * Release the hardware resources owned by this object.  Note that this is
- * *not* done in the destructor.
- */
-void CameraHardwareInterface::release() {
-    ALOGV("%s(%s)", __FUNCTION__, mName.string());
-    if (CC_LIKELY(mHidlDevice != nullptr)) {
-        mHidlDevice->close();
-        mHidlDevice.clear();
-    }
-}
-
-/**
- * Dump state of the camera hardware
- */
-status_t CameraHardwareInterface::dump(int fd, const Vector<String16>& /*args*/) const
-{
-    ALOGV("%s(%s)", __FUNCTION__, mName.string());
-    if (CC_LIKELY(mHidlDevice != nullptr)) {
-        native_handle_t* handle = native_handle_create(1,0);
-        handle->data[0] = fd;
-        Status s = mHidlDevice->dumpState(handle);
-        native_handle_delete(handle);
-        return CameraProviderManager::mapToStatusT(s);
-    }
-    return OK; // It's fine if the HAL doesn't implement dump()
-}
-
-void CameraHardwareInterface::sNotifyCb(int32_t msg_type, int32_t ext1,
-                        int32_t ext2, void *user)
-{
-    ALOGV("%s", __FUNCTION__);
-    CameraHardwareInterface *object =
-            static_cast<CameraHardwareInterface *>(user);
-    object->mNotifyCb(msg_type, ext1, ext2, object->mCbUser);
-}
-
-void CameraHardwareInterface::sDataCb(int32_t msg_type,
-                      const camera_memory_t *data, unsigned int index,
-                      camera_frame_metadata_t *metadata,
-                      void *user)
-{
-    ALOGV("%s", __FUNCTION__);
-    CameraHardwareInterface *object =
-            static_cast<CameraHardwareInterface *>(user);
-    sp<CameraHeapMemory> mem(static_cast<CameraHeapMemory *>(data->handle));
-    if (index >= mem->mNumBufs) {
-        ALOGE("%s: invalid buffer index %d, max allowed is %d", __FUNCTION__,
-             index, mem->mNumBufs);
-        return;
-    }
-    object->mDataCb(msg_type, mem->mBuffers[index], metadata, object->mCbUser);
-}
-
-void CameraHardwareInterface::sDataCbTimestamp(nsecs_t timestamp, int32_t msg_type,
-                         const camera_memory_t *data, unsigned index,
-                         void *user)
-{
-    ALOGV("%s", __FUNCTION__);
-    CameraHardwareInterface *object =
-            static_cast<CameraHardwareInterface *>(user);
-    // Start refcounting the heap object from here on.  When the clients
-    // drop all references, it will be destroyed (as well as the enclosed
-    // MemoryHeapBase.
-    sp<CameraHeapMemory> mem(static_cast<CameraHeapMemory *>(data->handle));
-    if (index >= mem->mNumBufs) {
-        ALOGE("%s: invalid buffer index %d, max allowed is %d", __FUNCTION__,
-             index, mem->mNumBufs);
-        return;
-    }
-    object->mDataCbTimestamp(timestamp, msg_type, mem->mBuffers[index], object->mCbUser);
-}
-
-camera_memory_t* CameraHardwareInterface::sGetMemory(
-        int fd, size_t buf_size, uint_t num_bufs,
-        void *user __attribute__((unused)))
-{
-    CameraHeapMemory *mem;
-    if (fd < 0) {
-        mem = new CameraHeapMemory(buf_size, num_bufs);
-    } else {
-        mem = new CameraHeapMemory(fd, buf_size, num_bufs);
-    }
-    mem->incStrong(mem);
-    return &mem->handle;
-}
-
-void CameraHardwareInterface::sPutMemory(camera_memory_t *data)
-{
-    if (!data) {
-        return;
-    }
-
-    CameraHeapMemory *mem = static_cast<CameraHeapMemory *>(data->handle);
-    mem->decStrong(mem);
-}
-
-}; // namespace android
diff --git a/services/camera/libcameraservice/device1/CameraHardwareInterface.h b/services/camera/libcameraservice/device1/CameraHardwareInterface.h
deleted file mode 100644
index e519b04..0000000
--- a/services/camera/libcameraservice/device1/CameraHardwareInterface.h
+++ /dev/null
@@ -1,488 +0,0 @@
-/*
- * Copyright (C) 2008 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ANDROID_HARDWARE_CAMERA_HARDWARE_INTERFACE_H
-#define ANDROID_HARDWARE_CAMERA_HARDWARE_INTERFACE_H
-
-#include <unordered_map>
-#include <binder/IMemory.h>
-#include <binder/MemoryBase.h>
-#include <binder/MemoryHeapBase.h>
-#include <utils/RefBase.h>
-#include <ui/GraphicBuffer.h>
-#include <camera/Camera.h>
-#include <camera/CameraParameters.h>
-#include <system/window.h>
-#include <hardware/camera.h>
-
-#include <common/CameraProviderManager.h>
-
-namespace android {
-
-typedef void (*notify_callback)(int32_t msgType,
-                            int32_t ext1,
-                            int32_t ext2,
-                            void* user);
-
-typedef void (*data_callback)(int32_t msgType,
-                            const sp<IMemory> &dataPtr,
-                            camera_frame_metadata_t *metadata,
-                            void* user);
-
-typedef void (*data_callback_timestamp)(nsecs_t timestamp,
-                            int32_t msgType,
-                            const sp<IMemory> &dataPtr,
-                            void *user);
-
-struct HandleTimestampMessage {
-    nsecs_t timestamp;
-    const sp<IMemory> dataPtr;
-};
-
-typedef void (*data_callback_timestamp_batch)(
-        int32_t msgType,
-        const std::vector<HandleTimestampMessage>&, void* user);
-
-/**
- * CameraHardwareInterface.h defines the interface to the
- * camera hardware abstraction layer, used for setting and getting
- * parameters, live previewing, and taking pictures. It is used for
- * HAL devices with version CAMERA_DEVICE_API_VERSION_1_0 only.
- *
- * It is a referenced counted interface with RefBase as its base class.
- * CameraService calls openCameraHardware() to retrieve a strong pointer to the
- * instance of this interface and may be called multiple times. The
- * following steps describe a typical sequence:
- *
- *   -# After CameraService calls openCameraHardware(), getParameters() and
- *      setParameters() are used to initialize the camera instance.
- *   -# startPreview() is called.
- *
- * Prior to taking a picture, CameraService often calls autofocus(). When auto
- * focusing has completed, the camera instance sends a CAMERA_MSG_FOCUS notification,
- * which informs the application whether focusing was successful. The camera instance
- * only sends this message once and it is up  to the application to call autoFocus()
- * again if refocusing is desired.
- *
- * CameraService calls takePicture() to request the camera instance take a
- * picture. At this point, if a shutter, postview, raw, and/or compressed
- * callback is desired, the corresponding message must be enabled. Any memory
- * provided in a data callback must be copied if it's needed after returning.
- */
-
-class CameraHardwareInterface :
-        public virtual RefBase,
-        public virtual hardware::camera::device::V1_0::ICameraDeviceCallback,
-        public virtual hardware::camera::device::V1_0::ICameraDevicePreviewCallback {
-
-public:
-    explicit CameraHardwareInterface(const char *name):
-            mHidlDevice(nullptr),
-            mName(name),
-            mPreviewScalingMode(NOT_SET),
-            mPreviewTransform(NOT_SET),
-            mPreviewWidth(NOT_SET),
-            mPreviewHeight(NOT_SET),
-            mPreviewFormat(NOT_SET),
-            mPreviewUsage(0),
-            mPreviewSwapInterval(NOT_SET),
-            mPreviewCrop{NOT_SET,NOT_SET,NOT_SET,NOT_SET}
-    {
-    }
-
-    ~CameraHardwareInterface();
-
-    status_t initialize(sp<CameraProviderManager> manager);
-
-    /** Set the ANativeWindow to which preview frames are sent */
-    status_t setPreviewWindow(const sp<ANativeWindow>& buf);
-
-    status_t setPreviewScalingMode(int scalingMode);
-
-    status_t setPreviewTransform(int transform);
-
-    /** Set the notification and data callbacks */
-    void setCallbacks(notify_callback notify_cb,
-                      data_callback data_cb,
-                      data_callback_timestamp data_cb_timestamp,
-                      data_callback_timestamp_batch data_cb_timestamp_batch,
-                      void* user);
-
-    /**
-     * The following three functions all take a msgtype,
-     * which is a bitmask of the messages defined in
-     * include/ui/Camera.h
-     */
-
-    /**
-     * Enable a message, or set of messages.
-     */
-    void enableMsgType(int32_t msgType);
-
-    /**
-     * Disable a message, or a set of messages.
-     *
-     * Once received a call to disableMsgType(CAMERA_MSG_VIDEO_FRAME), camera hal
-     * should not rely on its client to call releaseRecordingFrame() to release
-     * video recording frames sent out by the cameral hal before and after the
-     * disableMsgType(CAMERA_MSG_VIDEO_FRAME) call. Camera hal clients must not
-     * modify/access any video recording frame after calling
-     * disableMsgType(CAMERA_MSG_VIDEO_FRAME).
-     */
-    void disableMsgType(int32_t msgType);
-
-    /**
-     * Query whether a message, or a set of messages, is enabled.
-     * Note that this is operates as an AND, if any of the messages
-     * queried are off, this will return false.
-     */
-    int msgTypeEnabled(int32_t msgType);
-
-    /**
-     * Start preview mode.
-     */
-    status_t startPreview();
-
-    /**
-     * Stop a previously started preview.
-     */
-    void stopPreview();
-
-    /**
-     * Returns true if preview is enabled.
-     */
-    int previewEnabled();
-
-    /**
-     * Request the camera hal to store meta data or real YUV data in
-     * the video buffers send out via CAMERA_MSG_VIDEO_FRRAME for a
-     * recording session. If it is not called, the default camera
-     * hal behavior is to store real YUV data in the video buffers.
-     *
-     * This method should be called before startRecording() in order
-     * to be effective.
-     *
-     * If meta data is stored in the video buffers, it is up to the
-     * receiver of the video buffers to interpret the contents and
-     * to find the actual frame data with the help of the meta data
-     * in the buffer. How this is done is outside of the scope of
-     * this method.
-     *
-     * Some camera hal may not support storing meta data in the video
-     * buffers, but all camera hal should support storing real YUV data
-     * in the video buffers. If the camera hal does not support storing
-     * the meta data in the video buffers when it is requested to do
-     * do, INVALID_OPERATION must be returned. It is very useful for
-     * the camera hal to pass meta data rather than the actual frame
-     * data directly to the video encoder, since the amount of the
-     * uncompressed frame data can be very large if video size is large.
-     *
-     * @param enable if true to instruct the camera hal to store
-     *      meta data in the video buffers; false to instruct
-     *      the camera hal to store real YUV data in the video
-     *      buffers.
-     *
-     * @return OK on success.
-     */
-
-    status_t storeMetaDataInBuffers(int enable);
-
-    /**
-     * Start record mode. When a record image is available a CAMERA_MSG_VIDEO_FRAME
-     * message is sent with the corresponding frame. Every record frame must be released
-     * by a cameral hal client via releaseRecordingFrame() before the client calls
-     * disableMsgType(CAMERA_MSG_VIDEO_FRAME). After the client calls
-     * disableMsgType(CAMERA_MSG_VIDEO_FRAME), it is camera hal's responsibility
-     * to manage the life-cycle of the video recording frames, and the client must
-     * not modify/access any video recording frames.
-     */
-    status_t startRecording();
-
-    /**
-     * Stop a previously started recording.
-     */
-    void stopRecording();
-
-    /**
-     * Returns true if recording is enabled.
-     */
-    int recordingEnabled();
-
-    /**
-     * Release a record frame previously returned by CAMERA_MSG_VIDEO_FRAME.
-     *
-     * It is camera hal client's responsibility to release video recording
-     * frames sent out by the camera hal before the camera hal receives
-     * a call to disableMsgType(CAMERA_MSG_VIDEO_FRAME). After it receives
-     * the call to disableMsgType(CAMERA_MSG_VIDEO_FRAME), it is camera hal's
-     * responsibility of managing the life-cycle of the video recording
-     * frames.
-     */
-    void releaseRecordingFrame(const sp<IMemory>& mem);
-
-    /**
-     * Release a batch of recording frames previously returned by
-     * CAMERA_MSG_VIDEO_FRAME. This method only supports frames that are
-     * stored as VideoNativeHandleMetadata.
-     *
-     * It is camera hal client's responsibility to release video recording
-     * frames sent out by the camera hal before the camera hal receives
-     * a call to disableMsgType(CAMERA_MSG_VIDEO_FRAME). After it receives
-     * the call to disableMsgType(CAMERA_MSG_VIDEO_FRAME), it is camera hal's
-     * responsibility of managing the life-cycle of the video recording
-     * frames.
-     */
-    void releaseRecordingFrameBatch(const std::vector<sp<IMemory>>& frames);
-
-    /**
-     * Start auto focus, the notification callback routine is called
-     * with CAMERA_MSG_FOCUS once when focusing is complete. autoFocus()
-     * will be called again if another auto focus is needed.
-     */
-    status_t autoFocus();
-
-    /**
-     * Cancels auto-focus function. If the auto-focus is still in progress,
-     * this function will cancel it. Whether the auto-focus is in progress
-     * or not, this function will return the focus position to the default.
-     * If the camera does not support auto-focus, this is a no-op.
-     */
-    status_t cancelAutoFocus();
-
-    /**
-     * Take a picture.
-     */
-    status_t takePicture();
-
-    /**
-     * Cancel a picture that was started with takePicture.  Calling this
-     * method when no picture is being taken is a no-op.
-     */
-    status_t cancelPicture();
-
-    /**
-     * Set the camera parameters. This returns BAD_VALUE if any parameter is
-     * invalid or not supported. */
-    status_t setParameters(const CameraParameters &params);
-
-    /** Return the camera parameters. */
-    CameraParameters getParameters() const;
-
-    /**
-     * Send command to camera driver.
-     */
-    status_t sendCommand(int32_t cmd, int32_t arg1, int32_t arg2);
-
-    /**
-     * Release the hardware resources owned by this object.  Note that this is
-     * *not* done in the destructor.
-     */
-    void release();
-
-    /**
-     * Dump state of the camera hardware
-     */
-    status_t dump(int fd, const Vector<String16>& /*args*/) const;
-
-private:
-    sp<hardware::camera::device::V1_0::ICameraDevice> mHidlDevice;
-    String8 mName;
-
-    static void sNotifyCb(int32_t msg_type, int32_t ext1,
-                            int32_t ext2, void *user);
-
-    static void sDataCb(int32_t msg_type,
-                          const camera_memory_t *data, unsigned int index,
-                          camera_frame_metadata_t *metadata,
-                          void *user);
-
-    static void sDataCbTimestamp(nsecs_t timestamp, int32_t msg_type,
-                             const camera_memory_t *data, unsigned index,
-                             void *user);
-
-    // This is a utility class that combines a MemoryHeapBase and a MemoryBase
-    // in one.  Since we tend to use them in a one-to-one relationship, this is
-    // handy.
-    class CameraHeapMemory : public RefBase {
-    public:
-        CameraHeapMemory(int fd, size_t buf_size, uint_t num_buffers = 1) :
-                         mBufSize(buf_size),
-                         mNumBufs(num_buffers)
-        {
-            mHeap = new MemoryHeapBase(fd, buf_size * num_buffers);
-            commonInitialization();
-        }
-
-        explicit CameraHeapMemory(size_t buf_size, uint_t num_buffers = 1) :
-                                  mBufSize(buf_size),
-                                  mNumBufs(num_buffers)
-        {
-            mHeap = new MemoryHeapBase(buf_size * num_buffers);
-            commonInitialization();
-        }
-
-        void commonInitialization()
-        {
-            handle.data = mHeap->base();
-            handle.size = mBufSize * mNumBufs;
-            handle.handle = this;
-
-            mBuffers = new sp<MemoryBase>[mNumBufs];
-            for (uint_t i = 0; i < mNumBufs; i++)
-                mBuffers[i] = new MemoryBase(mHeap,
-                                             i * mBufSize,
-                                             mBufSize);
-
-            handle.release = sPutMemory;
-        }
-
-        virtual ~CameraHeapMemory()
-        {
-            delete [] mBuffers;
-        }
-
-        size_t mBufSize;
-        uint_t mNumBufs;
-        sp<MemoryHeapBase> mHeap;
-        sp<MemoryBase> *mBuffers;
-
-        camera_memory_t handle;
-    };
-
-    static camera_memory_t* sGetMemory(int fd, size_t buf_size, uint_t num_bufs,
-                                         void *user __attribute__((unused)));
-
-    static void sPutMemory(camera_memory_t *data);
-
-    std::pair<bool, uint64_t> getBufferId(ANativeWindowBuffer* anb);
-    void cleanupCirculatingBuffers();
-
-    /**
-     * Implementation of android::hardware::camera::device::V1_0::ICameraDeviceCallback
-     */
-    hardware::Return<void> notifyCallback(
-            hardware::camera::device::V1_0::NotifyCallbackMsg msgType,
-            int32_t ext1, int32_t ext2) override;
-    hardware::Return<uint32_t> registerMemory(
-            const hardware::hidl_handle& descriptor,
-            uint32_t bufferSize, uint32_t bufferCount) override;
-    hardware::Return<void> unregisterMemory(uint32_t memId) override;
-    hardware::Return<void> dataCallback(
-            hardware::camera::device::V1_0::DataCallbackMsg msgType,
-            uint32_t data, uint32_t bufferIndex,
-            const hardware::camera::device::V1_0::CameraFrameMetadata& metadata) override;
-    hardware::Return<void> dataCallbackTimestamp(
-            hardware::camera::device::V1_0::DataCallbackMsg msgType,
-            uint32_t data, uint32_t bufferIndex, int64_t timestamp) override;
-    hardware::Return<void> handleCallbackTimestamp(
-            hardware::camera::device::V1_0::DataCallbackMsg msgType,
-            const hardware::hidl_handle& frameData, uint32_t data,
-            uint32_t bufferIndex, int64_t timestamp) override;
-    hardware::Return<void> handleCallbackTimestampBatch(
-            hardware::camera::device::V1_0::DataCallbackMsg msgType,
-            const hardware::hidl_vec<
-                    hardware::camera::device::V1_0::HandleTimestampMessage>&) override;
-
-    /**
-     * Implementation of android::hardware::camera::device::V1_0::ICameraDevicePreviewCallback
-     */
-    hardware::Return<void> dequeueBuffer(dequeueBuffer_cb _hidl_cb) override;
-    hardware::Return<hardware::camera::common::V1_0::Status>
-            enqueueBuffer(uint64_t bufferId) override;
-    hardware::Return<hardware::camera::common::V1_0::Status>
-            cancelBuffer(uint64_t bufferId) override;
-    hardware::Return<hardware::camera::common::V1_0::Status>
-            setBufferCount(uint32_t count) override;
-    hardware::Return<hardware::camera::common::V1_0::Status>
-            setBuffersGeometry(uint32_t w, uint32_t h,
-                    hardware::graphics::common::V1_0::PixelFormat format) override;
-    hardware::Return<hardware::camera::common::V1_0::Status>
-            setCrop(int32_t left, int32_t top, int32_t right, int32_t bottom) override;
-    hardware::Return<hardware::camera::common::V1_0::Status>
-            setUsage(hardware::graphics::common::V1_0::BufferUsage usage) override;
-    hardware::Return<hardware::camera::common::V1_0::Status>
-            setSwapInterval(int32_t interval) override;
-    hardware::Return<void> getMinUndequeuedBufferCount(
-        getMinUndequeuedBufferCount_cb _hidl_cb) override;
-    hardware::Return<hardware::camera::common::V1_0::Status>
-            setTimestamp(int64_t timestamp) override;
-
-    sp<ANativeWindow>        mPreviewWindow;
-
-    notify_callback               mNotifyCb;
-    data_callback                 mDataCb;
-    data_callback_timestamp       mDataCbTimestamp;
-    data_callback_timestamp_batch mDataCbTimestampBatch;
-    void *mCbUser;
-
-    // Cached values for preview stream parameters
-    static const int NOT_SET = -1;
-    int mPreviewScalingMode;
-    int mPreviewTransform;
-    int mPreviewWidth;
-    int mPreviewHeight;
-    int mPreviewFormat;
-    uint64_t mPreviewUsage;
-    int mPreviewSwapInterval;
-    android_native_rect_t mPreviewCrop;
-
-    struct BufferHasher {
-        size_t operator()(const buffer_handle_t& buf) const {
-            if (buf == nullptr)
-                return 0;
-
-            size_t result = 1;
-            result = 31 * result + buf->numFds;
-            result = 31 * result + buf->numInts;
-            int length = buf->numFds + buf->numInts;
-            for (int i = 0; i < length; i++) {
-                result = 31 * result + buf->data[i];
-            }
-            return result;
-        }
-    };
-
-    struct BufferComparator {
-        bool operator()(const buffer_handle_t& buf1, const buffer_handle_t& buf2) const {
-            if (buf1->numFds == buf2->numFds && buf1->numInts == buf2->numInts) {
-                int length = buf1->numFds + buf1->numInts;
-                for (int i = 0; i < length; i++) {
-                    if (buf1->data[i] != buf2->data[i]) {
-                        return false;
-                    }
-                }
-                return true;
-            }
-            return false;
-        }
-    };
-
-    std::mutex mBufferIdMapLock; // protecting mBufferIdMap and mNextBufferId
-    typedef std::unordered_map<const buffer_handle_t, uint64_t,
-            BufferHasher, BufferComparator> BufferIdMap;
-    // stream ID -> per stream buffer ID map
-    BufferIdMap mBufferIdMap;
-    std::unordered_map<uint64_t, ANativeWindowBuffer*> mReversedBufMap;
-    uint64_t mNextBufferId = 1;
-    static const uint64_t BUFFER_ID_NO_BUFFER = 0;
-
-    std::mutex mHidlMemPoolMapLock; // protecting mHidlMemPoolMap
-    std::unordered_map<int, camera_memory_t*> mHidlMemPoolMap;
-};
-
-};  // namespace android
-
-#endif
diff --git a/services/mediatranscoding/Android.bp b/services/mediatranscoding/Android.bp
index 8cf2d62..2dbcf5a 100644
--- a/services/mediatranscoding/Android.bp
+++ b/services/mediatranscoding/Android.bp
@@ -2,16 +2,25 @@
 cc_library_shared {
     name: "libmediatranscodingservice",
 
-    srcs: ["MediaTranscodingService.cpp"],
+    srcs: [
+        "MediaTranscodingService.cpp",
+        "SimulatedTranscoder.cpp",
+    ],
 
     shared_libs: [
         "libbase",
+        "libbinder",
         "libbinder_ndk",
+        "libcutils",
         "liblog",
         "libmediatranscoding",
         "libutils",
     ],
 
+    export_shared_lib_headers: [
+        "libmediatranscoding",
+    ],
+
     static_libs: [
         "mediatranscoding_aidl_interface-ndk_platform",
     ],
diff --git a/services/mediatranscoding/MediaTranscodingService.cpp b/services/mediatranscoding/MediaTranscodingService.cpp
index 82d4161..ef7d6d2 100644
--- a/services/mediatranscoding/MediaTranscodingService.cpp
+++ b/services/mediatranscoding/MediaTranscodingService.cpp
@@ -16,13 +16,22 @@
 
 //#define LOG_NDEBUG 0
 #define LOG_TAG "MediaTranscodingService"
-#include <MediaTranscodingService.h>
+#include "MediaTranscodingService.h"
+
 #include <android/binder_manager.h>
 #include <android/binder_process.h>
+#include <binder/IServiceManager.h>
+#include <cutils/properties.h>
+#include <media/TranscoderWrapper.h>
+#include <media/TranscodingClientManager.h>
+#include <media/TranscodingJobScheduler.h>
+#include <media/TranscodingUidPolicy.h>
 #include <private/android_filesystem_config.h>
 #include <utils/Log.h>
 #include <utils/Vector.h>
 
+#include "SimulatedTranscoder.h"
+
 namespace android {
 
 // Convenience methods for constructing binder::Status objects for error returns
@@ -45,9 +54,14 @@
     }
 }
 
-MediaTranscodingService::MediaTranscodingService()
-      : mTranscodingClientManager(TranscodingClientManager::getInstance()) {
+MediaTranscodingService::MediaTranscodingService(
+        const std::shared_ptr<TranscoderInterface>& transcoder)
+      : mUidPolicy(new TranscodingUidPolicy()),
+        mJobScheduler(new TranscodingJobScheduler(transcoder, mUidPolicy)),
+        mClientManager(new TranscodingClientManager(mJobScheduler)) {
     ALOGV("MediaTranscodingService is created");
+    transcoder->setCallback(mJobScheduler);
+    mUidPolicy->setCallback(mJobScheduler);
 }
 
 MediaTranscodingService::~MediaTranscodingService() {
@@ -56,6 +70,17 @@
 
 binder_status_t MediaTranscodingService::dump(int fd, const char** /*args*/, uint32_t /*numArgs*/) {
     String8 result;
+
+    // TODO(b/161549994): Remove libbinder dependencies for mainline.
+    if (checkCallingPermission(String16("android.permission.DUMP")) == false) {
+        result.format(
+                "Permission Denial: "
+                "can't dump MediaTranscodingService from pid=%d, uid=%d\n",
+                AIBinder_getCallingPid(), AIBinder_getCallingUid());
+        write(fd, result.string(), result.size());
+        return PERMISSION_DENIED;
+    }
+
     const size_t SIZE = 256;
     char buffer[SIZE];
 
@@ -64,14 +89,21 @@
     write(fd, result.string(), result.size());
 
     Vector<String16> args;
-    mTranscodingClientManager.dumpAllClients(fd, args);
+    mClientManager->dumpAllClients(fd, args);
     return OK;
 }
 
 //static
 void MediaTranscodingService::instantiate() {
+    std::shared_ptr<TranscoderInterface> transcoder;
+    if (property_get_bool("debug.transcoding.simulated_transcoder", false)) {
+        transcoder = std::make_shared<SimulatedTranscoder>();
+    } else {
+        transcoder = std::make_shared<TranscoderWrapper>();
+    }
+
     std::shared_ptr<MediaTranscodingService> service =
-            ::ndk::SharedRefBase::make<MediaTranscodingService>();
+            ::ndk::SharedRefBase::make<MediaTranscodingService>(transcoder);
     binder_status_t status =
             AServiceManager_addService(service->asBinder().get(), getServiceName());
     if (status != STATUS_OK) {
@@ -80,19 +112,19 @@
 }
 
 Status MediaTranscodingService::registerClient(
-        const std::shared_ptr<ITranscodingServiceClient>& in_client,
-        const std::string& in_opPackageName, int32_t in_clientUid, int32_t in_clientPid,
-        int32_t* _aidl_return) {
-    if (in_client == nullptr) {
-        ALOGE("Client can not be null");
-        *_aidl_return = kInvalidJobId;
-        return Status::fromServiceSpecificError(ERROR_ILLEGAL_ARGUMENT);
+        const std::shared_ptr<ITranscodingClientCallback>& in_callback,
+        const std::string& in_clientName, const std::string& in_opPackageName, int32_t in_clientUid,
+        int32_t in_clientPid, std::shared_ptr<ITranscodingClient>* _aidl_return) {
+    if (in_callback == nullptr) {
+        *_aidl_return = nullptr;
+        return STATUS_ERROR_FMT(ERROR_ILLEGAL_ARGUMENT, "Client callback cannot be null!");
     }
 
     int32_t callingPid = AIBinder_getCallingPid();
     int32_t callingUid = AIBinder_getCallingUid();
 
-    // Check if we can trust clientUid. Only privilege caller could forward the uid on app client's behalf.
+    // Check if we can trust clientUid. Only privilege caller could forward the
+    // uid on app client's behalf.
     if (in_clientUid == USE_CALLING_UID) {
         in_clientUid = callingUid;
     } else if (!isTrustedCallingUid(callingUid)) {
@@ -106,7 +138,8 @@
                                 in_clientPid, in_clientUid);
     }
 
-    // Check if we can trust clientPid. Only privilege caller could forward the pid on app client's behalf.
+    // Check if we can trust clientPid. Only privilege caller could forward the
+    // pid on app client's behalf.
     if (in_clientPid == USE_CALLING_PID) {
         in_clientPid = callingPid;
     } else if (!isTrustedCallingUid(callingUid)) {
@@ -120,78 +153,23 @@
                                 in_clientPid, in_clientUid);
     }
 
-    // We know the clientId must be equal to its pid as we assigned client's pid as its clientId.
-    int32_t clientId = in_clientPid;
-
-    // Checks if the client already registers.
-    if (mTranscodingClientManager.isClientIdRegistered(clientId)) {
-        return Status::fromServiceSpecificError(ERROR_ALREADY_EXISTS);
-    }
-
     // Creates the client and uses its process id as client id.
-    std::unique_ptr<TranscodingClientManager::ClientInfo> newClient =
-            std::make_unique<TranscodingClientManager::ClientInfo>(
-                    in_client, clientId, in_clientPid, in_clientUid, in_opPackageName);
-    status_t err = mTranscodingClientManager.addClient(std::move(newClient));
+    std::shared_ptr<ITranscodingClient> newClient;
+
+    status_t err = mClientManager->addClient(in_callback, in_clientPid, in_clientUid, in_clientName,
+                                             in_opPackageName, &newClient);
     if (err != OK) {
-        *_aidl_return = kInvalidClientId;
+        *_aidl_return = nullptr;
         return STATUS_ERROR_FMT(err, "Failed to add client to TranscodingClientManager");
     }
 
-    ALOGD("Assign client: %s pid: %d, uid: %d with id: %d", in_opPackageName.c_str(), in_clientPid,
-          in_clientUid, clientId);
-
-    *_aidl_return = clientId;
-    return Status::ok();
-}
-
-Status MediaTranscodingService::unregisterClient(int32_t clientId, bool* _aidl_return) {
-    ALOGD("unregisterClient id: %d", clientId);
-    int32_t callingUid = AIBinder_getCallingUid();
-    int32_t callingPid = AIBinder_getCallingPid();
-
-    // Only the client with clientId or the trusted caller could unregister the client.
-    if (callingPid != clientId) {
-        if (!isTrustedCallingUid(callingUid)) {
-            ALOGE("Untrusted caller (calling PID %d, UID %d) trying to "
-                  "unregister client with id: %d",
-                  callingUid, callingPid, clientId);
-            *_aidl_return = true;
-            return STATUS_ERROR_FMT(ERROR_PERMISSION_DENIED,
-                                    "Untrusted caller (calling PID %d, UID %d) trying to "
-                                    "unregister client with id: %d",
-                                    callingUid, callingPid, clientId);
-        }
-    }
-
-    *_aidl_return = (mTranscodingClientManager.removeClient(clientId) == OK);
+    *_aidl_return = newClient;
     return Status::ok();
 }
 
 Status MediaTranscodingService::getNumOfClients(int32_t* _aidl_return) {
     ALOGD("MediaTranscodingService::getNumOfClients");
-    *_aidl_return = mTranscodingClientManager.getNumOfClients();
-    return Status::ok();
-}
-
-Status MediaTranscodingService::submitRequest(int32_t /*clientId*/,
-                                              const TranscodingRequestParcel& /*request*/,
-                                              TranscodingJobParcel* /*job*/,
-                                              int32_t* /*_aidl_return*/) {
-    // TODO(hkuang): Add implementation.
-    return Status::ok();
-}
-
-Status MediaTranscodingService::cancelJob(int32_t /*in_clientId*/, int32_t /*in_jobId*/,
-                                          bool* /*_aidl_return*/) {
-    // TODO(hkuang): Add implementation.
-    return Status::ok();
-}
-
-Status MediaTranscodingService::getJobWithId(int32_t /*in_jobId*/,
-                                             TranscodingJobParcel* /*out_job*/,
-                                             bool* /*_aidl_return*/) {
-    // TODO(hkuang): Add implementation.
+    *_aidl_return = mClientManager->getNumOfClients();
     return Status::ok();
 }
 
diff --git a/services/mediatranscoding/MediaTranscodingService.h b/services/mediatranscoding/MediaTranscodingService.h
index cc69727..505239c 100644
--- a/services/mediatranscoding/MediaTranscodingService.h
+++ b/services/mediatranscoding/MediaTranscodingService.h
@@ -19,44 +19,39 @@
 
 #include <aidl/android/media/BnMediaTranscodingService.h>
 #include <binder/IServiceManager.h>
-#include <media/TranscodingClientManager.h>
 
 namespace android {
 
 using Status = ::ndk::ScopedAStatus;
 using ::aidl::android::media::BnMediaTranscodingService;
-using ::aidl::android::media::ITranscodingServiceClient;
+using ::aidl::android::media::ITranscodingClient;
+using ::aidl::android::media::ITranscodingClientCallback;
 using ::aidl::android::media::TranscodingJobParcel;
 using ::aidl::android::media::TranscodingRequestParcel;
+class TranscodingClientManager;
+class TranscodingJobScheduler;
+class TranscoderInterface;
+class UidPolicyInterface;
 
 class MediaTranscodingService : public BnMediaTranscodingService {
 public:
     static constexpr int32_t kInvalidJobId = -1;
     static constexpr int32_t kInvalidClientId = -1;
 
-    MediaTranscodingService();
+    MediaTranscodingService(const std::shared_ptr<TranscoderInterface>& transcoder);
     virtual ~MediaTranscodingService();
 
     static void instantiate();
 
     static const char* getServiceName() { return "media.transcoding"; }
 
-    Status registerClient(const std::shared_ptr<ITranscodingServiceClient>& in_client,
-                          const std::string& in_opPackageName, int32_t in_clientUid,
-                          int32_t in_clientPid, int32_t* _aidl_return) override;
-
-    Status unregisterClient(int32_t clientId, bool* _aidl_return) override;
+    Status registerClient(const std::shared_ptr<ITranscodingClientCallback>& in_callback,
+                          const std::string& in_clientName, const std::string& in_opPackageName,
+                          int32_t in_clientUid, int32_t in_clientPid,
+                          std::shared_ptr<ITranscodingClient>* _aidl_return) override;
 
     Status getNumOfClients(int32_t* _aidl_return) override;
 
-    Status submitRequest(int32_t in_clientId, const TranscodingRequestParcel& in_request,
-                         TranscodingJobParcel* out_job, int32_t* _aidl_return) override;
-
-    Status cancelJob(int32_t in_clientId, int32_t in_jobId, bool* _aidl_return) override;
-
-    Status getJobWithId(int32_t in_jobId, TranscodingJobParcel* out_job,
-                        bool* _aidl_return) override;
-
     virtual inline binder_status_t dump(int /*fd*/, const char** /*args*/, uint32_t /*numArgs*/);
 
 private:
@@ -64,7 +59,9 @@
 
     mutable std::mutex mServiceLock;
 
-    TranscodingClientManager& mTranscodingClientManager;
+    std::shared_ptr<UidPolicyInterface> mUidPolicy;
+    std::shared_ptr<TranscodingJobScheduler> mJobScheduler;
+    std::shared_ptr<TranscodingClientManager> mClientManager;
 };
 
 }  // namespace android
diff --git a/services/mediatranscoding/SimulatedTranscoder.cpp b/services/mediatranscoding/SimulatedTranscoder.cpp
new file mode 100644
index 0000000..97d5f5f
--- /dev/null
+++ b/services/mediatranscoding/SimulatedTranscoder.cpp
@@ -0,0 +1,170 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "SimulatedTranscoder"
+#include "SimulatedTranscoder.h"
+
+#include <utils/Log.h>
+
+#include <thread>
+
+namespace android {
+
+//static
+const char* SimulatedTranscoder::toString(Event::Type type) {
+    switch (type) {
+    case Event::Start:
+        return "Start";
+    case Event::Pause:
+        return "Pause";
+    case Event::Resume:
+        return "Resume";
+    default:
+        break;
+    }
+    return "(unknown)";
+}
+
+SimulatedTranscoder::SimulatedTranscoder() {
+    std::thread(&SimulatedTranscoder::threadLoop, this).detach();
+}
+
+void SimulatedTranscoder::setCallback(const std::shared_ptr<TranscoderCallbackInterface>& cb) {
+    mCallback = cb;
+}
+
+void SimulatedTranscoder::start(
+        ClientIdType clientId, JobIdType jobId, const TranscodingRequestParcel& request,
+        const std::shared_ptr<ITranscodingClientCallback>& /*clientCallback*/) {
+    if (request.testConfig.has_value() && request.testConfig->processingTotalTimeMs > 0) {
+        mJobProcessingTimeMs = request.testConfig->processingTotalTimeMs;
+    }
+    ALOGV("%s: job {%d}: processingTime: %lld", __FUNCTION__, jobId,
+          (long long)mJobProcessingTimeMs);
+    queueEvent(Event::Start, clientId, jobId, [=] {
+        auto callback = mCallback.lock();
+        if (callback != nullptr) {
+            callback->onStarted(clientId, jobId);
+        }
+    });
+}
+
+void SimulatedTranscoder::pause(ClientIdType clientId, JobIdType jobId) {
+    queueEvent(Event::Pause, clientId, jobId, [=] {
+        auto callback = mCallback.lock();
+        if (callback != nullptr) {
+            callback->onPaused(clientId, jobId);
+        }
+    });
+}
+
+void SimulatedTranscoder::resume(
+        ClientIdType clientId, JobIdType jobId, const TranscodingRequestParcel& /*request*/,
+        const std::shared_ptr<ITranscodingClientCallback>& /*clientCallback*/) {
+    queueEvent(Event::Resume, clientId, jobId, [=] {
+        auto callback = mCallback.lock();
+        if (callback != nullptr) {
+            callback->onResumed(clientId, jobId);
+        }
+    });
+}
+
+void SimulatedTranscoder::stop(ClientIdType clientId, JobIdType jobId) {
+    queueEvent(Event::Stop, clientId, jobId, nullptr);
+}
+
+void SimulatedTranscoder::queueEvent(Event::Type type, ClientIdType clientId, JobIdType jobId,
+                                     std::function<void()> runnable) {
+    ALOGV("%s: job {%lld, %d}: %s", __FUNCTION__, (long long)clientId, jobId, toString(type));
+
+    auto lock = std::scoped_lock(mLock);
+
+    mQueue.push_back({type, clientId, jobId, runnable});
+    mCondition.notify_one();
+}
+
+void SimulatedTranscoder::threadLoop() {
+    bool running = false;
+    std::chrono::microseconds remainingUs(kJobDurationUs);
+    std::chrono::system_clock::time_point lastRunningTime;
+    Event lastRunningEvent;
+
+    std::unique_lock<std::mutex> lock(mLock);
+    // SimulatedTranscoder currently lives in the transcoding service, as long as
+    // MediaTranscodingService itself.
+    while (true) {
+        // Wait for the next event.
+        while (mQueue.empty()) {
+            if (!running) {
+                mCondition.wait(lock);
+                continue;
+            }
+            // If running, wait for the remaining life of this job. Report finish if timed out.
+            std::cv_status status = mCondition.wait_for(lock, remainingUs);
+            if (status == std::cv_status::timeout) {
+                running = false;
+
+                auto callback = mCallback.lock();
+                if (callback != nullptr) {
+                    lock.unlock();
+                    callback->onFinish(lastRunningEvent.clientId, lastRunningEvent.jobId);
+                    lock.lock();
+                }
+            } else {
+                // Advance last running time and remaining time. This is needed to guard
+                // against bad events (which will be ignored) or spurious wakeups, in that
+                // case we don't want to wait for the same time again.
+                auto now = std::chrono::system_clock::now();
+                remainingUs -= (now - lastRunningTime);
+                lastRunningTime = now;
+            }
+        }
+
+        // Handle the events, adjust state and send updates to client accordingly.
+        while (!mQueue.empty()) {
+            Event event = *mQueue.begin();
+            mQueue.pop_front();
+
+            ALOGV("%s: job {%lld, %d}: %s", __FUNCTION__, (long long)event.clientId, event.jobId,
+                  toString(event.type));
+
+            if (!running && (event.type == Event::Start || event.type == Event::Resume)) {
+                running = true;
+                lastRunningTime = std::chrono::system_clock::now();
+                lastRunningEvent = event;
+                if (event.type == Event::Start) {
+                    remainingUs = std::chrono::milliseconds(mJobProcessingTimeMs);
+                }
+            } else if (running && (event.type == Event::Pause || event.type == Event::Stop)) {
+                running = false;
+                remainingUs -= (std::chrono::system_clock::now() - lastRunningTime);
+            } else {
+                ALOGW("%s: discarding bad event: job {%lld, %d}: %s", __FUNCTION__,
+                      (long long)event.clientId, event.jobId, toString(event.type));
+                continue;
+            }
+
+            if (event.runnable != nullptr) {
+                lock.unlock();
+                event.runnable();
+                lock.lock();
+            }
+        }
+    }
+}
+
+}  // namespace android
diff --git a/services/mediatranscoding/SimulatedTranscoder.h b/services/mediatranscoding/SimulatedTranscoder.h
new file mode 100644
index 0000000..1c359dd
--- /dev/null
+++ b/services/mediatranscoding/SimulatedTranscoder.h
@@ -0,0 +1,79 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_MEDIA_SIMULATED_TRANSCODER_H
+#define ANDROID_MEDIA_SIMULATED_TRANSCODER_H
+
+#include <android-base/thread_annotations.h>
+#include <media/TranscoderInterface.h>
+
+#include <list>
+#include <mutex>
+
+namespace android {
+
+/**
+ * SimulatedTranscoder is currently used to instantiate MediaTranscodingService
+ * on service side for testing, so that we could actually test the IPC calls of
+ * MediaTranscodingService to expose issues that's observable only over IPC.
+ * SimulatedTranscoder is used when useSimulatedTranscoder in TranscodingTestConfig
+ * is set to true.
+ *
+ * SimulatedTranscoder simulates job execution by reporting finish after kJobDurationUs.
+ * Job lifecycle events are reported via progress updates with special progress
+ * numbers (equal to the Event's type).
+ */
+class SimulatedTranscoder : public TranscoderInterface {
+public:
+    struct Event {
+        enum Type { NoEvent, Start, Pause, Resume, Stop, Finished, Failed } type;
+        ClientIdType clientId;
+        JobIdType jobId;
+        std::function<void()> runnable;
+    };
+
+    static constexpr int64_t kJobDurationUs = 1000000;
+
+    SimulatedTranscoder();
+
+    // TranscoderInterface
+    void setCallback(const std::shared_ptr<TranscoderCallbackInterface>& cb) override;
+    void start(ClientIdType clientId, JobIdType jobId, const TranscodingRequestParcel& request,
+               const std::shared_ptr<ITranscodingClientCallback>& clientCallback) override;
+    void pause(ClientIdType clientId, JobIdType jobId) override;
+    void resume(ClientIdType clientId, JobIdType jobId, const TranscodingRequestParcel& request,
+                const std::shared_ptr<ITranscodingClientCallback>& clientCallback) override;
+    void stop(ClientIdType clientId, JobIdType jobId) override;
+    // ~TranscoderInterface
+
+private:
+    std::weak_ptr<TranscoderCallbackInterface> mCallback;
+    std::mutex mLock;
+    std::condition_variable mCondition;
+    std::list<Event> mQueue GUARDED_BY(mLock);
+
+    // Minimum time spent on transcode the video. This is used just for testing.
+    int64_t mJobProcessingTimeMs = kJobDurationUs / 1000;
+
+    static const char* toString(Event::Type type);
+    void queueEvent(Event::Type type, ClientIdType clientId, JobIdType jobId,
+                    std::function<void()> runnable);
+    void threadLoop();
+};
+
+}  // namespace android
+
+#endif  // ANDROID_MEDIA_SIMULATED_TRANSCODER_H
diff --git a/services/mediatranscoding/tests/Android.bp b/services/mediatranscoding/tests/Android.bp
index e0e040c..364a198 100644
--- a/services/mediatranscoding/tests/Android.bp
+++ b/services/mediatranscoding/tests/Android.bp
@@ -19,6 +19,7 @@
         "liblog",
         "libutils",
         "libmediatranscodingservice",
+        "libcutils",
     ],
 
     static_libs: [
@@ -26,10 +27,24 @@
     ],
 }
 
-// MediaTranscodingService unit test
+// MediaTranscodingService unit test using simulated transcoder
 cc_test {
-    name: "mediatranscodingservice_tests",
+    name: "mediatranscodingservice_simulated_tests",
     defaults: ["mediatranscodingservice_test_defaults"],
 
-    srcs: ["mediatranscodingservice_tests.cpp"],
-}
\ No newline at end of file
+    srcs: ["mediatranscodingservice_simulated_tests.cpp"],
+
+    required: [
+        "TranscodingUidPolicy_TestAppA",
+        "TranscodingUidPolicy_TestAppB",
+        "TranscodingUidPolicy_TestAppC",
+    ],
+}
+
+// MediaTranscodingService unit test using real transcoder
+cc_test {
+    name: "mediatranscodingservice_real_tests",
+    defaults: ["mediatranscodingservice_test_defaults"],
+
+    srcs: ["mediatranscodingservice_real_tests.cpp"],
+}
diff --git a/services/mediatranscoding/tests/MediaTranscodingServiceTestHelper.h b/services/mediatranscoding/tests/MediaTranscodingServiceTestHelper.h
new file mode 100644
index 0000000..53fd7ec
--- /dev/null
+++ b/services/mediatranscoding/tests/MediaTranscodingServiceTestHelper.h
@@ -0,0 +1,445 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// Unit Test for MediaTranscodingService.
+
+#include <aidl/android/media/BnTranscodingClientCallback.h>
+#include <aidl/android/media/IMediaTranscodingService.h>
+#include <aidl/android/media/ITranscodingClient.h>
+#include <aidl/android/media/ITranscodingClientCallback.h>
+#include <aidl/android/media/TranscodingJobParcel.h>
+#include <aidl/android/media/TranscodingJobPriority.h>
+#include <aidl/android/media/TranscodingRequestParcel.h>
+#include <android-base/logging.h>
+#include <android/binder_manager.h>
+#include <android/binder_process.h>
+#include <binder/PermissionController.h>
+#include <cutils/multiuser.h>
+#include <fcntl.h>
+#include <gtest/gtest.h>
+#include <sys/stat.h>
+#include <sys/types.h>
+#include <utils/Log.h>
+
+#include <iostream>
+#include <list>
+
+#include "SimulatedTranscoder.h"
+
+namespace android {
+
+namespace media {
+
+using Status = ::ndk::ScopedAStatus;
+using aidl::android::media::BnTranscodingClientCallback;
+using aidl::android::media::IMediaTranscodingService;
+using aidl::android::media::ITranscodingClient;
+using aidl::android::media::ITranscodingClientCallback;
+using aidl::android::media::TranscodingJobParcel;
+using aidl::android::media::TranscodingJobPriority;
+using aidl::android::media::TranscodingRequestParcel;
+using aidl::android::media::TranscodingVideoTrackFormat;
+
+constexpr int32_t kClientUseCallingPid = IMediaTranscodingService::USE_CALLING_PID;
+
+constexpr uid_t kClientUid = 5000;
+#define UID(n) (kClientUid + (n))
+
+constexpr int32_t kClientId = 0;
+#define CLIENT(n) (kClientId + (n))
+
+constexpr const char* kClientName = "TestClient";
+constexpr const char* kClientPackageA = "com.android.tests.transcoding.testapp.A";
+constexpr const char* kClientPackageB = "com.android.tests.transcoding.testapp.B";
+constexpr const char* kClientPackageC = "com.android.tests.transcoding.testapp.C";
+
+constexpr const char* kTestActivityName = "/com.android.tests.transcoding.MainActivity";
+
+static status_t getUidForPackage(String16 packageName, userid_t userId, /*inout*/ uid_t& uid) {
+    PermissionController pc;
+    uid = pc.getPackageUid(packageName, 0);
+    if (uid <= 0) {
+        ALOGE("Unknown package: '%s'", String8(packageName).string());
+        return BAD_VALUE;
+    }
+
+    if (userId < 0) {
+        ALOGE("Invalid user: %d", userId);
+        return BAD_VALUE;
+    }
+
+    uid = multiuser_get_uid(userId, uid);
+    return NO_ERROR;
+}
+
+struct ShellHelper {
+    static bool RunCmd(const std::string& cmdStr) {
+        int ret = system(cmdStr.c_str());
+        if (ret != 0) {
+            ALOGE("Failed to run cmd: %s, exitcode %d", cmdStr.c_str(), ret);
+            return false;
+        }
+        return true;
+    }
+
+    static bool Start(const char* packageName, const char* activityName) {
+        return RunCmd("am start -W " + std::string(packageName) + std::string(activityName) +
+                      " &> /dev/null");
+    }
+
+    static bool Stop(const char* packageName) {
+        return RunCmd("am force-stop " + std::string(packageName));
+    }
+};
+
+struct EventTracker {
+    struct Event {
+        enum { NoEvent, Start, Pause, Resume, Finished, Failed } type;
+        int64_t clientId;
+        int32_t jobId;
+    };
+
+#define DECLARE_EVENT(action)                              \
+    static Event action(int32_t clientId, int32_t jobId) { \
+        return {Event::action, clientId, jobId};           \
+    }
+
+    DECLARE_EVENT(Start);
+    DECLARE_EVENT(Pause);
+    DECLARE_EVENT(Resume);
+    DECLARE_EVENT(Finished);
+    DECLARE_EVENT(Failed);
+
+    static constexpr Event NoEvent = {Event::NoEvent, 0, 0};
+
+    static std::string toString(const Event& event) {
+        std::string eventStr;
+        switch (event.type) {
+        case Event::Start:
+            eventStr = "Start";
+            break;
+        case Event::Pause:
+            eventStr = "Pause";
+            break;
+        case Event::Resume:
+            eventStr = "Resume";
+            break;
+        case Event::Finished:
+            eventStr = "Finished";
+            break;
+        case Event::Failed:
+            eventStr = "Failed";
+            break;
+        default:
+            return "NoEvent";
+        }
+        return "job {" + std::to_string(event.clientId) + ", " + std::to_string(event.jobId) +
+               "}: " + eventStr;
+    }
+
+    // Pop 1 event from front, wait for up to timeoutUs if empty.
+    const Event& pop(int64_t timeoutUs = 0) {
+        std::unique_lock lock(mLock);
+
+        if (mEventQueue.empty() && timeoutUs > 0) {
+            mCondition.wait_for(lock, std::chrono::microseconds(timeoutUs));
+        }
+
+        if (mEventQueue.empty()) {
+            mPoppedEvent = NoEvent;
+        } else {
+            mPoppedEvent = *mEventQueue.begin();
+            mEventQueue.pop_front();
+        }
+
+        return mPoppedEvent;
+    }
+
+    // Push 1 event to back.
+    void append(const Event& event,
+                const TranscodingErrorCode err = TranscodingErrorCode::kNoError) {
+        ALOGD("%s", toString(event).c_str());
+
+        std::unique_lock lock(mLock);
+
+        mEventQueue.push_back(event);
+        mLastErr = err;
+        mCondition.notify_one();
+    }
+
+    void updateProgress(int progress) {
+        std::unique_lock lock(mLock);
+        mLastProgress = progress;
+        mUpdateCount++;
+    }
+
+    int getUpdateCount(int *lastProgress) {
+        std::unique_lock lock(mLock);
+        *lastProgress = mLastProgress;
+        return mUpdateCount;
+    }
+
+    TranscodingErrorCode getLastError() {
+        std::unique_lock lock(mLock);
+        return mLastErr;
+    }
+
+private:
+    std::mutex mLock;
+    std::condition_variable mCondition;
+    Event mPoppedEvent;
+    std::list<Event> mEventQueue;
+    TranscodingErrorCode mLastErr;
+    int mUpdateCount = 0;
+    int mLastProgress = -1;
+};
+
+// Operators for GTest macros.
+bool operator==(const EventTracker::Event& lhs, const EventTracker::Event& rhs) {
+    return lhs.type == rhs.type && lhs.clientId == rhs.clientId && lhs.jobId == rhs.jobId;
+}
+
+std::ostream& operator<<(std::ostream& str, const EventTracker::Event& v) {
+    str << EventTracker::toString(v);
+    return str;
+}
+
+struct TestClientCallback : public BnTranscodingClientCallback, public EventTracker {
+    TestClientCallback(int32_t id) : mClientId(id) {
+        ALOGI("TestClientCallback %d Created", mClientId);
+    }
+
+    virtual ~TestClientCallback() { ALOGI("TestClientCallback %d destroyed", mClientId); }
+
+    Status openFileDescriptor(const std::string& in_fileUri, const std::string& in_mode,
+                              ::ndk::ScopedFileDescriptor* _aidl_return) override {
+        ALOGD("@@@ openFileDescriptor: %s", in_fileUri.c_str());
+        int fd;
+        if (in_mode == "w" || in_mode == "rw") {
+            int kOpenFlags;
+            if (in_mode == "w") {
+                // Write-only, create file if non-existent, truncate existing file.
+                kOpenFlags = O_WRONLY | O_CREAT | O_TRUNC;
+            } else {
+                // Read-Write, create if non-existent, no truncate (service will truncate if needed)
+                kOpenFlags = O_RDWR | O_CREAT;
+            }
+            // User R+W permission.
+            constexpr int kFileMode = S_IRUSR | S_IWUSR;
+            fd = open(in_fileUri.c_str(), kOpenFlags, kFileMode);
+        } else {
+            fd = open(in_fileUri.c_str(), O_RDONLY);
+        }
+        _aidl_return->set(fd);
+        return Status::ok();
+    }
+
+    Status onTranscodingStarted(int32_t in_jobId) override {
+        append(EventTracker::Start(mClientId, in_jobId));
+        return Status::ok();
+    }
+
+    Status onTranscodingPaused(int32_t in_jobId) override {
+        append(EventTracker::Pause(mClientId, in_jobId));
+        return Status::ok();
+    }
+
+    Status onTranscodingResumed(int32_t in_jobId) override {
+        append(EventTracker::Resume(mClientId, in_jobId));
+        return Status::ok();
+    }
+
+    Status onTranscodingFinished(
+            int32_t in_jobId,
+            const ::aidl::android::media::TranscodingResultParcel& /* in_result */) override {
+        append(Finished(mClientId, in_jobId));
+        return Status::ok();
+    }
+
+    Status onTranscodingFailed(int32_t in_jobId,
+                               ::aidl::android::media::TranscodingErrorCode in_errorCode) override {
+        append(Failed(mClientId, in_jobId), in_errorCode);
+        return Status::ok();
+    }
+
+    Status onAwaitNumberOfJobsChanged(int32_t /* in_jobId */, int32_t /* in_oldAwaitNumber */,
+                                      int32_t /* in_newAwaitNumber */) override {
+        return Status::ok();
+    }
+
+    Status onProgressUpdate(int32_t /* in_jobId */, int32_t in_progress) override {
+        updateProgress(in_progress);
+        return Status::ok();
+    }
+
+    int32_t mClientId;
+};
+
+class MediaTranscodingServiceTestBase : public ::testing::Test {
+public:
+    MediaTranscodingServiceTestBase() { ALOGI("MediaTranscodingServiceTestBase created"); }
+
+    virtual ~MediaTranscodingServiceTestBase() {
+        ALOGI("MediaTranscodingServiceTestBase destroyed");
+    }
+
+    void SetUp() override {
+        // Need thread pool to receive callbacks, otherwise oneway callbacks are
+        // silently ignored.
+        ABinderProcess_startThreadPool();
+        ::ndk::SpAIBinder binder(AServiceManager_getService("media.transcoding"));
+        mService = IMediaTranscodingService::fromBinder(binder);
+        if (mService == nullptr) {
+            ALOGE("Failed to connect to the media.trascoding service.");
+            return;
+        }
+        mClientCallback1 = ::ndk::SharedRefBase::make<TestClientCallback>(CLIENT(1));
+        mClientCallback2 = ::ndk::SharedRefBase::make<TestClientCallback>(CLIENT(2));
+        mClientCallback3 = ::ndk::SharedRefBase::make<TestClientCallback>(CLIENT(3));
+    }
+
+    std::shared_ptr<ITranscodingClient> registerOneClient(
+            const char* packageName, const std::shared_ptr<TestClientCallback>& callback,
+            uid_t defaultUid) {
+        uid_t uid;
+        if (getUidForPackage(String16(packageName), 0 /*userId*/, uid) != NO_ERROR) {
+            uid = defaultUid;
+        }
+
+        ALOGD("registering %s with uid %d", packageName, uid);
+
+        std::shared_ptr<ITranscodingClient> client;
+        Status status = mService->registerClient(callback, kClientName, packageName, uid,
+                                                 kClientUseCallingPid, &client);
+        return status.isOk() ? client : nullptr;
+    }
+
+    void registerMultipleClients() {
+        // Register 3 clients.
+        mClient1 = registerOneClient(kClientPackageA, mClientCallback1, UID(1));
+        EXPECT_TRUE(mClient1 != nullptr);
+
+        mClient2 = registerOneClient(kClientPackageB, mClientCallback2, UID(2));
+        EXPECT_TRUE(mClient2 != nullptr);
+
+        mClient3 = registerOneClient(kClientPackageC, mClientCallback3, UID(3));
+        EXPECT_TRUE(mClient3 != nullptr);
+
+        // Check the number of clients.
+        int32_t numOfClients;
+        Status status = mService->getNumOfClients(&numOfClients);
+        EXPECT_TRUE(status.isOk());
+        EXPECT_EQ(3, numOfClients);
+    }
+
+    void unregisterMultipleClients() {
+        Status status;
+
+        // Unregister the clients.
+        status = mClient1->unregister();
+        EXPECT_TRUE(status.isOk());
+
+        status = mClient2->unregister();
+        EXPECT_TRUE(status.isOk());
+
+        status = mClient3->unregister();
+        EXPECT_TRUE(status.isOk());
+
+        // Check the number of clients.
+        int32_t numOfClients;
+        status = mService->getNumOfClients(&numOfClients);
+        EXPECT_TRUE(status.isOk());
+        EXPECT_EQ(0, numOfClients);
+    }
+
+    static constexpr bool success = true;
+    static constexpr bool fail = false;
+
+    template <bool expectation = success>
+    bool submit(const std::shared_ptr<ITranscodingClient>& client, int32_t jobId,
+                const char* sourceFilePath, const char* destinationFilePath,
+                TranscodingJobPriority priority = TranscodingJobPriority::kNormal,
+                int bitrateBps = -1) {
+        constexpr bool shouldSucceed = (expectation == success);
+        bool result;
+        TranscodingRequestParcel request;
+        TranscodingJobParcel job;
+
+        request.sourceFilePath = sourceFilePath;
+        request.destinationFilePath = destinationFilePath;
+        request.priority = priority;
+        if (bitrateBps > 0) {
+            request.requestedVideoTrackFormat.emplace(TranscodingVideoTrackFormat());
+            request.requestedVideoTrackFormat->bitrateBps = bitrateBps;
+        }
+        Status status = client->submitRequest(request, &job, &result);
+
+        EXPECT_TRUE(status.isOk());
+        EXPECT_EQ(result, shouldSucceed);
+        if (shouldSucceed) {
+            EXPECT_EQ(job.jobId, jobId);
+        }
+
+        return status.isOk() && (result == shouldSucceed) && (!shouldSucceed || job.jobId == jobId);
+    }
+
+    template <bool expectation = success>
+    bool cancel(const std::shared_ptr<ITranscodingClient>& client, int32_t jobId) {
+        constexpr bool shouldSucceed = (expectation == success);
+        bool result;
+        Status status = client->cancelJob(jobId, &result);
+
+        EXPECT_TRUE(status.isOk());
+        EXPECT_EQ(result, shouldSucceed);
+
+        return status.isOk() && (result == shouldSucceed);
+    }
+
+    template <bool expectation = success>
+    bool getJob(const std::shared_ptr<ITranscodingClient>& client, int32_t jobId,
+                const char* sourceFilePath, const char* destinationFilePath) {
+        constexpr bool shouldSucceed = (expectation == success);
+        bool result;
+        TranscodingJobParcel job;
+        Status status = client->getJobWithId(jobId, &job, &result);
+
+        EXPECT_TRUE(status.isOk());
+        EXPECT_EQ(result, shouldSucceed);
+        if (shouldSucceed) {
+            EXPECT_EQ(job.jobId, jobId);
+            EXPECT_EQ(job.request.sourceFilePath, sourceFilePath);
+        }
+
+        return status.isOk() && (result == shouldSucceed) &&
+               (!shouldSucceed ||
+                (job.jobId == jobId && job.request.sourceFilePath == sourceFilePath &&
+                 job.request.destinationFilePath == destinationFilePath));
+    }
+
+    void deleteFile(const char* path) { unlink(path); }
+
+    std::shared_ptr<IMediaTranscodingService> mService;
+    std::shared_ptr<TestClientCallback> mClientCallback1;
+    std::shared_ptr<TestClientCallback> mClientCallback2;
+    std::shared_ptr<TestClientCallback> mClientCallback3;
+    std::shared_ptr<ITranscodingClient> mClient1;
+    std::shared_ptr<ITranscodingClient> mClient2;
+    std::shared_ptr<ITranscodingClient> mClient3;
+    const char* mTestName;
+};
+
+}  // namespace media
+}  // namespace android
diff --git a/services/mediatranscoding/tests/README.txt b/services/mediatranscoding/tests/README.txt
new file mode 100644
index 0000000..cde465e
--- /dev/null
+++ b/services/mediatranscoding/tests/README.txt
@@ -0,0 +1,8 @@
+mediatranscodingservice_simulated_tests:
+	Tests media transcoding service with simulated transcoder.
+
+mediatranscodingservice_real_tests:
+	Tests media transcoding service with real transcoder. Uses the same test assets
+	as the MediaTranscoder unit tests. Before running the test, please make sure
+	to push the test assets to /sdcard:
+	adb push $TOP/frameworks/av/media/libmediatranscoding/tests/assets /data/local/tmp/TranscodingTestAssets
diff --git a/services/mediatranscoding/tests/TranscodingUidPolicyTestApp/Android.bp b/services/mediatranscoding/tests/TranscodingUidPolicyTestApp/Android.bp
new file mode 100644
index 0000000..95a94fc
--- /dev/null
+++ b/services/mediatranscoding/tests/TranscodingUidPolicyTestApp/Android.bp
@@ -0,0 +1,23 @@
+android_test_helper_app {
+    name: "TranscodingUidPolicy_TestAppA",
+    manifest: "TestAppA.xml",
+    static_libs: ["androidx.test.rules"],
+    sdk_version: "test_current",
+    srcs: ["src/**/*.java"],
+}
+
+android_test_helper_app {
+    name: "TranscodingUidPolicy_TestAppB",
+    manifest: "TestAppB.xml",
+    static_libs: ["androidx.test.rules"],
+    sdk_version: "test_current",
+    srcs: ["src/**/*.java"],
+}
+
+android_test_helper_app {
+    name: "TranscodingUidPolicy_TestAppC",
+    manifest: "TestAppC.xml",
+    static_libs: ["androidx.test.rules"],
+    sdk_version: "test_current",
+    srcs: ["src/**/*.java"],
+}
\ No newline at end of file
diff --git a/services/mediatranscoding/tests/TranscodingUidPolicyTestApp/TestAppA.xml b/services/mediatranscoding/tests/TranscodingUidPolicyTestApp/TestAppA.xml
new file mode 100644
index 0000000..91c14a5
--- /dev/null
+++ b/services/mediatranscoding/tests/TranscodingUidPolicyTestApp/TestAppA.xml
@@ -0,0 +1,33 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2020 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+
+<manifest xmlns:android="http://schemas.android.com/apk/res/android"
+    package="com.android.tests.transcoding.testapp.A"
+    android:versionCode="1"
+    android:versionName="1.0" >
+
+    <application android:label="TestAppA">
+        <activity android:name="com.android.tests.transcoding.MainActivity"
+            android:exported="true">
+            <intent-filter>
+                <action android:name="android.intent.action.MAIN" />
+                <category android:name="android.intent.category.DEFAULT"/>
+                <category android:name="android.intent.category.LAUNCHER" />
+            </intent-filter>
+        </activity>
+    </application>
+</manifest>
+
diff --git a/services/mediatranscoding/tests/TranscodingUidPolicyTestApp/TestAppB.xml b/services/mediatranscoding/tests/TranscodingUidPolicyTestApp/TestAppB.xml
new file mode 100644
index 0000000..4baa35a
--- /dev/null
+++ b/services/mediatranscoding/tests/TranscodingUidPolicyTestApp/TestAppB.xml
@@ -0,0 +1,33 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2020 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+
+<manifest xmlns:android="http://schemas.android.com/apk/res/android"
+    package="com.android.tests.transcoding.testapp.B"
+    android:versionCode="1"
+    android:versionName="1.0" >
+
+    <application android:label="TestAppB">
+        <activity android:name="com.android.tests.transcoding.MainActivity"
+            android:exported="true">
+            <intent-filter>
+                <action android:name="android.intent.action.MAIN" />
+                <category android:name="android.intent.category.DEFAULT"/>
+                <category android:name="android.intent.category.LAUNCHER" />
+            </intent-filter>
+        </activity>
+    </application>
+</manifest>
+
diff --git a/services/mediatranscoding/tests/TranscodingUidPolicyTestApp/TestAppC.xml b/services/mediatranscoding/tests/TranscodingUidPolicyTestApp/TestAppC.xml
new file mode 100644
index 0000000..3dde3af
--- /dev/null
+++ b/services/mediatranscoding/tests/TranscodingUidPolicyTestApp/TestAppC.xml
@@ -0,0 +1,32 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2020 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+
+<manifest xmlns:android="http://schemas.android.com/apk/res/android"
+    package="com.android.tests.transcoding.testapp.C"
+    android:versionCode="1"
+    android:versionName="1.0" >
+
+    <application android:label="TestAppC">
+        <activity android:name="com.android.tests.transcoding.MainActivity"
+            android:exported="true">
+            <intent-filter>
+                <action android:name="android.intent.action.MAIN" />
+                <category android:name="android.intent.category.DEFAULT"/>
+                <category android:name="android.intent.category.LAUNCHER" />
+            </intent-filter>
+        </activity>
+    </application>
+</manifest>
diff --git a/services/mediatranscoding/tests/TranscodingUidPolicyTestApp/src/com/android/tests/transcoding/MainActivity.java b/services/mediatranscoding/tests/TranscodingUidPolicyTestApp/src/com/android/tests/transcoding/MainActivity.java
new file mode 100644
index 0000000..7295073
--- /dev/null
+++ b/services/mediatranscoding/tests/TranscodingUidPolicyTestApp/src/com/android/tests/transcoding/MainActivity.java
@@ -0,0 +1,108 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.android.tests.transcoding;
+
+import android.app.Activity;
+import android.content.Intent;
+import android.os.Bundle;
+import android.util.Log;
+
+/**
+ * This is an empty activity for testing the UID policy of media transcoding service.
+ */
+public class MainActivity extends Activity {
+    private static final String TAG = "MainActivity";
+
+    // Called at the start of the full lifetime.
+    @Override
+    public void onCreate(Bundle savedInstanceState) {
+        super.onCreate(savedInstanceState);
+        // Initialize Activity and inflate the UI.
+    }
+
+    // Called after onCreate has finished, use to restore UI state
+    @Override
+    public void onRestoreInstanceState(Bundle savedInstanceState) {
+        super.onRestoreInstanceState(savedInstanceState);
+        // Restore UI state from the savedInstanceState.
+        // This bundle has also been passed to onCreate.
+        // Will only be called if the Activity has been
+        // killed by the system since it was last visible.
+    }
+
+    // Called before subsequent visible lifetimes
+    // for an activity process.
+    @Override
+    public void onRestart(){
+        super.onRestart();
+        // Load changes knowing that the Activity has already
+        // been visible within this process.
+    }
+
+    // Called at the start of the visible lifetime.
+    @Override
+    public void onStart(){
+        super.onStart();
+        // Apply any required UI change now that the Activity is visible.
+    }
+
+    // Called at the start of the active lifetime.
+    @Override
+    public void onResume(){
+        super.onResume();
+        // Resume any paused UI updates, threads, or processes required
+        // by the Activity but suspended when it was inactive.
+    }
+
+    // Called to save UI state changes at the
+    // end of the active lifecycle.
+    @Override
+    public void onSaveInstanceState(Bundle savedInstanceState) {
+        // Save UI state changes to the savedInstanceState.
+        // This bundle will be passed to onCreate and
+        // onRestoreInstanceState if the process is
+        // killed and restarted by the run time.
+        super.onSaveInstanceState(savedInstanceState);
+    }
+
+    // Called at the end of the active lifetime.
+    @Override
+    public void onPause(){
+        // Suspend UI updates, threads, or CPU intensive processes
+        // that don't need to be updated when the Activity isn't
+        // the active foreground Activity.
+        super.onPause();
+    }
+
+    // Called at the end of the visible lifetime.
+    @Override
+    public void onStop(){
+        // Suspend remaining UI updates, threads, or processing
+        // that aren't required when the Activity isn't visible.
+        // Persist all edits or state changes
+        // as after this call the process is likely to be killed.
+        super.onStop();
+    }
+
+    // Sometimes called at the end of the full lifetime.
+    @Override
+    public void onDestroy(){
+        // Clean up any resources including ending threads,
+        // closing database connections etc.
+        super.onDestroy();
+    }
+
+}
diff --git a/services/mediatranscoding/tests/build_and_run_all_unit_tests.sh b/services/mediatranscoding/tests/build_and_run_all_unit_tests.sh
old mode 100644
new mode 100755
index bcdc7f7..d66b340
--- a/services/mediatranscoding/tests/build_and_run_all_unit_tests.sh
+++ b/services/mediatranscoding/tests/build_and_run_all_unit_tests.sh
@@ -13,11 +13,30 @@
 
 mm
 
-echo "waiting for device"
+# Push the files onto the device.
+. $ANDROID_BUILD_TOP/frameworks/av/media/libmediatranscoding/tests/assets/push_assets.sh
 
-adb root && adb wait-for-device remount && adb sync
+echo "[==========] installing test apps"
+adb root
+adb install -t -r -g -d $ANDROID_TARGET_OUT_TESTCASES/TranscodingUidPolicy_TestAppA/arm64/TranscodingUidPolicy_TestAppA.apk
+adb install -t -r -g -d $ANDROID_TARGET_OUT_TESTCASES/TranscodingUidPolicy_TestAppB/arm64/TranscodingUidPolicy_TestAppB.apk
+adb install -t -r -g -d $ANDROID_TARGET_OUT_TESTCASES/TranscodingUidPolicy_TestAppC/arm64/TranscodingUidPolicy_TestAppC.apk
 
-echo "========================================"
+echo "[==========] waiting for device and sync"
+adb wait-for-device remount && adb sync
 
-echo "testing mediatranscodingservice"
-adb shell /data/nativetest64/mediatranscodingservice_tests/mediatranscodingservice_tests
+echo "[==========] running simulated tests"
+adb shell setprop debug.transcoding.simulated_transcoder true
+adb shell kill -9 `pid media.transcoding`
+#adb shell /data/nativetest64/mediatranscodingservice_simulated_tests/mediatranscodingservice_simulated_tests
+adb shell /data/nativetest/mediatranscodingservice_simulated_tests/mediatranscodingservice_simulated_tests
+
+echo "[==========] running real tests"
+adb shell setprop debug.transcoding.simulated_transcoder false
+adb shell kill -9 `pid media.transcoding`
+#adb shell /data/nativetest64/mediatranscodingservice_real_tests/mediatranscodingservice_real_tests
+adb shell /data/nativetest/mediatranscodingservice_real_tests/mediatranscodingservice_real_tests
+
+echo "[==========] removing debug properties"
+adb shell setprop debug.transcoding.simulated_transcoder \"\"
+adb shell kill -9 `pid media.transcoding`
diff --git a/services/mediatranscoding/tests/mediatranscodingservice_real_tests.cpp b/services/mediatranscoding/tests/mediatranscodingservice_real_tests.cpp
new file mode 100644
index 0000000..1def4b9
--- /dev/null
+++ b/services/mediatranscoding/tests/mediatranscodingservice_real_tests.cpp
@@ -0,0 +1,295 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// Unit Test for MediaTranscodingService.
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "MediaTranscodingServiceRealTest"
+
+#include "MediaTranscodingServiceTestHelper.h"
+
+/*
+ * Tests media transcoding service with real transcoder.
+ *
+ * Uses the same test assets as the MediaTranscoder unit tests. Before running the test,
+ * please make sure to push the test assets to /sdcard:
+ *
+ * adb push $TOP/frameworks/av/media/libmediatranscoding/transcoder/tests/assets /data/local/tmp/TranscodingTestAssets
+ */
+namespace android {
+
+namespace media {
+
+constexpr int64_t kPaddingUs = 400000;
+constexpr int64_t kJobWithPaddingUs = 10000000 + kPaddingUs;
+constexpr int32_t kBitRate = 8 * 1000 * 1000;  // 8Mbs
+
+constexpr const char* kShortSrcPath =
+        "/data/local/tmp/TranscodingTestAssets/cubicle_avc_480x240_aac_24KHz.mp4";
+constexpr const char* kLongSrcPath = "/data/local/tmp/TranscodingTestAssets/longtest_15s.mp4";
+
+#define OUTPATH(name) "/data/local/tmp/MediaTranscodingService_" #name ".MP4"
+
+class MediaTranscodingServiceRealTest : public MediaTranscodingServiceTestBase {
+public:
+    MediaTranscodingServiceRealTest() {}
+
+    void deleteFile(const char* path) { unlink(path); }
+};
+
+TEST_F(MediaTranscodingServiceRealTest, TestInvalidSource) {
+    registerMultipleClients();
+
+    const char* srcPath = "bad_file_uri";
+    const char* dstPath = OUTPATH(TestInvalidSource);
+    deleteFile(dstPath);
+
+    // Submit one job.
+    EXPECT_TRUE(submit(mClient1, 0, srcPath, dstPath, TranscodingJobPriority::kNormal, kBitRate));
+
+    // Check expected error.
+    EXPECT_EQ(mClientCallback1->pop(kPaddingUs), EventTracker::Failed(CLIENT(1), 0));
+    EXPECT_EQ(mClientCallback1->getLastError(), TranscodingErrorCode::kErrorIO);
+
+    unregisterMultipleClients();
+}
+
+TEST_F(MediaTranscodingServiceRealTest, TestPassthru) {
+    registerMultipleClients();
+
+    const char* dstPath = OUTPATH(TestPassthru);
+    deleteFile(dstPath);
+
+    // Submit one job.
+    EXPECT_TRUE(submit(mClient1, 0, kShortSrcPath, dstPath));
+
+    // Wait for job to finish.
+    EXPECT_EQ(mClientCallback1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 0));
+    EXPECT_EQ(mClientCallback1->pop(kJobWithPaddingUs), EventTracker::Finished(CLIENT(1), 0));
+
+    unregisterMultipleClients();
+}
+
+TEST_F(MediaTranscodingServiceRealTest, TestTranscodeVideo) {
+    registerMultipleClients();
+
+    const char* dstPath = OUTPATH(TestTranscodeVideo);
+    deleteFile(dstPath);
+
+    // Submit one job.
+    EXPECT_TRUE(
+            submit(mClient1, 0, kShortSrcPath, dstPath, TranscodingJobPriority::kNormal, kBitRate));
+
+    // Wait for job to finish.
+    EXPECT_EQ(mClientCallback1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 0));
+    EXPECT_EQ(mClientCallback1->pop(kJobWithPaddingUs), EventTracker::Finished(CLIENT(1), 0));
+
+    unregisterMultipleClients();
+}
+
+TEST_F(MediaTranscodingServiceRealTest, TestTranscodeVideoProgress) {
+    registerMultipleClients();
+
+    const char* dstPath = OUTPATH(TestTranscodeVideoProgress);
+    deleteFile(dstPath);
+
+    // Submit one job.
+    EXPECT_TRUE(
+            submit(mClient1, 0, kLongSrcPath, dstPath, TranscodingJobPriority::kNormal, kBitRate));
+
+    // Wait for job to finish.
+    EXPECT_EQ(mClientCallback1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 0));
+    EXPECT_EQ(mClientCallback1->pop(kJobWithPaddingUs), EventTracker::Finished(CLIENT(1), 0));
+
+    // Check the progress update messages are received. For this clip (around ~15 second long),
+    // expect at least 10 updates, and the last update should be 100.
+    int lastProgress;
+    EXPECT_GE(mClientCallback1->getUpdateCount(&lastProgress), 10);
+    EXPECT_EQ(lastProgress, 100);
+
+    unregisterMultipleClients();
+}
+
+/*
+ * Test cancel immediately after start.
+ */
+TEST_F(MediaTranscodingServiceRealTest, TestCancelImmediately) {
+    registerMultipleClients();
+
+    const char* srcPath0 = kLongSrcPath;
+    const char* srcPath1 = kShortSrcPath;
+    const char* dstPath0 = OUTPATH(TestCancelImmediately_Job0);
+    const char* dstPath1 = OUTPATH(TestCancelImmediately_Job1);
+
+    deleteFile(dstPath0);
+    deleteFile(dstPath1);
+    // Submit one job, should start immediately.
+    EXPECT_TRUE(submit(mClient1, 0, srcPath0, dstPath0, TranscodingJobPriority::kNormal, kBitRate));
+    EXPECT_EQ(mClientCallback1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 0));
+    EXPECT_TRUE(getJob(mClient1, 0, srcPath0, dstPath0));
+
+    // Test cancel job immediately, getJob should fail after cancel.
+    EXPECT_TRUE(cancel(mClient1, 0));
+    EXPECT_TRUE(getJob<fail>(mClient1, 0, "", ""));
+
+    // Submit new job, new job should start immediately and finish.
+    EXPECT_TRUE(submit(mClient1, 1, srcPath1, dstPath1, TranscodingJobPriority::kNormal, kBitRate));
+    EXPECT_EQ(mClientCallback1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 1));
+    EXPECT_EQ(mClientCallback1->pop(kJobWithPaddingUs), EventTracker::Finished(CLIENT(1), 1));
+
+    unregisterMultipleClients();
+}
+
+/*
+ * Test cancel in the middle of transcoding.
+ */
+TEST_F(MediaTranscodingServiceRealTest, TestCancelWhileRunning) {
+    registerMultipleClients();
+
+    const char* srcPath0 = kLongSrcPath;
+    const char* srcPath1 = kShortSrcPath;
+    const char* dstPath0 = OUTPATH(TestCancelWhileRunning_Job0);
+    const char* dstPath1 = OUTPATH(TestCancelWhileRunning_Job1);
+
+    deleteFile(dstPath0);
+    deleteFile(dstPath1);
+    // Submit two jobs, job 0 should start immediately, job 1 should be queued.
+    EXPECT_TRUE(submit(mClient1, 0, srcPath0, dstPath0, TranscodingJobPriority::kNormal, kBitRate));
+    EXPECT_TRUE(submit(mClient1, 1, srcPath1, dstPath1, TranscodingJobPriority::kNormal, kBitRate));
+    EXPECT_EQ(mClientCallback1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 0));
+    EXPECT_TRUE(getJob(mClient1, 0, srcPath0, dstPath0));
+    EXPECT_TRUE(getJob(mClient1, 1, srcPath1, dstPath1));
+
+    // Job 0 (longtest) shouldn't finish in 1 seconds.
+    EXPECT_EQ(mClientCallback1->pop(1000000), EventTracker::NoEvent);
+
+    // Now cancel job 0. Job 1 should start immediately and finish.
+    EXPECT_TRUE(cancel(mClient1, 0));
+    EXPECT_TRUE(getJob<fail>(mClient1, 0, "", ""));
+    EXPECT_EQ(mClientCallback1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 1));
+    EXPECT_EQ(mClientCallback1->pop(kJobWithPaddingUs), EventTracker::Finished(CLIENT(1), 1));
+
+    unregisterMultipleClients();
+}
+
+TEST_F(MediaTranscodingServiceRealTest, TestPauseResumeSingleClient) {
+    registerMultipleClients();
+
+    const char* srcPath0 = kLongSrcPath;
+    const char* srcPath1 = kShortSrcPath;
+    const char* dstPath0 = OUTPATH(TestPauseResumeSingleClient_Job0);
+    const char* dstPath1 = OUTPATH(TestPauseResumeSingleClient_Job1);
+    deleteFile(dstPath0);
+    deleteFile(dstPath1);
+
+    // Submit one offline job, should start immediately.
+    EXPECT_TRUE(submit(mClient1, 0, srcPath0, dstPath0, TranscodingJobPriority::kUnspecified,
+                       kBitRate));
+    EXPECT_EQ(mClientCallback1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 0));
+    // Test get job after starts.
+    EXPECT_TRUE(getJob(mClient1, 0, srcPath0, dstPath0));
+
+    // Submit one realtime job.
+    EXPECT_TRUE(submit(mClient1, 1, srcPath1, dstPath1, TranscodingJobPriority::kNormal, kBitRate));
+
+    // Offline job should pause.
+    EXPECT_EQ(mClientCallback1->pop(kPaddingUs), EventTracker::Pause(CLIENT(1), 0));
+    EXPECT_TRUE(getJob(mClient1, 0, srcPath0, dstPath0));
+
+    // Realtime job should start immediately, and run to finish.
+    EXPECT_EQ(mClientCallback1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 1));
+    EXPECT_EQ(mClientCallback1->pop(kJobWithPaddingUs), EventTracker::Finished(CLIENT(1), 1));
+
+    // Test get job after finish fails.
+    EXPECT_TRUE(getJob<fail>(mClient1, 1, "", ""));
+
+    // Then offline job should resume.
+    EXPECT_EQ(mClientCallback1->pop(kPaddingUs), EventTracker::Resume(CLIENT(1), 0));
+    // Test get job after resume.
+    EXPECT_TRUE(getJob(mClient1, 0, srcPath0, dstPath0));
+
+    // Offline job should finish.
+    EXPECT_EQ(mClientCallback1->pop(kJobWithPaddingUs), EventTracker::Finished(CLIENT(1), 0));
+    // Test get job after finish fails.
+    EXPECT_TRUE(getJob<fail>(mClient1, 0, "", ""));
+
+    unregisterMultipleClients();
+}
+
+/*
+ * Basic test for pause/resume with two clients, with one job each.
+ * Top app's job should preempt the other app's job.
+ */
+TEST_F(MediaTranscodingServiceRealTest, TestPauseResumeMultiClients) {
+    ALOGD("TestPauseResumeMultiClients starting...");
+
+    EXPECT_TRUE(ShellHelper::RunCmd("input keyevent KEYCODE_WAKEUP"));
+    EXPECT_TRUE(ShellHelper::RunCmd("wm dismiss-keyguard"));
+    EXPECT_TRUE(ShellHelper::Stop(kClientPackageA));
+    EXPECT_TRUE(ShellHelper::Stop(kClientPackageB));
+    EXPECT_TRUE(ShellHelper::Stop(kClientPackageC));
+
+    registerMultipleClients();
+
+    const char* srcPath0 = kLongSrcPath;
+    const char* srcPath1 = kShortSrcPath;
+    const char* dstPath0 = OUTPATH(TestPauseResumeMultiClients_Client0);
+    const char* dstPath1 = OUTPATH(TestPauseResumeMultiClients_Client1);
+    deleteFile(dstPath0);
+    deleteFile(dstPath1);
+
+    ALOGD("Moving app A to top...");
+    EXPECT_TRUE(ShellHelper::Start(kClientPackageA, kTestActivityName));
+
+    // Submit job to Client1.
+    ALOGD("Submitting job to client1 (app A) ...");
+    EXPECT_TRUE(submit(mClient1, 0, srcPath0, dstPath0, TranscodingJobPriority::kNormal, kBitRate));
+
+    // Client1's job should start immediately.
+    EXPECT_EQ(mClientCallback1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 0));
+
+    ALOGD("Moving app B to top...");
+    EXPECT_TRUE(ShellHelper::Start(kClientPackageB, kTestActivityName));
+
+    // Client1's job should continue to run, since Client2 (app B) doesn't have any job.
+    EXPECT_EQ(mClientCallback1->pop(1000000), EventTracker::NoEvent);
+
+    // Submit job to Client2.
+    ALOGD("Submitting job to client2 (app B) ...");
+    EXPECT_TRUE(submit(mClient2, 0, srcPath1, dstPath1, TranscodingJobPriority::kNormal, kBitRate));
+
+    // Client1's job should pause, client2's job should start.
+    EXPECT_EQ(mClientCallback1->pop(kPaddingUs), EventTracker::Pause(CLIENT(1), 0));
+    EXPECT_EQ(mClientCallback2->pop(kPaddingUs), EventTracker::Start(CLIENT(2), 0));
+
+    // Client2's job should finish, then Client1's job should resume.
+    EXPECT_EQ(mClientCallback2->pop(kJobWithPaddingUs), EventTracker::Finished(CLIENT(2), 0));
+    EXPECT_EQ(mClientCallback1->pop(kPaddingUs), EventTracker::Resume(CLIENT(1), 0));
+
+    // Client1's job should finish.
+    EXPECT_EQ(mClientCallback1->pop(kJobWithPaddingUs), EventTracker::Finished(CLIENT(1), 0));
+
+    unregisterMultipleClients();
+
+    EXPECT_TRUE(ShellHelper::Stop(kClientPackageA));
+    EXPECT_TRUE(ShellHelper::Stop(kClientPackageB));
+    EXPECT_TRUE(ShellHelper::Stop(kClientPackageC));
+
+    ALOGD("TestPauseResumeMultiClients finished.");
+}
+
+}  // namespace media
+}  // namespace android
diff --git a/services/mediatranscoding/tests/mediatranscodingservice_simulated_tests.cpp b/services/mediatranscoding/tests/mediatranscodingservice_simulated_tests.cpp
new file mode 100644
index 0000000..42b5877
--- /dev/null
+++ b/services/mediatranscoding/tests/mediatranscodingservice_simulated_tests.cpp
@@ -0,0 +1,372 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// Unit Test for MediaTranscodingService.
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "MediaTranscodingServiceSimulatedTest"
+
+#include <aidl/android/media/BnTranscodingClientCallback.h>
+#include <aidl/android/media/IMediaTranscodingService.h>
+#include <aidl/android/media/ITranscodingClient.h>
+#include <aidl/android/media/ITranscodingClientCallback.h>
+#include <aidl/android/media/TranscodingJobParcel.h>
+#include <aidl/android/media/TranscodingJobPriority.h>
+#include <aidl/android/media/TranscodingRequestParcel.h>
+#include <android-base/logging.h>
+#include <android/binder_manager.h>
+#include <android/binder_process.h>
+#include <binder/PermissionController.h>
+#include <cutils/multiuser.h>
+#include <gtest/gtest.h>
+#include <utils/Log.h>
+
+#include <iostream>
+#include <list>
+
+#include "MediaTranscodingServiceTestHelper.h"
+#include "SimulatedTranscoder.h"
+
+namespace android {
+
+namespace media {
+
+// Note that -1 is valid and means using calling pid/uid for the service. But only privilege caller could
+// use them. This test is not a privilege caller.
+constexpr int32_t kInvalidClientPid = -5;
+constexpr const char* kInvalidClientName = "";
+constexpr const char* kInvalidClientOpPackageName = "";
+
+constexpr int32_t kClientUseCallingUid = IMediaTranscodingService::USE_CALLING_UID;
+
+constexpr int64_t kPaddingUs = 1000000;
+constexpr int64_t kJobWithPaddingUs = SimulatedTranscoder::kJobDurationUs + kPaddingUs;
+
+constexpr const char* kClientOpPackageName = "TestClientPackage";
+
+class MediaTranscodingServiceSimulatedTest : public MediaTranscodingServiceTestBase {
+public:
+    MediaTranscodingServiceSimulatedTest() {}
+};
+
+TEST_F(MediaTranscodingServiceSimulatedTest, TestRegisterNullClient) {
+    std::shared_ptr<ITranscodingClient> client;
+
+    // Register the client with null callback.
+    Status status = mService->registerClient(nullptr, kClientName, kClientOpPackageName,
+                                             kClientUseCallingUid, kClientUseCallingPid, &client);
+    EXPECT_FALSE(status.isOk());
+}
+
+TEST_F(MediaTranscodingServiceSimulatedTest, TestRegisterClientWithInvalidClientPid) {
+    std::shared_ptr<ITranscodingClient> client;
+
+    // Register the client with the service.
+    Status status = mService->registerClient(mClientCallback1, kClientName, kClientOpPackageName,
+                                             kClientUseCallingUid, kInvalidClientPid, &client);
+    EXPECT_FALSE(status.isOk());
+}
+
+TEST_F(MediaTranscodingServiceSimulatedTest, TestRegisterClientWithInvalidClientName) {
+    std::shared_ptr<ITranscodingClient> client;
+
+    // Register the client with the service.
+    Status status = mService->registerClient(mClientCallback1, kInvalidClientName,
+                                             kInvalidClientOpPackageName, kClientUseCallingUid,
+                                             kClientUseCallingPid, &client);
+    EXPECT_FALSE(status.isOk());
+}
+
+TEST_F(MediaTranscodingServiceSimulatedTest, TestRegisterClientWithInvalidClientPackageName) {
+    std::shared_ptr<ITranscodingClient> client;
+
+    // Register the client with the service.
+    Status status =
+            mService->registerClient(mClientCallback1, kClientName, kInvalidClientOpPackageName,
+                                     kClientUseCallingUid, kClientUseCallingPid, &client);
+    EXPECT_FALSE(status.isOk());
+}
+
+TEST_F(MediaTranscodingServiceSimulatedTest, TestRegisterOneClient) {
+    std::shared_ptr<ITranscodingClient> client;
+
+    Status status = mService->registerClient(mClientCallback1, kClientName, kClientOpPackageName,
+                                             kClientUseCallingUid, kClientUseCallingPid, &client);
+    EXPECT_TRUE(status.isOk());
+
+    // Validate the client.
+    EXPECT_TRUE(client != nullptr);
+
+    // Check the number of Clients.
+    int32_t numOfClients;
+    status = mService->getNumOfClients(&numOfClients);
+    EXPECT_TRUE(status.isOk());
+    EXPECT_EQ(1, numOfClients);
+
+    // Unregister the client.
+    status = client->unregister();
+    EXPECT_TRUE(status.isOk());
+
+    // Check the number of Clients.
+    status = mService->getNumOfClients(&numOfClients);
+    EXPECT_TRUE(status.isOk());
+    EXPECT_EQ(0, numOfClients);
+}
+
+TEST_F(MediaTranscodingServiceSimulatedTest, TestRegisterClientTwice) {
+    std::shared_ptr<ITranscodingClient> client;
+
+    Status status = mService->registerClient(mClientCallback1, kClientName, kClientOpPackageName,
+                                             kClientUseCallingUid, kClientUseCallingPid, &client);
+    EXPECT_TRUE(status.isOk());
+
+    // Validate the client.
+    EXPECT_TRUE(client != nullptr);
+
+    // Register the client again and expects failure.
+    std::shared_ptr<ITranscodingClient> client1;
+    status = mService->registerClient(mClientCallback1, kClientName, kClientOpPackageName,
+                                      kClientUseCallingUid, kClientUseCallingPid, &client1);
+    EXPECT_FALSE(status.isOk());
+
+    // Unregister the client.
+    status = client->unregister();
+    EXPECT_TRUE(status.isOk());
+}
+
+TEST_F(MediaTranscodingServiceSimulatedTest, TestRegisterMultipleClients) {
+    registerMultipleClients();
+    unregisterMultipleClients();
+}
+
+TEST_F(MediaTranscodingServiceSimulatedTest, TestJobIdIndependence) {
+    registerMultipleClients();
+
+    // Submit 2 requests on client1 first.
+    EXPECT_TRUE(submit(mClient1, 0, "test_source_file", "test_destination_file"));
+    EXPECT_TRUE(submit(mClient1, 1, "test_source_file", "test_destination_file"));
+
+    // Submit 2 requests on client2, jobId should be independent for each client.
+    EXPECT_TRUE(submit(mClient2, 0, "test_source_file", "test_destination_file"));
+    EXPECT_TRUE(submit(mClient2, 1, "test_source_file", "test_destination_file"));
+
+    // Cancel all jobs.
+    EXPECT_TRUE(cancel(mClient1, 0));
+    EXPECT_TRUE(cancel(mClient1, 1));
+    EXPECT_TRUE(cancel(mClient2, 0));
+    EXPECT_TRUE(cancel(mClient2, 1));
+
+    unregisterMultipleClients();
+}
+
+TEST_F(MediaTranscodingServiceSimulatedTest, TestSubmitCancelJobs) {
+    registerMultipleClients();
+
+    // Test jobId assignment.
+    EXPECT_TRUE(submit(mClient1, 0, "test_source_file_0", "test_destination_file"));
+    EXPECT_TRUE(submit(mClient1, 1, "test_source_file_1", "test_destination_file"));
+    EXPECT_TRUE(submit(mClient1, 2, "test_source_file_2", "test_destination_file"));
+
+    // Test submit bad request (no valid sourceFilePath) fails.
+    EXPECT_TRUE(submit<fail>(mClient1, 0, "", ""));
+
+    // Test cancel non-existent job fails.
+    EXPECT_TRUE(cancel<fail>(mClient1, 100));
+
+    // Job 0 should start immediately and finish in 2 seconds, followed by Job 1 start.
+    EXPECT_EQ(mClientCallback1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 0));
+    EXPECT_EQ(mClientCallback1->pop(kJobWithPaddingUs), EventTracker::Finished(CLIENT(1), 0));
+    EXPECT_EQ(mClientCallback1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 1));
+
+    // Test cancel valid jobId in random order.
+    // Test cancel finished job fails.
+    EXPECT_TRUE(cancel(mClient1, 2));
+    EXPECT_TRUE(cancel<fail>(mClient1, 0));
+    EXPECT_TRUE(cancel(mClient1, 1));
+
+    // Test cancel job again fails.
+    EXPECT_TRUE(cancel<fail>(mClient1, 1));
+
+    // Test no more events arriving after cancel.
+    EXPECT_EQ(mClientCallback1->pop(kJobWithPaddingUs), EventTracker::NoEvent);
+
+    unregisterMultipleClients();
+}
+
+TEST_F(MediaTranscodingServiceSimulatedTest, TestGetJobs) {
+    registerMultipleClients();
+
+    // Submit 3 requests.
+    EXPECT_TRUE(submit(mClient1, 0, "test_source_file_0", "test_destination_file_0"));
+    EXPECT_TRUE(submit(mClient1, 1, "test_source_file_1", "test_destination_file_1"));
+    EXPECT_TRUE(submit(mClient1, 2, "test_source_file_2", "test_destination_file_2"));
+
+    // Test get jobs by id.
+    EXPECT_TRUE(getJob(mClient1, 2, "test_source_file_2", "test_destination_file_2"));
+    EXPECT_TRUE(getJob(mClient1, 1, "test_source_file_1", "test_destination_file_1"));
+    EXPECT_TRUE(getJob(mClient1, 0, "test_source_file_0", "test_destination_file_0"));
+
+    // Test get job by invalid id fails.
+    EXPECT_TRUE(getJob<fail>(mClient1, 100, "", ""));
+    EXPECT_TRUE(getJob<fail>(mClient1, -1, "", ""));
+
+    // Test get job after cancel fails.
+    EXPECT_TRUE(cancel(mClient1, 2));
+    EXPECT_TRUE(getJob<fail>(mClient1, 2, "", ""));
+
+    // Job 0 should start immediately and finish in 2 seconds, followed by Job 1 start.
+    EXPECT_EQ(mClientCallback1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 0));
+    EXPECT_EQ(mClientCallback1->pop(kJobWithPaddingUs), EventTracker::Finished(CLIENT(1), 0));
+    EXPECT_EQ(mClientCallback1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 1));
+
+    // Test get job after finish fails.
+    EXPECT_TRUE(getJob<fail>(mClient1, 0, "", ""));
+
+    // Test get the remaining job 1.
+    EXPECT_TRUE(getJob(mClient1, 1, "test_source_file_1", "test_destination_file_1"));
+
+    // Cancel remaining job 1.
+    EXPECT_TRUE(cancel(mClient1, 1));
+
+    unregisterMultipleClients();
+}
+
+TEST_F(MediaTranscodingServiceSimulatedTest, TestSubmitCancelWithOfflineJobs) {
+    registerMultipleClients();
+
+    // Submit some offline jobs first.
+    EXPECT_TRUE(submit(mClient1, 0, "test_source_file_0", "test_destination_file_0",
+                       TranscodingJobPriority::kUnspecified));
+    EXPECT_TRUE(submit(mClient1, 1, "test_source_file_1", "test_destination_file_1",
+                       TranscodingJobPriority::kUnspecified));
+
+    // Job 0 should start immediately.
+    EXPECT_EQ(mClientCallback1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 0));
+
+    // Submit more real-time jobs.
+    EXPECT_TRUE(submit(mClient1, 2, "test_source_file_2", "test_destination_file_2"));
+    EXPECT_TRUE(submit(mClient1, 3, "test_source_file_3", "test_destination_file_3"));
+
+    // Job 0 should pause immediately and job 2 should start.
+    EXPECT_EQ(mClientCallback1->pop(kPaddingUs), EventTracker::Pause(CLIENT(1), 0));
+    EXPECT_EQ(mClientCallback1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 2));
+
+    // Job 2 should finish in 2 seconds and job 3 should start.
+    EXPECT_EQ(mClientCallback1->pop(kJobWithPaddingUs), EventTracker::Finished(CLIENT(1), 2));
+    EXPECT_EQ(mClientCallback1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 3));
+
+    // Cancel job 3 now
+    EXPECT_TRUE(cancel(mClient1, 3));
+
+    // Job 0 should resume and finish in 2 seconds, followed by job 1 start.
+    EXPECT_EQ(mClientCallback1->pop(kPaddingUs), EventTracker::Resume(CLIENT(1), 0));
+    EXPECT_EQ(mClientCallback1->pop(kJobWithPaddingUs), EventTracker::Finished(CLIENT(1), 0));
+    EXPECT_EQ(mClientCallback1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 1));
+
+    // Cancel remaining job 1.
+    EXPECT_TRUE(cancel(mClient1, 1));
+
+    unregisterMultipleClients();
+}
+
+TEST_F(MediaTranscodingServiceSimulatedTest, TestClientUseAfterUnregister) {
+    std::shared_ptr<ITranscodingClient> client;
+
+    // Register a client, then unregister.
+    Status status = mService->registerClient(mClientCallback1, kClientName, kClientOpPackageName,
+                                             kClientUseCallingUid, kClientUseCallingPid, &client);
+    EXPECT_TRUE(status.isOk());
+
+    status = client->unregister();
+    EXPECT_TRUE(status.isOk());
+
+    // Test various operations on the client, should fail with ERROR_DISCONNECTED.
+    TranscodingJobParcel job;
+    bool result;
+    status = client->getJobWithId(0, &job, &result);
+    EXPECT_EQ(status.getServiceSpecificError(), IMediaTranscodingService::ERROR_DISCONNECTED);
+
+    status = client->cancelJob(0, &result);
+    EXPECT_EQ(status.getServiceSpecificError(), IMediaTranscodingService::ERROR_DISCONNECTED);
+
+    TranscodingRequestParcel request;
+    status = client->submitRequest(request, &job, &result);
+    EXPECT_EQ(status.getServiceSpecificError(), IMediaTranscodingService::ERROR_DISCONNECTED);
+}
+
+TEST_F(MediaTranscodingServiceSimulatedTest, TestTranscodingUidPolicy) {
+    ALOGD("TestTranscodingUidPolicy starting...");
+
+    EXPECT_TRUE(ShellHelper::RunCmd("input keyevent KEYCODE_WAKEUP"));
+    EXPECT_TRUE(ShellHelper::RunCmd("wm dismiss-keyguard"));
+    EXPECT_TRUE(ShellHelper::Stop(kClientPackageA));
+    EXPECT_TRUE(ShellHelper::Stop(kClientPackageB));
+    EXPECT_TRUE(ShellHelper::Stop(kClientPackageC));
+
+    registerMultipleClients();
+
+    ALOGD("Moving app A to top...");
+    EXPECT_TRUE(ShellHelper::Start(kClientPackageA, kTestActivityName));
+
+    // Submit 3 requests.
+    ALOGD("Submitting job to client1 (app A) ...");
+    EXPECT_TRUE(submit(mClient1, 0, "test_source_file_0", "test_destination_file_0"));
+    EXPECT_TRUE(submit(mClient1, 1, "test_source_file_1", "test_destination_file_1"));
+    EXPECT_TRUE(submit(mClient1, 2, "test_source_file_2", "test_destination_file_2"));
+
+    // Job 0 should start immediately.
+    EXPECT_EQ(mClientCallback1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 0));
+
+    ALOGD("Moving app B to top...");
+    EXPECT_TRUE(ShellHelper::Start(kClientPackageB, kTestActivityName));
+
+    // Job 0 should continue and finish in 2 seconds, then job 1 should start.
+    EXPECT_EQ(mClientCallback1->pop(kJobWithPaddingUs), EventTracker::Finished(CLIENT(1), 0));
+    EXPECT_EQ(mClientCallback1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 1));
+
+    ALOGD("Submitting job to client2 (app B) ...");
+    EXPECT_TRUE(submit(mClient2, 0, "test_source_file_0", "test_destination_file_0"));
+
+    // Client1's job should pause, client2's job should start.
+    EXPECT_EQ(mClientCallback1->pop(kPaddingUs), EventTracker::Pause(CLIENT(1), 1));
+    EXPECT_EQ(mClientCallback2->pop(kPaddingUs), EventTracker::Start(CLIENT(2), 0));
+
+    ALOGD("Moving app A back to top...");
+    EXPECT_TRUE(ShellHelper::Start(kClientPackageA, kTestActivityName));
+
+    // Client2's job should pause, client1's job 1 should resume.
+    EXPECT_EQ(mClientCallback2->pop(kPaddingUs), EventTracker::Pause(CLIENT(2), 0));
+    EXPECT_EQ(mClientCallback1->pop(kPaddingUs), EventTracker::Resume(CLIENT(1), 1));
+
+    // Client2's job 1 should finish in 2 seconds, then its job 2 should start.
+    EXPECT_EQ(mClientCallback1->pop(kJobWithPaddingUs), EventTracker::Finished(CLIENT(1), 1));
+    EXPECT_EQ(mClientCallback1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 2));
+
+    // After client2's jobs finish, client1's job should resume.
+    EXPECT_EQ(mClientCallback1->pop(kJobWithPaddingUs), EventTracker::Finished(CLIENT(1), 2));
+    EXPECT_EQ(mClientCallback2->pop(kPaddingUs), EventTracker::Resume(CLIENT(2), 0));
+
+    unregisterMultipleClients();
+
+    EXPECT_TRUE(ShellHelper::Stop(kClientPackageA));
+    EXPECT_TRUE(ShellHelper::Stop(kClientPackageB));
+    EXPECT_TRUE(ShellHelper::Stop(kClientPackageC));
+
+    ALOGD("TestTranscodingUidPolicy finished.");
+}
+
+}  // namespace media
+}  // namespace android
diff --git a/services/mediatranscoding/tests/mediatranscodingservice_tests.cpp b/services/mediatranscoding/tests/mediatranscodingservice_tests.cpp
deleted file mode 100644
index 5a791fe..0000000
--- a/services/mediatranscoding/tests/mediatranscodingservice_tests.cpp
+++ /dev/null
@@ -1,239 +0,0 @@
-/*
- * Copyright (C) 2019 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-// Unit Test for MediaTranscoding Service.
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "MediaTranscodingServiceTest"
-
-#include <aidl/android/media/BnTranscodingServiceClient.h>
-#include <aidl/android/media/IMediaTranscodingService.h>
-#include <aidl/android/media/ITranscodingServiceClient.h>
-#include <android-base/logging.h>
-#include <android-base/unique_fd.h>
-#include <android/binder_ibinder_jni.h>
-#include <android/binder_manager.h>
-#include <android/binder_process.h>
-#include <cutils/ashmem.h>
-#include <gtest/gtest.h>
-#include <stdlib.h>
-#include <sys/mman.h>
-#include <utils/Log.h>
-
-namespace android {
-
-namespace media {
-
-using Status = ::ndk::ScopedAStatus;
-using aidl::android::media::BnTranscodingServiceClient;
-using aidl::android::media::IMediaTranscodingService;
-using aidl::android::media::ITranscodingServiceClient;
-
-constexpr int32_t kInvalidClientId = -5;
-
-// Note that -1 is valid and means using calling pid/uid for the service. But only privilege caller could
-// use them. This test is not a privilege caller.
-constexpr int32_t kInvalidClientPid = -5;
-constexpr int32_t kInvalidClientUid = -5;
-constexpr const char* kInvalidClientOpPackageName = "";
-
-constexpr int32_t kClientUseCallingPid = -1;
-constexpr int32_t kClientUseCallingUid = -1;
-constexpr const char* kClientOpPackageName = "TestClient";
-
-class MediaTranscodingServiceTest : public ::testing::Test {
-public:
-    MediaTranscodingServiceTest() { ALOGD("MediaTranscodingServiceTest created"); }
-
-    void SetUp() override {
-        ::ndk::SpAIBinder binder(AServiceManager_getService("media.transcoding"));
-        mService = IMediaTranscodingService::fromBinder(binder);
-        if (mService == nullptr) {
-            ALOGE("Failed to connect to the media.trascoding service.");
-            return;
-        }
-    }
-
-    ~MediaTranscodingServiceTest() { ALOGD("MediaTranscodingingServiceTest destroyed"); }
-
-    std::shared_ptr<IMediaTranscodingService> mService = nullptr;
-};
-
-struct TestClient : public BnTranscodingServiceClient {
-    TestClient(const std::shared_ptr<IMediaTranscodingService>& service) : mService(service) {
-        ALOGD("TestClient Created");
-    }
-
-    Status getName(std::string* _aidl_return) override {
-        *_aidl_return = "test_client";
-        return Status::ok();
-    }
-
-    Status onTranscodingFinished(
-            int32_t /* in_jobId */,
-            const ::aidl::android::media::TranscodingResultParcel& /* in_result */) override {
-        return Status::ok();
-    }
-
-    Status onTranscodingFailed(
-            int32_t /* in_jobId */,
-            ::aidl::android::media::TranscodingErrorCode /*in_errorCode */) override {
-        return Status::ok();
-    }
-
-    Status onAwaitNumberOfJobsChanged(int32_t /* in_jobId */, int32_t /* in_oldAwaitNumber */,
-                                      int32_t /* in_newAwaitNumber */) override {
-        return Status::ok();
-    }
-
-    Status onProgressUpdate(int32_t /* in_jobId */, int32_t /* in_progress */) override {
-        return Status::ok();
-    }
-
-    virtual ~TestClient() { ALOGI("TestClient destroyed"); };
-
-private:
-    std::shared_ptr<IMediaTranscodingService> mService;
-};
-
-TEST_F(MediaTranscodingServiceTest, TestRegisterNullClient) {
-    std::shared_ptr<ITranscodingServiceClient> client = nullptr;
-    int32_t clientId = 0;
-    Status status = mService->registerClient(client, kClientOpPackageName, kClientUseCallingUid,
-                                             kClientUseCallingPid, &clientId);
-    EXPECT_FALSE(status.isOk());
-}
-
-TEST_F(MediaTranscodingServiceTest, TestRegisterClientWithInvalidClientPid) {
-    std::shared_ptr<ITranscodingServiceClient> client =
-            ::ndk::SharedRefBase::make<TestClient>(mService);
-    EXPECT_TRUE(client != nullptr);
-
-    // Register the client with the service.
-    int32_t clientId = 0;
-    Status status = mService->registerClient(client, kClientOpPackageName, kClientUseCallingUid,
-                                             kInvalidClientPid, &clientId);
-    EXPECT_FALSE(status.isOk());
-}
-
-TEST_F(MediaTranscodingServiceTest, TestRegisterClientWithInvalidClientUid) {
-    std::shared_ptr<ITranscodingServiceClient> client =
-            ::ndk::SharedRefBase::make<TestClient>(mService);
-    EXPECT_TRUE(client != nullptr);
-
-    // Register the client with the service.
-    int32_t clientId = 0;
-    Status status = mService->registerClient(client, kClientOpPackageName, kInvalidClientUid,
-                                             kClientUseCallingPid, &clientId);
-    EXPECT_FALSE(status.isOk());
-}
-
-TEST_F(MediaTranscodingServiceTest, TestRegisterClientWithInvalidClientPackageName) {
-    std::shared_ptr<ITranscodingServiceClient> client =
-            ::ndk::SharedRefBase::make<TestClient>(mService);
-    EXPECT_TRUE(client != nullptr);
-
-    // Register the client with the service.
-    int32_t clientId = 0;
-    Status status = mService->registerClient(client, kInvalidClientOpPackageName,
-                                             kClientUseCallingUid, kClientUseCallingPid, &clientId);
-    EXPECT_FALSE(status.isOk());
-}
-
-TEST_F(MediaTranscodingServiceTest, TestRegisterOneClient) {
-    std::shared_ptr<ITranscodingServiceClient> client =
-            ::ndk::SharedRefBase::make<TestClient>(mService);
-    EXPECT_TRUE(client != nullptr);
-
-    // Register the client with the service.
-    int32_t clientId = 0;
-    Status status = mService->registerClient(client, kClientOpPackageName, kClientUseCallingPid,
-                                             kClientUseCallingUid, &clientId);
-    ALOGD("client id is %d", clientId);
-    EXPECT_TRUE(status.isOk());
-
-    // Validate the clientId.
-    EXPECT_TRUE(clientId > 0);
-
-    // Check the number of Clients.
-    int32_t numOfClients;
-    status = mService->getNumOfClients(&numOfClients);
-    EXPECT_TRUE(status.isOk());
-    EXPECT_EQ(1, numOfClients);
-
-    // Unregister the client.
-    bool res;
-    status = mService->unregisterClient(clientId, &res);
-    EXPECT_TRUE(status.isOk());
-    EXPECT_TRUE(res);
-}
-
-TEST_F(MediaTranscodingServiceTest, TestUnRegisterClientWithInvalidClientId) {
-    std::shared_ptr<ITranscodingServiceClient> client =
-            ::ndk::SharedRefBase::make<TestClient>(mService);
-    EXPECT_TRUE(client != nullptr);
-
-    // Register the client with the service.
-    int32_t clientId = 0;
-    Status status = mService->registerClient(client, kClientOpPackageName, kClientUseCallingUid,
-                                             kClientUseCallingPid, &clientId);
-    ALOGD("client id is %d", clientId);
-    EXPECT_TRUE(status.isOk());
-
-    // Validate the clientId.
-    EXPECT_TRUE(clientId > 0);
-
-    // Check the number of Clients.
-    int32_t numOfClients;
-    status = mService->getNumOfClients(&numOfClients);
-    EXPECT_TRUE(status.isOk());
-    EXPECT_EQ(1, numOfClients);
-
-    // Unregister the client with invalid ID
-    bool res;
-    mService->unregisterClient(kInvalidClientId, &res);
-    EXPECT_FALSE(res);
-
-    // Unregister the valid client.
-    mService->unregisterClient(clientId, &res);
-}
-
-TEST_F(MediaTranscodingServiceTest, TestRegisterClientTwice) {
-    std::shared_ptr<ITranscodingServiceClient> client =
-            ::ndk::SharedRefBase::make<TestClient>(mService);
-    EXPECT_TRUE(client != nullptr);
-
-    // Register the client with the service.
-    int32_t clientId = 0;
-    Status status = mService->registerClient(client, kClientOpPackageName, kClientUseCallingUid,
-                                             kClientUseCallingPid, &clientId);
-    EXPECT_TRUE(status.isOk());
-
-    // Validate the clientId.
-    EXPECT_TRUE(clientId > 0);
-
-    // Register the client again and expects failure.
-    status = mService->registerClient(client, kClientOpPackageName, kClientUseCallingUid,
-                                      kClientUseCallingPid, &clientId);
-    EXPECT_FALSE(status.isOk());
-
-    // Unregister the valid client.
-    bool res;
-    mService->unregisterClient(clientId, &res);
-}
-
-}  // namespace media
-}  // namespace android
diff --git a/services/oboeservice/AAudioMixer.cpp b/services/oboeservice/AAudioMixer.cpp
index 1c03b7f..c5d40b8 100644
--- a/services/oboeservice/AAudioMixer.cpp
+++ b/services/oboeservice/AAudioMixer.cpp
@@ -33,25 +33,21 @@
 using android::FifoBuffer;
 using android::fifo_frames_t;
 
-AAudioMixer::~AAudioMixer() {
-    delete[] mOutputBuffer;
-}
-
 void AAudioMixer::allocate(int32_t samplesPerFrame, int32_t framesPerBurst) {
     mSamplesPerFrame = samplesPerFrame;
     mFramesPerBurst = framesPerBurst;
     int32_t samplesPerBuffer = samplesPerFrame * framesPerBurst;
-    mOutputBuffer = new float[samplesPerBuffer];
+    mOutputBuffer = std::make_unique<float[]>(samplesPerBuffer);
     mBufferSizeInBytes = samplesPerBuffer * sizeof(float);
 }
 
 void AAudioMixer::clear() {
-    memset(mOutputBuffer, 0, mBufferSizeInBytes);
+    memset(mOutputBuffer.get(), 0, mBufferSizeInBytes);
 }
 
 int32_t AAudioMixer::mix(int streamIndex, FifoBuffer *fifo, bool allowUnderflow) {
     WrappingBuffer wrappingBuffer;
-    float *destination = mOutputBuffer;
+    float *destination = mOutputBuffer.get();
 
 #if AAUDIO_MIXER_ATRACE_ENABLED
     ATRACE_BEGIN("aaMix");
@@ -117,5 +113,5 @@
 }
 
 float *AAudioMixer::getOutputBuffer() {
-    return mOutputBuffer;
+    return mOutputBuffer.get();
 }
diff --git a/services/oboeservice/AAudioMixer.h b/services/oboeservice/AAudioMixer.h
index d5abc5b..dd466ac 100644
--- a/services/oboeservice/AAudioMixer.h
+++ b/services/oboeservice/AAudioMixer.h
@@ -25,7 +25,6 @@
 class AAudioMixer {
 public:
     AAudioMixer() {}
-    ~AAudioMixer();
 
     void allocate(int32_t samplesPerFrame, int32_t framesPerBurst);
 
@@ -47,7 +46,7 @@
 private:
     void mixPart(float *destination, float *source, int32_t numFrames);
 
-    float   *mOutputBuffer = nullptr;
+    std::unique_ptr<float[]> mOutputBuffer;
     int32_t  mSamplesPerFrame = 0;
     int32_t  mFramesPerBurst = 0;
     int32_t  mBufferSizeInBytes = 0;
diff --git a/services/oboeservice/AAudioServiceEndpointCapture.cpp b/services/oboeservice/AAudioServiceEndpointCapture.cpp
index 37d105b..c603e4e 100644
--- a/services/oboeservice/AAudioServiceEndpointCapture.cpp
+++ b/services/oboeservice/AAudioServiceEndpointCapture.cpp
@@ -40,17 +40,12 @@
     mStreamInternal = &mStreamInternalCapture;
 }
 
-AAudioServiceEndpointCapture::~AAudioServiceEndpointCapture() {
-    delete mDistributionBuffer;
-}
-
 aaudio_result_t AAudioServiceEndpointCapture::open(const aaudio::AAudioStreamRequest &request) {
     aaudio_result_t result = AAudioServiceEndpointShared::open(request);
     if (result == AAUDIO_OK) {
-        delete mDistributionBuffer;
         int distributionBufferSizeBytes = getStreamInternal()->getFramesPerBurst()
                                           * getStreamInternal()->getBytesPerFrame();
-        mDistributionBuffer = new uint8_t[distributionBufferSizeBytes];
+        mDistributionBuffer = std::make_unique<uint8_t[]>(distributionBufferSizeBytes);
     }
     return result;
 }
@@ -67,7 +62,8 @@
         int64_t mmapFramesRead = getStreamInternal()->getFramesRead();
 
         // Read audio data from stream using a blocking read.
-        result = getStreamInternal()->read(mDistributionBuffer, getFramesPerBurst(), timeoutNanos);
+        result = getStreamInternal()->read(mDistributionBuffer.get(),
+                getFramesPerBurst(), timeoutNanos);
         if (result == AAUDIO_ERROR_DISCONNECTED) {
             disconnectRegisteredStreams();
             break;
@@ -107,7 +103,7 @@
                                     getFramesPerBurst()) {
                                 streamShared->incrementXRunCount();
                             } else {
-                                fifo->write(mDistributionBuffer, getFramesPerBurst());
+                                fifo->write(mDistributionBuffer.get(), getFramesPerBurst());
                             }
                             clientFramesWritten = fifo->getWriteCounter();
                         }
diff --git a/services/oboeservice/AAudioServiceEndpointCapture.h b/services/oboeservice/AAudioServiceEndpointCapture.h
index 971da9a..ae5a189 100644
--- a/services/oboeservice/AAudioServiceEndpointCapture.h
+++ b/services/oboeservice/AAudioServiceEndpointCapture.h
@@ -17,6 +17,8 @@
 #ifndef AAUDIO_SERVICE_ENDPOINT_CAPTURE_H
 #define AAUDIO_SERVICE_ENDPOINT_CAPTURE_H
 
+#include <memory>
+
 #include "client/AudioStreamInternal.h"
 #include "client/AudioStreamInternalCapture.h"
 
@@ -28,16 +30,15 @@
 class AAudioServiceEndpointCapture : public AAudioServiceEndpointShared {
 public:
     explicit AAudioServiceEndpointCapture(android::AAudioService &audioService);
-    virtual ~AAudioServiceEndpointCapture();
+    virtual ~AAudioServiceEndpointCapture() = default;
 
     aaudio_result_t open(const aaudio::AAudioStreamRequest &request) override;
 
-
     void *callbackLoop() override;
 
 private:
     AudioStreamInternalCapture  mStreamInternalCapture;
-    uint8_t                    *mDistributionBuffer = nullptr;
+    std::unique_ptr<uint8_t[]>  mDistributionBuffer;
 };
 
 } /* namespace aaudio */