Merge "Use new surface flinger API." into jb-mr1-dev
diff --git a/include/media/stagefright/OMXCodec.h b/include/media/stagefright/OMXCodec.h
index bb9e595..e6739ae 100644
--- a/include/media/stagefright/OMXCodec.h
+++ b/include/media/stagefright/OMXCodec.h
@@ -354,6 +354,8 @@
             const void *data, size_t size,
             unsigned *profile, unsigned *level);
 
+    status_t stopOmxComponent_l();
+
     OMXCodec(const OMXCodec &);
     OMXCodec &operator=(const OMXCodec &);
 };
diff --git a/include/media/stagefright/SurfaceMediaSource.h b/include/media/stagefright/SurfaceMediaSource.h
index 4a8e221..724c68d 100644
--- a/include/media/stagefright/SurfaceMediaSource.h
+++ b/include/media/stagefright/SurfaceMediaSource.h
@@ -73,10 +73,9 @@
 
     // For the MediaSource interface for use by StageFrightRecorder:
     virtual status_t start(MetaData *params = NULL);
-
-    virtual status_t stop() { return reset(); }
-    virtual status_t read(
-            MediaBuffer **buffer, const ReadOptions *options = NULL);
+    virtual status_t stop();
+    virtual status_t read(MediaBuffer **buffer,
+            const ReadOptions *options = NULL);
     virtual sp<MetaData> getFormat();
 
     // Get / Set the frame rate used for encoding. Default fps = 30
@@ -204,8 +203,6 @@
     // is a frame available for dequeuing
     Condition mFrameAvailableCondition;
 
-    status_t reset();
-
     // Avoid copying and equating and default constructor
     DISALLOW_IMPLICIT_CONSTRUCTORS(SurfaceMediaSource);
 };
diff --git a/media/libmediaplayerservice/MediaPlayerService.cpp b/media/libmediaplayerservice/MediaPlayerService.cpp
index 58e4723..6346363 100644
--- a/media/libmediaplayerservice/MediaPlayerService.cpp
+++ b/media/libmediaplayerservice/MediaPlayerService.cpp
@@ -1042,9 +1042,14 @@
         void* cookie, int msg, int ext1, int ext2, const Parcel *obj)
 {
     Client* client = static_cast<Client*>(cookie);
+    if (client == NULL) {
+        return;
+    }
 
+    sp<IMediaPlayerClient> c;
     {
         Mutex::Autolock l(client->mLock);
+        c = client->mClient;
         if (msg == MEDIA_PLAYBACK_COMPLETE && client->mNextClient != NULL) {
             if (client->mAudioOutput != NULL)
                 client->mAudioOutput->switchToNextOutput();
@@ -1065,8 +1070,11 @@
         // also access mMetadataUpdated and clears it.
         client->addNewMetadataUpdate(metadata_type);
     }
-    ALOGV("[%d] notify (%p, %d, %d, %d)", client->mConnId, cookie, msg, ext1, ext2);
-    client->mClient->notify(msg, ext1, ext2, obj);
+
+    if (c != NULL) {
+        ALOGV("[%d] notify (%p, %d, %d, %d)", client->mConnId, cookie, msg, ext1, ext2);
+        c->notify(msg, ext1, ext2, obj);
+    }
 }
 
 
diff --git a/media/libstagefright/OMXCodec.cpp b/media/libstagefright/OMXCodec.cpp
index 5615d0f..d0e306c 100755
--- a/media/libstagefright/OMXCodec.cpp
+++ b/media/libstagefright/OMXCodec.cpp
@@ -3621,7 +3621,11 @@
         }
 
         params->setInt32(kKeyNumBuffers, mPortBuffers[kPortIndexInput].size());
-        return mSource->start(params.get());
+        err = mSource->start(params.get());
+        if (err != OK) {
+            stopOmxComponent_l();
+        }
+        return err;
     }
 
     // Decoder case
@@ -3633,8 +3637,16 @@
 
 status_t OMXCodec::stop() {
     CODEC_LOGV("stop mState=%d", mState);
-
     Mutex::Autolock autoLock(mLock);
+    status_t err = stopOmxComponent_l();
+    mSource->stop();
+
+    CODEC_LOGV("stopped in state %d", mState);
+    return err;
+}
+
+status_t OMXCodec::stopOmxComponent_l() {
+    CODEC_LOGV("stopOmxComponent_l mState=%d", mState);
 
     while (isIntermediateState(mState)) {
         mAsyncCompletion.wait(mLock);
@@ -3732,10 +3744,6 @@
         mLeftOverBuffer = NULL;
     }
 
-    mSource->stop();
-
-    CODEC_LOGV("stopped in state %d", mState);
-
     return OK;
 }
 
diff --git a/media/libstagefright/SurfaceMediaSource.cpp b/media/libstagefright/SurfaceMediaSource.cpp
index f1f444e..f5c8c93 100644
--- a/media/libstagefright/SurfaceMediaSource.cpp
+++ b/media/libstagefright/SurfaceMediaSource.cpp
@@ -46,7 +46,7 @@
     mNumFramesEncoded(0),
     mFirstFrameTimestamp(0)
 {
-    ALOGV("SurfaceMediaSource::SurfaceMediaSource");
+    ALOGV("SurfaceMediaSource");
 
     if (bufferWidth == 0 || bufferHeight == 0) {
         ALOGE("Invalid dimensions %dx%d", bufferWidth, bufferHeight);
@@ -77,21 +77,19 @@
 }
 
 SurfaceMediaSource::~SurfaceMediaSource() {
-    ALOGV("SurfaceMediaSource::~SurfaceMediaSource");
-    if (!mStopped) {
-        reset();
-    }
+    ALOGV("~SurfaceMediaSource");
+    CHECK(mStopped == true);
 }
 
 nsecs_t SurfaceMediaSource::getTimestamp() {
-    ALOGV("SurfaceMediaSource::getTimestamp");
+    ALOGV("getTimestamp");
     Mutex::Autolock lock(mMutex);
     return mCurrentTimestamp;
 }
 
 void SurfaceMediaSource::setFrameAvailableListener(
         const sp<FrameAvailableListener>& listener) {
-    ALOGV("SurfaceMediaSource::setFrameAvailableListener");
+    ALOGV("setFrameAvailableListener");
     Mutex::Autolock lock(mMutex);
     mFrameAvailableListener = listener;
 }
@@ -113,6 +111,7 @@
 
 status_t SurfaceMediaSource::setFrameRate(int32_t fps)
 {
+    ALOGV("setFrameRate");
     Mutex::Autolock lock(mMutex);
     const int MAX_FRAME_RATE = 60;
     if (fps < 0 || fps > MAX_FRAME_RATE) {
@@ -128,13 +127,14 @@
 }
 
 int32_t SurfaceMediaSource::getFrameRate( ) const {
+    ALOGV("getFrameRate");
     Mutex::Autolock lock(mMutex);
     return mFrameRate;
 }
 
 status_t SurfaceMediaSource::start(MetaData *params)
 {
-    ALOGV("started!");
+    ALOGV("start");
 
     mStartTimeNs = 0;
     int64_t startTimeUs;
@@ -146,18 +146,15 @@
 }
 
 
-status_t SurfaceMediaSource::reset()
+status_t SurfaceMediaSource::stop()
 {
-    ALOGV("Reset");
-
+    ALOGV("stop");
     Mutex::Autolock lock(mMutex);
-    // TODO: Add waiting on mFrameCompletedCondition here?
+
     mStopped = true;
-
     mFrameAvailableCondition.signal();
-    mBufferQueue->consumerDisconnect();
 
-    return OK;
+    return mBufferQueue->consumerDisconnect();
 }
 
 sp<MetaData> SurfaceMediaSource::getFormat()
@@ -376,7 +373,6 @@
     Mutex::Autolock lock(mMutex);
 
     mFrameAvailableCondition.signal();
-    mStopped = true;
 
     for (int i = 0; i < BufferQueue::NUM_BUFFER_SLOTS; i++) {
        mBufferSlot[i] = 0;
diff --git a/services/camera/libcameraservice/Android.mk b/services/camera/libcameraservice/Android.mk
index 8cccf49..079599a 100644
--- a/services/camera/libcameraservice/Android.mk
+++ b/services/camera/libcameraservice/Android.mk
@@ -10,7 +10,8 @@
     CameraService.cpp \
     CameraClient.cpp \
     Camera2Client.cpp \
-    Camera2Device.cpp
+    Camera2Device.cpp \
+    CameraMetadata.cpp
 
 LOCAL_SHARED_LIBRARIES:= \
     libui \
diff --git a/services/camera/libcameraservice/Camera2Client.cpp b/services/camera/libcameraservice/Camera2Client.cpp
index aa30501..3f12ed0 100644
--- a/services/camera/libcameraservice/Camera2Client.cpp
+++ b/services/camera/libcameraservice/Camera2Client.cpp
@@ -54,16 +54,14 @@
                 cameraId, cameraFacing, clientPid),
         mDeviceInfo(NULL),
         mPreviewStreamId(NO_STREAM),
-        mPreviewRequest(NULL),
         mCallbackStreamId(NO_STREAM),
         mCallbackHeapId(0),
         mCaptureStreamId(NO_STREAM),
-        mCaptureRequest(NULL),
         mRecordingStreamId(NO_STREAM),
-        mRecordingRequest(NULL),
         mRecordingHeapCount(kDefaultRecordingHeapCount)
 {
     ATRACE_CALL();
+    ALOGV("%s: Created client for camera %d", __FUNCTION__, cameraId);
 
     mDevice = new Camera2Device(cameraId);
 
@@ -83,9 +81,14 @@
 status_t Camera2Client::initialize(camera_module_t *module)
 {
     ATRACE_CALL();
-    ALOGV("%s: E", __FUNCTION__);
+    ALOGV("%s: Initializing client for camera %d", __FUNCTION__, mCameraId);
     status_t res;
 
+    mFrameProcessor = new FrameProcessor(this);
+    String8 frameThreadName = String8::format("Camera2Client[%d]::FrameProcessor",
+            mCameraId);
+    mFrameProcessor->run(frameThreadName.string());
+
     res = mDevice->initialize(module);
     if (res != OK) {
         ALOGE("%s: Camera %d: unable to initialize device: %s (%d)",
@@ -94,7 +97,6 @@
     }
 
     res = mDevice->setNotifyCallback(this);
-    res = mDevice->setFrameListener(this);
 
     res = buildDeviceInfo();
     res = buildDefaultParameters();
@@ -116,13 +118,16 @@
 
 Camera2Client::~Camera2Client() {
     ATRACE_CALL();
-    ALOGV("%s: Camera %d: Shutting down", __FUNCTION__, mCameraId);
+    ALOGV("%s: Camera %d: Shutting down client.", __FUNCTION__, mCameraId);
 
     mDestructionStarted = true;
 
     // Rewrite mClientPid to allow shutdown by CameraService
     mClientPid = getCallingPid();
     disconnect();
+
+    mFrameProcessor->requestExit();
+    ALOGV("%s: Camera %d: Shutdown complete", __FUNCTION__, mCameraId);
 }
 
 status_t Camera2Client::dump(int fd, const Vector<String16>& args) {
@@ -291,33 +296,35 @@
     result.appendFormat("    Recording stream ID: %d\n", mRecordingStreamId);
 
     result.append("  Current requests:\n");
-    if (mPreviewRequest != NULL) {
+    if (mPreviewRequest.entryCount() != 0) {
         result.append("    Preview request:\n");
         write(fd, result.string(), result.size());
-        dump_indented_camera_metadata(mPreviewRequest, fd, 2, 6);
+        mPreviewRequest.dump(fd, 2, 6);
     } else {
         result.append("    Preview request: undefined\n");
         write(fd, result.string(), result.size());
     }
 
-    if (mCaptureRequest != NULL) {
+    if (mCaptureRequest.entryCount() != 0) {
         result = "    Capture request:\n";
         write(fd, result.string(), result.size());
-        dump_indented_camera_metadata(mCaptureRequest, fd, 2, 6);
+        mCaptureRequest.dump(fd, 2, 6);
     } else {
         result = "    Capture request: undefined\n";
         write(fd, result.string(), result.size());
     }
 
-    if (mRecordingRequest != NULL) {
+    if (mRecordingRequest.entryCount() != 0) {
         result = "    Recording request:\n";
         write(fd, result.string(), result.size());
-        dump_indented_camera_metadata(mRecordingRequest, fd, 2, 6);
+        mRecordingRequest.dump(fd, 2, 6);
     } else {
         result = "    Recording request: undefined\n";
         write(fd, result.string(), result.size());
     }
 
+    mFrameProcessor->dump(fd, args);
+
     result = "  Device dump:\n";
     write(fd, result.string(), result.size());
 
@@ -523,7 +530,7 @@
             // Already running preview - need to stop and create a new stream
             // TODO: Optimize this so that we don't wait for old stream to drain
             // before spinning up new stream
-            mDevice->setStreamingRequest(NULL);
+            mDevice->clearStreamingRequest();
             k.mParameters.state = WAITING_FOR_PREVIEW_WINDOW;
             break;
     }
@@ -634,7 +641,7 @@
         }
     }
 
-    if (mPreviewRequest == NULL) {
+    if (mPreviewRequest.entryCount() == 0) {
         res = updatePreviewRequest(params);
         if (res != OK) {
             ALOGE("%s: Camera %d: Unable to create preview request: %s (%d)",
@@ -646,20 +653,21 @@
     if (callbacksEnabled) {
         uint8_t outputStreams[2] =
                 { mPreviewStreamId, mCallbackStreamId };
-        res = updateEntry(mPreviewRequest,
+        res = mPreviewRequest.update(
                 ANDROID_REQUEST_OUTPUT_STREAMS,
                 outputStreams, 2);
     } else {
-        res = updateEntry(mPreviewRequest,
+        uint8_t outputStreams[1] = { mPreviewStreamId };
+        res = mPreviewRequest.update(
                 ANDROID_REQUEST_OUTPUT_STREAMS,
-                &mPreviewStreamId, 1);
+                outputStreams, 1);
     }
     if (res != OK) {
         ALOGE("%s: Camera %d: Unable to set up preview request: %s (%d)",
                 __FUNCTION__, mCameraId, strerror(-res), res);
         return res;
     }
-    res = sort_camera_metadata(mPreviewRequest);
+    res = mPreviewRequest.sort();
     if (res != OK) {
         ALOGE("%s: Camera %d: Error sorting preview request: %s (%d)",
                 __FUNCTION__, mCameraId, strerror(-res), res);
@@ -709,7 +717,7 @@
         case RECORD:
             // no break - identical to preview
         case PREVIEW:
-            mDevice->setStreamingRequest(NULL);
+            mDevice->clearStreamingRequest();
             mDevice->waitUntilDrained();
             // no break
         case WAITING_FOR_PREVIEW_WINDOW: {
@@ -814,7 +822,7 @@
         }
     }
 
-    if (mRecordingRequest == NULL) {
+    if (mRecordingRequest.entryCount() == 0) {
         res = updateRecordingRequest(params);
         if (res != OK) {
             ALOGE("%s: Camera %d: Unable to create recording request: %s (%d)",
@@ -826,12 +834,12 @@
     if (callbacksEnabled) {
         uint8_t outputStreams[3] =
                 { mPreviewStreamId, mRecordingStreamId, mCallbackStreamId };
-        res = updateEntry(mRecordingRequest,
+        res = mRecordingRequest.update(
                 ANDROID_REQUEST_OUTPUT_STREAMS,
                 outputStreams, 3);
     } else {
         uint8_t outputStreams[2] = { mPreviewStreamId, mRecordingStreamId };
-        res = updateEntry(mRecordingRequest,
+        res = mRecordingRequest.update(
                 ANDROID_REQUEST_OUTPUT_STREAMS,
                 outputStreams, 2);
     }
@@ -840,7 +848,7 @@
                 __FUNCTION__, mCameraId, strerror(-res), res);
         return res;
     }
-    res = sort_camera_metadata(mRecordingRequest);
+    res = mRecordingRequest.sort();
     if (res != OK) {
         ALOGE("%s: Camera %d: Error sorting recording request: %s (%d)",
                 __FUNCTION__, mCameraId, strerror(-res), res);
@@ -1043,7 +1051,7 @@
         return res;
     }
 
-    if (mCaptureRequest == NULL) {
+    if (mCaptureRequest.entryCount() == 0) {
         res = updateCaptureRequest(k.mParameters);
         if (res != OK) {
             ALOGE("%s: Camera %d: Can't create still image capture request: "
@@ -1052,8 +1060,6 @@
         }
     }
 
-    camera_metadata_entry_t outputStreams;
-
     bool callbacksEnabled = k.mParameters.previewCallbackFlags &
             CAMERA_FRAME_CALLBACK_FLAG_ENABLE_MASK;
     bool recordingEnabled = (k.mParameters.state == RECORD);
@@ -1063,29 +1069,29 @@
     switch ( streamSwitch ) {
         case 0: { // No recording, callbacks
             uint8_t streamIds[2] = { mPreviewStreamId, mCaptureStreamId };
-            res = updateEntry(mCaptureRequest, ANDROID_REQUEST_OUTPUT_STREAMS,
-                    &streamIds, 2);
+            res = mCaptureRequest.update(ANDROID_REQUEST_OUTPUT_STREAMS,
+                    streamIds, 2);
             break;
         }
         case 1: { // Recording
             uint8_t streamIds[3] = { mPreviewStreamId, mRecordingStreamId,
                                      mCaptureStreamId };
-            res = updateEntry(mCaptureRequest, ANDROID_REQUEST_OUTPUT_STREAMS,
-                    &streamIds, 3);
+            res = mCaptureRequest.update(ANDROID_REQUEST_OUTPUT_STREAMS,
+                    streamIds, 3);
             break;
         }
         case 2: { // Callbacks
             uint8_t streamIds[3] = { mPreviewStreamId, mCallbackStreamId,
                                      mCaptureStreamId };
-            res = updateEntry(mCaptureRequest, ANDROID_REQUEST_OUTPUT_STREAMS,
-                    &streamIds, 3);
+            res = mCaptureRequest.update(ANDROID_REQUEST_OUTPUT_STREAMS,
+                    streamIds, 3);
             break;
         }
         case 3: { // Both
             uint8_t streamIds[4] = { mPreviewStreamId, mCallbackStreamId,
                                      mRecordingStreamId, mCaptureStreamId };
-            res = updateEntry(mCaptureRequest, ANDROID_REQUEST_OUTPUT_STREAMS,
-                    &streamIds, 4);
+            res = mCaptureRequest.update(ANDROID_REQUEST_OUTPUT_STREAMS,
+                    streamIds, 4);
             break;
         }
     };
@@ -1095,22 +1101,22 @@
                 __FUNCTION__, mCameraId, strerror(-res), res);
         return res;
     }
-    res = sort_camera_metadata(mCaptureRequest);
+    res = mCaptureRequest.sort();
     if (res != OK) {
         ALOGE("%s: Camera %d: Unable to sort capture request: %s (%d)",
                 __FUNCTION__, mCameraId, strerror(-res), res);
         return res;
     }
 
-    camera_metadata_t *captureCopy = clone_camera_metadata(mCaptureRequest);
-    if (captureCopy == NULL) {
+    CameraMetadata captureCopy = mCaptureRequest;
+    if (captureCopy.entryCount() == 0) {
         ALOGE("%s: Camera %d: Unable to copy capture request for HAL device",
                 __FUNCTION__, mCameraId);
         return NO_MEMORY;
     }
 
     if (k.mParameters.state == PREVIEW) {
-        res = mDevice->setStreamingRequest(NULL);
+        res = mDevice->clearStreamingRequest();
         if (res != OK) {
             ALOGE("%s: Camera %d: Unable to stop preview for still capture: "
                     "%s (%d)",
@@ -1182,7 +1188,7 @@
                     previewWidth, previewHeight);
             return BAD_VALUE;
         }
-        camera_metadata_entry_t availablePreviewSizes =
+        camera_metadata_ro_entry_t availablePreviewSizes =
             staticInfo(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES);
         for (i = 0; i < availablePreviewSizes.count; i += 2 ) {
             if (availablePreviewSizes.data.i32[i] == previewWidth &&
@@ -1203,7 +1209,7 @@
     if (previewFpsRange[0] != k.mParameters.previewFpsRange[0] ||
             previewFpsRange[1] != k.mParameters.previewFpsRange[1]) {
         fpsRangeChanged = true;
-        camera_metadata_entry_t availablePreviewFpsRanges =
+        camera_metadata_ro_entry_t availablePreviewFpsRanges =
             staticInfo(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES, 2);
         for (i = 0; i < availablePreviewFpsRanges.count; i += 2) {
             if ((availablePreviewFpsRanges.data.i32[i] ==
@@ -1229,7 +1235,7 @@
                     "is active!", __FUNCTION__);
             return BAD_VALUE;
         }
-        camera_metadata_entry_t availableFormats =
+        camera_metadata_ro_entry_t availableFormats =
             staticInfo(ANDROID_SCALER_AVAILABLE_FORMATS);
         for (i = 0; i < availableFormats.count; i++) {
             if (availableFormats.data.i32[i] == previewFormat) break;
@@ -1247,7 +1253,7 @@
     if (!fpsRangeChanged) {
         previewFps = newParams.getPreviewFrameRate();
         if (previewFps != k.mParameters.previewFps) {
-            camera_metadata_entry_t availableFrameRates =
+            camera_metadata_ro_entry_t availableFrameRates =
                 staticInfo(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES);
             for (i = 0; i < availableFrameRates.count; i+=2) {
                 if (availableFrameRates.data.i32[i] == previewFps) break;
@@ -1267,7 +1273,7 @@
     newParams.getPictureSize(&pictureWidth, &pictureHeight);
     if (pictureWidth == k.mParameters.pictureWidth ||
             pictureHeight == k.mParameters.pictureHeight) {
-        camera_metadata_entry_t availablePictureSizes =
+        camera_metadata_ro_entry_t availablePictureSizes =
             staticInfo(ANDROID_SCALER_AVAILABLE_JPEG_SIZES);
         for (i = 0; i < availablePictureSizes.count; i+=2) {
             if (availablePictureSizes.data.i32[i] == pictureWidth &&
@@ -1288,7 +1294,7 @@
             newParams.getInt(CameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT);
     if (jpegThumbSize[0] != k.mParameters.jpegThumbSize[0] ||
             jpegThumbSize[1] != k.mParameters.jpegThumbSize[1]) {
-        camera_metadata_entry_t availableJpegThumbSizes =
+        camera_metadata_ro_entry_t availableJpegThumbSizes =
             staticInfo(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES);
         for (i = 0; i < availableJpegThumbSizes.count; i+=2) {
             if (availableJpegThumbSizes.data.i32[i] == jpegThumbSize[0] &&
@@ -1392,7 +1398,7 @@
     int wbMode = wbModeStringToEnum(
         newParams.get(CameraParameters::KEY_WHITE_BALANCE) );
     if (wbMode != k.mParameters.wbMode) {
-        camera_metadata_entry_t availableWbModes =
+        camera_metadata_ro_entry_t availableWbModes =
             staticInfo(ANDROID_CONTROL_AWB_AVAILABLE_MODES);
         for (i = 0; i < availableWbModes.count; i++) {
             if (wbMode == availableWbModes.data.u8[i]) break;
@@ -1409,7 +1415,7 @@
     int effectMode = effectModeStringToEnum(
         newParams.get(CameraParameters::KEY_EFFECT) );
     if (effectMode != k.mParameters.effectMode) {
-        camera_metadata_entry_t availableEffectModes =
+        camera_metadata_ro_entry_t availableEffectModes =
             staticInfo(ANDROID_CONTROL_AVAILABLE_EFFECTS);
         for (i = 0; i < availableEffectModes.count; i++) {
             if (effectMode == availableEffectModes.data.u8[i]) break;
@@ -1426,7 +1432,7 @@
     int antibandingMode = abModeStringToEnum(
         newParams.get(CameraParameters::KEY_ANTIBANDING) );
     if (antibandingMode != k.mParameters.antibandingMode) {
-        camera_metadata_entry_t availableAbModes =
+        camera_metadata_ro_entry_t availableAbModes =
             staticInfo(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES);
         for (i = 0; i < availableAbModes.count; i++) {
             if (antibandingMode == availableAbModes.data.u8[i]) break;
@@ -1444,7 +1450,7 @@
         newParams.get(CameraParameters::KEY_SCENE_MODE) );
     if (sceneMode != k.mParameters.sceneMode &&
             sceneMode != ANDROID_CONTROL_SCENE_MODE_UNSUPPORTED) {
-        camera_metadata_entry_t availableSceneModes =
+        camera_metadata_ro_entry_t availableSceneModes =
             staticInfo(ANDROID_CONTROL_AVAILABLE_SCENE_MODES);
         for (i = 0; i < availableSceneModes.count; i++) {
             if (sceneMode == availableSceneModes.data.u8[i]) break;
@@ -1461,7 +1467,7 @@
     Parameters::flashMode_t flashMode = flashModeStringToEnum(
         newParams.get(CameraParameters::KEY_FLASH_MODE) );
     if (flashMode != k.mParameters.flashMode) {
-        camera_metadata_entry_t flashAvailable =
+        camera_metadata_ro_entry_t flashAvailable =
             staticInfo(ANDROID_FLASH_AVAILABLE, 1, 1);
         if (!flashAvailable.data.u8[0] &&
                 flashMode != Parameters::FLASH_MODE_OFF) {
@@ -1470,7 +1476,7 @@
                     newParams.get(CameraParameters::KEY_FLASH_MODE));
             return BAD_VALUE;
         } else if (flashMode == Parameters::FLASH_MODE_RED_EYE) {
-            camera_metadata_entry_t availableAeModes =
+            camera_metadata_ro_entry_t availableAeModes =
                 staticInfo(ANDROID_CONTROL_AE_AVAILABLE_MODES);
             for (i = 0; i < availableAeModes.count; i++) {
                 if (flashMode == availableAeModes.data.u8[i]) break;
@@ -1494,7 +1500,7 @@
         newParams.get(CameraParameters::KEY_FOCUS_MODE));
     if (focusMode != k.mParameters.focusMode) {
         if (focusMode != Parameters::FOCUS_MODE_FIXED) {
-            camera_metadata_entry_t minFocusDistance =
+            camera_metadata_ro_entry_t minFocusDistance =
                 staticInfo(ANDROID_LENS_MINIMUM_FOCUS_DISTANCE);
             if (minFocusDistance.data.f[0] == 0) {
                 ALOGE("%s: Requested focus mode \"%s\" is not available: "
@@ -1503,7 +1509,7 @@
                         newParams.get(CameraParameters::KEY_FOCUS_MODE));
                 return BAD_VALUE;
             } else if (focusMode != Parameters::FOCUS_MODE_INFINITY) {
-                camera_metadata_entry_t availableFocusModes =
+                camera_metadata_ro_entry_t availableFocusModes =
                     staticInfo(ANDROID_CONTROL_AF_AVAILABLE_MODES);
                 for (i = 0; i < availableFocusModes.count; i++) {
                     if (focusMode == availableFocusModes.data.u8[i]) break;
@@ -1534,7 +1540,7 @@
     // EXPOSURE_COMPENSATION
     int exposureCompensation =
         newParams.getInt(CameraParameters::KEY_EXPOSURE_COMPENSATION);
-    camera_metadata_entry_t exposureCompensationRange =
+    camera_metadata_ro_entry_t exposureCompensationRange =
         staticInfo(ANDROID_CONTROL_AE_EXP_COMPENSATION_RANGE);
     if (exposureCompensation < exposureCompensationRange.data.i32[0] ||
             exposureCompensation > exposureCompensationRange.data.i32[1]) {
@@ -1555,7 +1561,7 @@
     Vector<Parameters::Area> meteringAreas;
     res = parseAreas(newParams.get(CameraParameters::KEY_METERING_AREAS),
             &meteringAreas);
-    if (res == OK) res = validateAreas(focusingAreas, max3aRegions);
+    if (res == OK) res = validateAreas(meteringAreas, max3aRegions);
     if (res != OK) {
         ALOGE("%s: Requested metering areas are malformed: %s",
                 __FUNCTION__,
@@ -1581,7 +1587,7 @@
                     __FUNCTION__);
             return BAD_VALUE;
         }
-        camera_metadata_entry_t availableVideoSizes =
+        camera_metadata_ro_entry_t availableVideoSizes =
             staticInfo(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES);
         for (i = 0; i < availableVideoSizes.count; i += 2 ) {
             if (availableVideoSizes.data.i32[i] == videoWidth &&
@@ -1601,7 +1607,7 @@
     // VIDEO_STABILIZATION
     bool videoStabilization = boolFromString(
         newParams.get(CameraParameters::KEY_VIDEO_STABILIZATION) );
-    camera_metadata_entry_t availableVideoStabilizationModes =
+    camera_metadata_ro_entry_t availableVideoStabilizationModes =
         staticInfo(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES);
     if (videoStabilization && availableVideoStabilizationModes.count == 1) {
         ALOGE("%s: Video stabilization not supported", __FUNCTION__);
@@ -1990,106 +1996,130 @@
             __FUNCTION__, newState, triggerId);
 }
 
-void Camera2Client::onNewFrameAvailable() {
+Camera2Client::FrameProcessor::FrameProcessor(wp<Camera2Client> client):
+        Thread(false), mClient(client) {
+}
+
+Camera2Client::FrameProcessor::~FrameProcessor() {
+    ALOGV("%s: Exit", __FUNCTION__);
+}
+
+void Camera2Client::FrameProcessor::dump(int fd, const Vector<String16>& args) {
+    String8 result("    Latest received frame:\n");
+    write(fd, result.string(), result.size());
+    mLastFrame.dump(fd, 2, 6);
+}
+
+bool Camera2Client::FrameProcessor::threadLoop() {
     status_t res;
-    camera_metadata_t *frame = NULL;
-    do {
-        res = mDevice->getNextFrame(&frame);
-        if (res != OK) {
-            ALOGE("%s: Camera %d: Error getting next frame: %s (%d)",
-                    __FUNCTION__, mCameraId, strerror(-res), res);
-            return;
-        }
-        if (frame != NULL) {
-            camera_metadata_entry_t entry;
-            res = find_camera_metadata_entry(frame, ANDROID_REQUEST_FRAME_COUNT,
-                    &entry);
-            if (res != OK) {
-                ALOGE("%s: Camera %d: Error reading frame number: %s (%d)",
-                        __FUNCTION__, mCameraId, strerror(-res), res);
-                break;
-            }
 
-            res = processFrameFaceDetect(frame);
-            if (res != OK) break;
-
-            free_camera_metadata(frame);
-        }
-    } while (frame != NULL);
-
-    if (frame != NULL) {
-        free_camera_metadata(frame);
+    sp<Camera2Device> device;
+    {
+        sp<Camera2Client> client = mClient.promote();
+        if (client == 0) return false;
+        device = client->mDevice;
     }
+
+    res = device->waitForNextFrame(kWaitDuration);
+    if (res == OK) {
+        sp<Camera2Client> client = mClient.promote();
+        if (client == 0) return false;
+        processNewFrames(client);
+    } else if (res != TIMED_OUT) {
+        ALOGE("Camera2Client::FrameProcessor: Error waiting for new "
+                "frames: %s (%d)", strerror(-res), res);
+    }
+
+    return true;
+}
+
+void Camera2Client::FrameProcessor::processNewFrames(sp<Camera2Client> &client) {
+    status_t res;
+    CameraMetadata frame;
+    while ( (res = client->mDevice->getNextFrame(&frame)) == OK) {
+        camera_metadata_entry_t entry;
+        entry = frame.find(ANDROID_REQUEST_FRAME_COUNT);
+        if (entry.count == 0) {
+            ALOGE("%s: Camera %d: Error reading frame number: %s (%d)",
+                    __FUNCTION__, client->mCameraId, strerror(-res), res);
+            break;
+        }
+
+        res = processFaceDetect(frame, client);
+        if (res != OK) break;
+
+        mLastFrame.acquire(frame);
+    }
+    if (res != NOT_ENOUGH_DATA) {
+        ALOGE("%s: Camera %d: Error getting next frame: %s (%d)",
+                __FUNCTION__, client->mCameraId, strerror(-res), res);
+        return;
+    }
+
     return;
 }
 
-status_t Camera2Client::processFrameFaceDetect(camera_metadata_t *frame) {
+status_t Camera2Client::FrameProcessor::processFaceDetect(
+    const CameraMetadata &frame, sp<Camera2Client> &client) {
     status_t res;
-    camera_metadata_entry_t entry;
+    camera_metadata_ro_entry_t entry;
     bool enableFaceDetect;
     {
-        LockedParameters::Key k(mParameters);
+        LockedParameters::Key k(client->mParameters);
         enableFaceDetect = k.mParameters.enableFaceDetect;
     }
-    res = find_camera_metadata_entry(frame, ANDROID_STATS_FACE_DETECT_MODE,
-            &entry);
-    // TODO: Remove this check once things are more compliant. For now, assume that
-    // if we can't find the face detect mode, then it's probably not working.
-    if (res == NAME_NOT_FOUND) {
+    entry = frame.find(ANDROID_STATS_FACE_DETECT_MODE);
+
+    // TODO: This should be an error once implementations are compliant
+    if (entry.count == 0) {
         return OK;
-    } else if (res != OK) {
-        ALOGE("%s: Camera %d: Error reading face mode: %s (%d)",
-                __FUNCTION__, mCameraId, strerror(-res), res);
-        return res;
     }
+
     uint8_t faceDetectMode = entry.data.u8[0];
 
     if (enableFaceDetect && faceDetectMode != ANDROID_STATS_FACE_DETECTION_OFF) {
-        res = find_camera_metadata_entry(frame, ANDROID_STATS_FACE_RECTANGLES,
-                &entry);
-        if (res != OK) {
-            ALOGE("%s: Camera %d: Error reading face rectangles: %s (%d)",
-                    __FUNCTION__, mCameraId, strerror(-res), res);
+        entry = frame.find(ANDROID_STATS_FACE_RECTANGLES);
+        if (entry.count == 0) {
+            ALOGE("%s: Camera %d: Unable to read face rectangles",
+                    __FUNCTION__, client->mCameraId);
             return res;
         }
         camera_frame_metadata metadata;
         metadata.number_of_faces = entry.count / 4;
         if (metadata.number_of_faces >
-                mDeviceInfo->maxFaces) {
+                client->mDeviceInfo->maxFaces) {
             ALOGE("%s: Camera %d: More faces than expected! (Got %d, max %d)",
-                    __FUNCTION__, mCameraId,
-                    metadata.number_of_faces, mDeviceInfo->maxFaces);
+                    __FUNCTION__, client->mCameraId,
+                    metadata.number_of_faces, client->mDeviceInfo->maxFaces);
             return res;
         }
-        int32_t *faceRects = entry.data.i32;
+        const int32_t *faceRects = entry.data.i32;
 
-        res = find_camera_metadata_entry(frame, ANDROID_STATS_FACE_SCORES,
-                &entry);
-        if (res != OK) {
-            ALOGE("%s: Camera %d: Error reading face scores: %s (%d)",
-                    __FUNCTION__, mCameraId, strerror(-res), res);
+        entry = frame.find(ANDROID_STATS_FACE_SCORES);
+        if (entry.count == 0) {
+            ALOGE("%s: Camera %d: Unable to read face scores",
+                    __FUNCTION__, client->mCameraId);
             return res;
         }
-        uint8_t *faceScores = entry.data.u8;
+        const uint8_t *faceScores = entry.data.u8;
 
-        int32_t *faceLandmarks = NULL;
-        int32_t *faceIds = NULL;
+        const int32_t *faceLandmarks = NULL;
+        const int32_t *faceIds = NULL;
 
         if (faceDetectMode == ANDROID_STATS_FACE_DETECTION_FULL) {
-            res = find_camera_metadata_entry(frame, ANDROID_STATS_FACE_LANDMARKS,
-                    &entry);
-            if (res != OK) {
-                ALOGE("%s: Camera %d: Error reading face landmarks: %s (%d)",
-                        __FUNCTION__, mCameraId, strerror(-res), res);
+            entry = frame.find(ANDROID_STATS_FACE_LANDMARKS);
+            if (entry.count == 0) {
+                ALOGE("%s: Camera %d: Unable to read face landmarks",
+                        __FUNCTION__, client->mCameraId);
                 return res;
             }
             faceLandmarks = entry.data.i32;
 
-            res = find_camera_metadata_entry(frame, ANDROID_STATS_FACE_IDS,
-                    &entry);
-            if (res != OK) {
-                ALOGE("%s: Camera %d: Error reading face IDs: %s (%d)",
-                        __FUNCTION__, mCameraId, strerror(-res), res);
+            entry = frame.find(ANDROID_STATS_FACE_IDS);
+
+            if (entry.count == 0) {
+                ALOGE("%s: Camera %d: Unable to read face IDs",
+                        __FUNCTION__, client->mCameraId);
                 return res;
             }
             faceIds = entry.data.i32;
@@ -2101,20 +2131,26 @@
         for (int i = 0; i < metadata.number_of_faces; i++) {
             camera_face_t face;
 
-            face.rect[0] = arrayXToNormalized(faceRects[i*4 + 0]);
-            face.rect[1] = arrayYToNormalized(faceRects[i*4 + 1]);
-            face.rect[2] = arrayXToNormalized(faceRects[i*4 + 2]);
-            face.rect[3] = arrayYToNormalized(faceRects[i*4 + 3]);
+            face.rect[0] = client->arrayXToNormalized(faceRects[i*4 + 0]);
+            face.rect[1] = client->arrayYToNormalized(faceRects[i*4 + 1]);
+            face.rect[2] = client->arrayXToNormalized(faceRects[i*4 + 2]);
+            face.rect[3] = client->arrayYToNormalized(faceRects[i*4 + 3]);
 
             face.score = faceScores[i];
             if (faceDetectMode == ANDROID_STATS_FACE_DETECTION_FULL) {
                 face.id = faceIds[i];
-                face.left_eye[0] = arrayXToNormalized(faceLandmarks[i*6 + 0]);
-                face.left_eye[1] = arrayYToNormalized(faceLandmarks[i*6 + 1]);
-                face.right_eye[0] = arrayXToNormalized(faceLandmarks[i*6 + 2]);
-                face.right_eye[1] = arrayYToNormalized(faceLandmarks[i*6 + 3]);
-                face.mouth[0] = arrayXToNormalized(faceLandmarks[i*6 + 4]);
-                face.mouth[1] = arrayYToNormalized(faceLandmarks[i*6 + 5]);
+                face.left_eye[0] =
+                        client->arrayXToNormalized(faceLandmarks[i*6 + 0]);
+                face.left_eye[1] =
+                        client->arrayYToNormalized(faceLandmarks[i*6 + 1]);
+                face.right_eye[0] =
+                        client->arrayXToNormalized(faceLandmarks[i*6 + 2]);
+                face.right_eye[1] =
+                        client->arrayYToNormalized(faceLandmarks[i*6 + 3]);
+                face.mouth[0] =
+                        client->arrayXToNormalized(faceLandmarks[i*6 + 4]);
+                face.mouth[1] =
+                        client->arrayYToNormalized(faceLandmarks[i*6 + 5]);
             } else {
                 face.id = 0;
                 face.left_eye[0] = face.left_eye[1] = -2000;
@@ -2126,9 +2162,9 @@
 
         metadata.faces = faces.editArray();
         {
-            Mutex::Autolock iccl(mICameraClientLock);
-            if (mCameraClient != NULL) {
-                mCameraClient->dataCallback(CAMERA_MSG_PREVIEW_METADATA,
+            Mutex::Autolock iccl(client->mICameraClientLock);
+            if (client->mCameraClient != NULL) {
+                client->mCameraClient->dataCallback(CAMERA_MSG_PREVIEW_METADATA,
                         NULL, &metadata);
             }
         }
@@ -2426,23 +2462,19 @@
     }
 }
 
-camera_metadata_entry_t Camera2Client::staticInfo(uint32_t tag,
-        size_t minCount, size_t maxCount) {
+camera_metadata_ro_entry_t Camera2Client::staticInfo(uint32_t tag,
+        size_t minCount, size_t maxCount) const {
     status_t res;
-    camera_metadata_entry_t entry;
-    res = find_camera_metadata_entry(mDevice->info(),
-            tag,
-            &entry);
-    if (CC_UNLIKELY( res != OK )) {
+    camera_metadata_ro_entry_t entry = mDevice->info().find(tag);
+
+    if (CC_UNLIKELY( entry.count == 0 )) {
         const char* tagSection = get_camera_metadata_section_name(tag);
         if (tagSection == NULL) tagSection = "<unknown>";
         const char* tagName = get_camera_metadata_tag_name(tag);
         if (tagName == NULL) tagName = "<unknown>";
 
-        ALOGE("Error finding static metadata entry '%s.%s' (%x): %s (%d)",
-                tagSection, tagName, tag, strerror(-res), res);
-        entry.count = 0;
-        entry.data.u8 = NULL;
+        ALOGE("Error finding static metadata entry '%s.%s' (%x)",
+                tagSection, tagName, tag);
     } else if (CC_UNLIKELY(
             (minCount != 0 && entry.count < minCount) ||
             (maxCount != 0 && entry.count > maxCount) ) ) {
@@ -2453,8 +2485,6 @@
         ALOGE("Malformed static metadata entry '%s.%s' (%x):"
                 "Expected between %d and %d values, but got %d values",
                 tagSection, tagName, tag, minCount, maxCount, entry.count);
-        entry.count = 0;
-        entry.data.u8 = NULL;
     }
 
     return entry;
@@ -2469,13 +2499,13 @@
     DeviceInfo *deviceInfo = new DeviceInfo;
     mDeviceInfo = deviceInfo;
 
-    camera_metadata_entry_t activeArraySize =
+    camera_metadata_ro_entry_t activeArraySize =
         staticInfo(ANDROID_SENSOR_ACTIVE_ARRAY_SIZE, 2, 2);
     if (!activeArraySize.count) return NO_INIT;
     deviceInfo->arrayWidth = activeArraySize.data.i32[0];
     deviceInfo->arrayHeight = activeArraySize.data.i32[1];
 
-    camera_metadata_entry_t availableFaceDetectModes =
+    camera_metadata_ro_entry_t availableFaceDetectModes =
         staticInfo(ANDROID_STATS_AVAILABLE_FACE_DETECT_MODES);
     if (!availableFaceDetectModes.count) return NO_INIT;
 
@@ -2504,7 +2534,7 @@
         }
     }
 
-    camera_metadata_entry_t maxFacesDetected =
+    camera_metadata_ro_entry_t maxFacesDetected =
         staticInfo(ANDROID_STATS_MAX_FACE_COUNT, 1, 1);
     if (!maxFacesDetected.count) return NO_INIT;
 
@@ -2520,7 +2550,7 @@
     status_t res;
     CameraParameters params;
 
-    camera_metadata_entry_t availableProcessedSizes =
+    camera_metadata_ro_entry_t availableProcessedSizes =
         staticInfo(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES, 2);
     if (!availableProcessedSizes.count) return NO_INIT;
 
@@ -2549,7 +2579,7 @@
                 supportedPreviewSizes);
     }
 
-    camera_metadata_entry_t availableFpsRanges =
+    camera_metadata_ro_entry_t availableFpsRanges =
         staticInfo(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES, 2);
     if (!availableFpsRanges.count) return NO_INIT;
 
@@ -2580,7 +2610,7 @@
     k.mParameters.previewTransform = degToTransform(0,
             mCameraFacing == CAMERA_FACING_FRONT);
 
-    camera_metadata_entry_t availableFormats =
+    camera_metadata_ro_entry_t availableFormats =
         staticInfo(ANDROID_SCALER_AVAILABLE_FORMATS);
 
     {
@@ -2648,7 +2678,7 @@
                 supportedPreviewFrameRates);
     }
 
-    camera_metadata_entry_t availableJpegSizes =
+    camera_metadata_ro_entry_t availableJpegSizes =
         staticInfo(ANDROID_SCALER_AVAILABLE_JPEG_SIZES, 2);
     if (!availableJpegSizes.count) return NO_INIT;
 
@@ -2675,7 +2705,7 @@
     params.set(CameraParameters::KEY_SUPPORTED_PICTURE_FORMATS,
             CameraParameters::PIXEL_FORMAT_JPEG);
 
-    camera_metadata_entry_t availableJpegThumbnailSizes =
+    camera_metadata_ro_entry_t availableJpegThumbnailSizes =
         staticInfo(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES, 2);
     if (!availableJpegThumbnailSizes.count) return NO_INIT;
 
@@ -2718,7 +2748,7 @@
     params.set(CameraParameters::KEY_WHITE_BALANCE,
             CameraParameters::WHITE_BALANCE_AUTO);
 
-    camera_metadata_entry_t availableWhiteBalanceModes =
+    camera_metadata_ro_entry_t availableWhiteBalanceModes =
         staticInfo(ANDROID_CONTROL_AWB_AVAILABLE_MODES);
     {
         String8 supportedWhiteBalance;
@@ -2779,7 +2809,7 @@
     params.set(CameraParameters::KEY_EFFECT,
             CameraParameters::EFFECT_NONE);
 
-    camera_metadata_entry_t availableEffects =
+    camera_metadata_ro_entry_t availableEffects =
         staticInfo(ANDROID_CONTROL_AVAILABLE_EFFECTS);
     if (!availableEffects.count) return NO_INIT;
     {
@@ -2839,7 +2869,7 @@
     params.set(CameraParameters::KEY_ANTIBANDING,
             CameraParameters::ANTIBANDING_AUTO);
 
-    camera_metadata_entry_t availableAntibandingModes =
+    camera_metadata_ro_entry_t availableAntibandingModes =
         staticInfo(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES);
     if (!availableAntibandingModes.count) return NO_INIT;
     {
@@ -2881,7 +2911,7 @@
     params.set(CameraParameters::KEY_SCENE_MODE,
             CameraParameters::SCENE_MODE_AUTO);
 
-    camera_metadata_entry_t availableSceneModes =
+    camera_metadata_ro_entry_t availableSceneModes =
         staticInfo(ANDROID_CONTROL_AVAILABLE_SCENE_MODES);
     if (!availableSceneModes.count) return NO_INIT;
     {
@@ -2973,11 +3003,11 @@
         }
     }
 
-    camera_metadata_entry_t flashAvailable =
+    camera_metadata_ro_entry_t flashAvailable =
         staticInfo(ANDROID_FLASH_AVAILABLE, 1, 1);
     if (!flashAvailable.count) return NO_INIT;
 
-    camera_metadata_entry_t availableAeModes =
+    camera_metadata_ro_entry_t availableAeModes =
         staticInfo(ANDROID_CONTROL_AE_AVAILABLE_MODES);
     if (!availableAeModes.count) return NO_INIT;
 
@@ -3009,11 +3039,11 @@
                 CameraParameters::FLASH_MODE_OFF);
     }
 
-    camera_metadata_entry_t minFocusDistance =
+    camera_metadata_ro_entry_t minFocusDistance =
         staticInfo(ANDROID_LENS_MINIMUM_FOCUS_DISTANCE, 1, 1);
     if (!minFocusDistance.count) return NO_INIT;
 
-    camera_metadata_entry_t availableAfModes =
+    camera_metadata_ro_entry_t availableAfModes =
         staticInfo(ANDROID_CONTROL_AF_AVAILABLE_MODES);
     if (!availableAfModes.count) return NO_INIT;
 
@@ -3070,7 +3100,7 @@
                 supportedFocusModes);
     }
 
-    camera_metadata_entry_t max3aRegions =
+    camera_metadata_ro_entry_t max3aRegions =
         staticInfo(ANDROID_CONTROL_MAX_REGIONS, 1, 1);
     if (!max3aRegions.count) return NO_INIT;
 
@@ -3081,14 +3111,14 @@
     k.mParameters.focusingAreas.clear();
     k.mParameters.focusingAreas.add(Parameters::Area(0,0,0,0,0));
 
-    camera_metadata_entry_t availableFocalLengths =
+    camera_metadata_ro_entry_t availableFocalLengths =
         staticInfo(ANDROID_LENS_AVAILABLE_FOCAL_LENGTHS);
     if (!availableFocalLengths.count) return NO_INIT;
 
     float minFocalLength = availableFocalLengths.data.f[0];
     params.setFloat(CameraParameters::KEY_FOCAL_LENGTH, minFocalLength);
 
-    camera_metadata_entry_t sensorSize =
+    camera_metadata_ro_entry_t sensorSize =
         staticInfo(ANDROID_SENSOR_PHYSICAL_SIZE, 2, 2);
     if (!sensorSize.count) return NO_INIT;
 
@@ -3104,7 +3134,7 @@
     params.set(CameraParameters::KEY_EXPOSURE_COMPENSATION,
                 k.mParameters.exposureCompensation);
 
-    camera_metadata_entry_t exposureCompensationRange =
+    camera_metadata_ro_entry_t exposureCompensationRange =
         staticInfo(ANDROID_CONTROL_AE_EXP_COMPENSATION_RANGE, 2, 2);
     if (!exposureCompensationRange.count) return NO_INIT;
 
@@ -3113,7 +3143,7 @@
     params.set(CameraParameters::KEY_MIN_EXPOSURE_COMPENSATION,
             exposureCompensationRange.data.i32[0]);
 
-    camera_metadata_entry_t exposureCompensationStep =
+    camera_metadata_ro_entry_t exposureCompensationStep =
         staticInfo(ANDROID_CONTROL_AE_EXP_COMPENSATION_STEP, 1, 1);
     if (!exposureCompensationStep.count) return NO_INIT;
 
@@ -3143,7 +3173,7 @@
     params.set(CameraParameters::KEY_ZOOM, k.mParameters.zoom);
     params.set(CameraParameters::KEY_MAX_ZOOM, NUM_ZOOM_STEPS - 1);
 
-    camera_metadata_entry_t maxDigitalZoom =
+    camera_metadata_ro_entry_t maxDigitalZoom =
         staticInfo(ANDROID_SCALER_AVAILABLE_MAX_ZOOM, 1, 1);
     if (!maxDigitalZoom.count) return NO_INIT;
 
@@ -3187,7 +3217,7 @@
     params.set(CameraParameters::KEY_VIDEO_STABILIZATION,
             CameraParameters::FALSE);
 
-    camera_metadata_entry_t availableVideoStabilizationModes =
+    camera_metadata_ro_entry_t availableVideoStabilizationModes =
         staticInfo(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES);
     if (!availableVideoStabilizationModes.count) return NO_INIT;
 
@@ -3321,7 +3351,7 @@
 status_t Camera2Client::updatePreviewRequest(const Parameters &params) {
     ATRACE_CALL();
     status_t res;
-    if (mPreviewRequest == NULL) {
+    if (mPreviewRequest.entryCount() == 0) {
         res = mDevice->createDefaultRequest(CAMERA2_TEMPLATE_PREVIEW,
                 &mPreviewRequest);
         if (res != OK) {
@@ -3331,7 +3361,7 @@
         }
     }
 
-    res = updateRequestCommon(mPreviewRequest, params);
+    res = updateRequestCommon(&mPreviewRequest, params);
     if (res != OK) {
         ALOGE("%s: Camera %d: Unable to update common entries of preview "
                 "request: %s (%d)", __FUNCTION__, mCameraId,
@@ -3404,7 +3434,7 @@
     ATRACE_CALL();
     status_t res;
     // Find out buffer size for JPEG
-    camera_metadata_entry_t maxJpegSize =
+    camera_metadata_ro_entry_t maxJpegSize =
             staticInfo(ANDROID_JPEG_MAX_SIZE);
     if (maxJpegSize.count == 0) {
         ALOGE("%s: Camera %d: Can't find ANDROID_JPEG_MAX_SIZE!",
@@ -3471,7 +3501,7 @@
 status_t Camera2Client::updateCaptureRequest(const Parameters &params) {
     ATRACE_CALL();
     status_t res;
-    if (mCaptureRequest == NULL) {
+    if (mCaptureRequest.entryCount() == 0) {
         res = mDevice->createDefaultRequest(CAMERA2_TEMPLATE_STILL_CAPTURE,
                 &mCaptureRequest);
         if (res != OK) {
@@ -3481,7 +3511,7 @@
         }
     }
 
-    res = updateRequestCommon(mCaptureRequest, params);
+    res = updateRequestCommon(&mCaptureRequest, params);
     if (res != OK) {
         ALOGE("%s: Camera %d: Unable to update common entries of capture "
                 "request: %s (%d)", __FUNCTION__, mCameraId,
@@ -3489,46 +3519,39 @@
         return res;
     }
 
-    res = updateEntry(mCaptureRequest,
-            ANDROID_JPEG_THUMBNAIL_SIZE,
+    res = mCaptureRequest.update(ANDROID_JPEG_THUMBNAIL_SIZE,
             params.jpegThumbSize, 2);
     if (res != OK) return res;
-    res = updateEntry(mCaptureRequest,
-            ANDROID_JPEG_THUMBNAIL_QUALITY,
+    res = mCaptureRequest.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
             &params.jpegThumbQuality, 1);
     if (res != OK) return res;
-    res = updateEntry(mCaptureRequest,
-            ANDROID_JPEG_QUALITY,
+    res = mCaptureRequest.update(ANDROID_JPEG_QUALITY,
             &params.jpegQuality, 1);
     if (res != OK) return res;
-    res = updateEntry(mCaptureRequest,
+    res = mCaptureRequest.update(
             ANDROID_JPEG_ORIENTATION,
             &params.jpegRotation, 1);
     if (res != OK) return res;
 
     if (params.gpsEnabled) {
-        res = updateEntry(mCaptureRequest,
+        res = mCaptureRequest.update(
                 ANDROID_JPEG_GPS_COORDINATES,
                 params.gpsCoordinates, 3);
         if (res != OK) return res;
-        res = updateEntry(mCaptureRequest,
+        res = mCaptureRequest.update(
                 ANDROID_JPEG_GPS_TIMESTAMP,
                 &params.gpsTimestamp, 1);
         if (res != OK) return res;
-        res = updateEntry(mCaptureRequest,
+        res = mCaptureRequest.update(
                 ANDROID_JPEG_GPS_PROCESSING_METHOD,
-                params.gpsProcessingMethod.string(),
-                params.gpsProcessingMethod.size());
+                params.gpsProcessingMethod);
         if (res != OK) return res;
     } else {
-        res = deleteEntry(mCaptureRequest,
-                ANDROID_JPEG_GPS_COORDINATES);
+        res = mCaptureRequest.erase(ANDROID_JPEG_GPS_COORDINATES);
         if (res != OK) return res;
-        res = deleteEntry(mCaptureRequest,
-                ANDROID_JPEG_GPS_TIMESTAMP);
+        res = mCaptureRequest.erase(ANDROID_JPEG_GPS_TIMESTAMP);
         if (res != OK) return res;
-        res = deleteEntry(mCaptureRequest,
-                ANDROID_JPEG_GPS_PROCESSING_METHOD);
+        res = mCaptureRequest.erase(ANDROID_JPEG_GPS_PROCESSING_METHOD);
         if (res != OK) return res;
     }
 
@@ -3538,7 +3561,7 @@
 status_t Camera2Client::updateRecordingRequest(const Parameters &params) {
     ATRACE_CALL();
     status_t res;
-    if (mRecordingRequest == NULL) {
+    if (mRecordingRequest.entryCount() == 0) {
         res = mDevice->createDefaultRequest(CAMERA2_TEMPLATE_VIDEO_RECORD,
                 &mRecordingRequest);
         if (res != OK) {
@@ -3548,7 +3571,7 @@
         }
     }
 
-    res = updateRequestCommon(mRecordingRequest, params);
+    res = updateRequestCommon(&mRecordingRequest, params);
     if (res != OK) {
         ALOGE("%s: Camera %d: Unable to update common entries of recording "
                 "request: %s (%d)", __FUNCTION__, mCameraId,
@@ -3616,36 +3639,34 @@
     return OK;
 }
 
-status_t Camera2Client::updateRequestCommon(camera_metadata_t *request,
+status_t Camera2Client::updateRequestCommon(CameraMetadata *request,
         const Parameters &params) {
     ATRACE_CALL();
     status_t res;
-    res = updateEntry(request,
-            ANDROID_CONTROL_AE_TARGET_FPS_RANGE, params.previewFpsRange, 2);
+    res = request->update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
+            params.previewFpsRange, 2);
     if (res != OK) return res;
 
     uint8_t wbMode = params.autoWhiteBalanceLock ?
-            ANDROID_CONTROL_AWB_LOCKED : params.wbMode;
-    res = updateEntry(request,
-            ANDROID_CONTROL_AWB_MODE, &wbMode, 1);
+            (uint8_t)ANDROID_CONTROL_AWB_LOCKED : params.wbMode;
+    res = request->update(ANDROID_CONTROL_AWB_MODE,
+            &wbMode, 1);
     if (res != OK) return res;
-    res = updateEntry(request,
-            ANDROID_CONTROL_EFFECT_MODE, &params.effectMode, 1);
+    res = request->update(ANDROID_CONTROL_EFFECT_MODE,
+            &params.effectMode, 1);
     if (res != OK) return res;
-    res = updateEntry(request,
-            ANDROID_CONTROL_AE_ANTIBANDING_MODE,
+    res = request->update(ANDROID_CONTROL_AE_ANTIBANDING_MODE,
             &params.antibandingMode, 1);
     if (res != OK) return res;
 
     uint8_t controlMode =
             (params.sceneMode == ANDROID_CONTROL_SCENE_MODE_UNSUPPORTED) ?
             ANDROID_CONTROL_AUTO : ANDROID_CONTROL_USE_SCENE_MODE;
-    res = updateEntry(request,
-            ANDROID_CONTROL_MODE, &controlMode, 1);
+    res = request->update(ANDROID_CONTROL_MODE,
+            &controlMode, 1);
     if (res != OK) return res;
     if (controlMode == ANDROID_CONTROL_USE_SCENE_MODE) {
-        res = updateEntry(request,
-                ANDROID_CONTROL_SCENE_MODE,
+        res = request->update(ANDROID_CONTROL_SCENE_MODE,
                 &params.sceneMode, 1);
         if (res != OK) return res;
     }
@@ -3672,11 +3693,11 @@
     }
     if (params.autoExposureLock) aeMode = ANDROID_CONTROL_AE_LOCKED;
 
-    res = updateEntry(request,
-            ANDROID_FLASH_MODE, &flashMode, 1);
+    res = request->update(ANDROID_FLASH_MODE,
+            &flashMode, 1);
     if (res != OK) return res;
-    res = updateEntry(request,
-            ANDROID_CONTROL_AE_MODE, &aeMode, 1);
+    res = request->update(ANDROID_CONTROL_AE_MODE,
+            &aeMode, 1);
     if (res != OK) return res;
 
     float focusDistance = 0; // infinity focus in diopters
@@ -3698,54 +3719,71 @@
                     mCameraId, params.focusMode);
             return BAD_VALUE;
     }
-    res = updateEntry(request,
-            ANDROID_LENS_FOCUS_DISTANCE, &focusDistance, 1);
+    res = request->update(ANDROID_LENS_FOCUS_DISTANCE,
+            &focusDistance, 1);
     if (res != OK) return res;
-    res = updateEntry(request,
-            ANDROID_CONTROL_AF_MODE, &focusMode, 1);
+    res = request->update(ANDROID_CONTROL_AF_MODE,
+            &focusMode, 1);
     if (res != OK) return res;
 
     size_t focusingAreasSize = params.focusingAreas.size() * 5;
     int32_t *focusingAreas = new int32_t[focusingAreasSize];
     for (size_t i = 0; i < focusingAreasSize; i += 5) {
-        focusingAreas[i + 0] = params.focusingAreas[i].left;
-        focusingAreas[i + 1] = params.focusingAreas[i].top;
-        focusingAreas[i + 2] = params.focusingAreas[i].right;
-        focusingAreas[i + 3] = params.focusingAreas[i].bottom;
+        if (params.focusingAreas[i/5].weight != 0) {
+            focusingAreas[i + 0] = normalizedXToArray(params.focusingAreas[i/5].left);
+            focusingAreas[i + 1] = normalizedYToArray(params.focusingAreas[i/5].top);
+            focusingAreas[i + 2] = normalizedXToArray(params.focusingAreas[i/5].right);
+            focusingAreas[i + 3] = normalizedYToArray(params.focusingAreas[i/5].bottom);
+        } else {
+            focusingAreas[i + 0] = 0;
+            focusingAreas[i + 1] = 0;
+            focusingAreas[i + 2] = 0;
+            focusingAreas[i + 3] = 0;
+        }
-        focusingAreas[i + 4] = params.focusingAreas[i].weight;
+        focusingAreas[i + 4] = params.focusingAreas[i/5].weight;
     }
-    res = updateEntry(request,
-            ANDROID_CONTROL_AF_REGIONS, focusingAreas,focusingAreasSize);
-    if (res != OK) return res;
+    res = request->update(ANDROID_CONTROL_AF_REGIONS,
+            focusingAreas, focusingAreasSize);
     delete[] focusingAreas;
+    if (res != OK) return res;
 
-    res = updateEntry(request,
-            ANDROID_CONTROL_AE_EXP_COMPENSATION,
+    res = request->update(ANDROID_CONTROL_AE_EXP_COMPENSATION,
             &params.exposureCompensation, 1);
     if (res != OK) return res;
 
     size_t meteringAreasSize = params.meteringAreas.size() * 5;
     int32_t *meteringAreas = new int32_t[meteringAreasSize];
     for (size_t i = 0; i < meteringAreasSize; i += 5) {
-        meteringAreas[i + 0] = params.meteringAreas[i].left;
-        meteringAreas[i + 1] = params.meteringAreas[i].top;
-        meteringAreas[i + 2] = params.meteringAreas[i].right;
-        meteringAreas[i + 3] = params.meteringAreas[i].bottom;
+        if (params.meteringAreas[i/5].weight != 0) {
+            meteringAreas[i + 0] =
+                normalizedXToArray(params.meteringAreas[i/5].left);
+            meteringAreas[i + 1] =
+                normalizedYToArray(params.meteringAreas[i/5].top);
+            meteringAreas[i + 2] =
+                normalizedXToArray(params.meteringAreas[i/5].right);
+            meteringAreas[i + 3] =
+                normalizedYToArray(params.meteringAreas[i/5].bottom);
+        } else {
+            meteringAreas[i + 0] = 0;
+            meteringAreas[i + 1] = 0;
+            meteringAreas[i + 2] = 0;
+            meteringAreas[i + 3] = 0;
+        }
-        meteringAreas[i + 4] = params.meteringAreas[i].weight;
+        meteringAreas[i + 4] = params.meteringAreas[i/5].weight;
     }
-    res = updateEntry(request,
-            ANDROID_CONTROL_AE_REGIONS, meteringAreas, meteringAreasSize);
-    if (res != OK) return res;
+    res = request->update(ANDROID_CONTROL_AE_REGIONS,
+            meteringAreas, meteringAreasSize);
+    if (res != OK) {
+        delete[] meteringAreas;
+        return res;
+    }
 
-    res = updateEntry(request,
-            ANDROID_CONTROL_AWB_REGIONS, meteringAreas, meteringAreasSize);
-    if (res != OK) return res;
+    res = request->update(ANDROID_CONTROL_AWB_REGIONS,
+            meteringAreas, meteringAreasSize);
     delete[] meteringAreas;
+    if (res != OK) return res;
 
     // Need to convert zoom index into a crop rectangle. The rectangle is
     // chosen to maximize its area on the sensor
 
-    camera_metadata_entry_t maxDigitalZoom =
+    camera_metadata_ro_entry_t maxDigitalZoom =
             staticInfo(ANDROID_SCALER_AVAILABLE_MAX_ZOOM);
+    if (maxDigitalZoom.count == 0) return NO_INIT;
+
     float zoomIncrement = (maxDigitalZoom.data.f[0] - 1) /
             (NUM_ZOOM_STEPS-1);
@@ -3765,8 +3803,8 @@
     zoomTop = (mDeviceInfo->arrayHeight - zoomHeight) / 2;
 
     int32_t cropRegion[3] = { zoomLeft, zoomTop, zoomWidth };
-    res = updateEntry(request,
-            ANDROID_SCALER_CROP_REGION, cropRegion, 3);
+    res = request->update(ANDROID_SCALER_CROP_REGION,
+            cropRegion, 3);
     if (res != OK) return res;
 
     // TODO: Decide how to map recordingHint, or whether just to ignore it
@@ -3774,22 +3812,28 @@
     uint8_t vstabMode = params.videoStabilization ?
             ANDROID_CONTROL_VIDEO_STABILIZATION_ON :
             ANDROID_CONTROL_VIDEO_STABILIZATION_OFF;
-    res = updateEntry(request,
-            ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
+    res = request->update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
             &vstabMode, 1);
     if (res != OK) return res;
 
     uint8_t faceDetectMode = params.enableFaceDetect ?
             mDeviceInfo->bestFaceDetectMode :
             (uint8_t)ANDROID_STATS_FACE_DETECTION_OFF;
-    res = updateEntry(request,
-            ANDROID_STATS_FACE_DETECT_MODE,
+    res = request->update(ANDROID_STATS_FACE_DETECT_MODE,
             &faceDetectMode, 1);
     if (res != OK) return res;
 
     return OK;
 }
 
+int Camera2Client::normalizedXToArray(int x) const {
+    return (x + 1000) * (mDeviceInfo->arrayWidth - 1) / 2000;
+}
+
+int Camera2Client::normalizedYToArray(int y) const {
+    return (y + 1000) * (mDeviceInfo->arrayHeight - 1) / 2000;
+}
+
 int Camera2Client::arrayXToNormalized(int width) const {
     return width * 2000 / (mDeviceInfo->arrayWidth - 1) - 1000;
 }
@@ -3798,50 +3842,6 @@
     return height * 2000 / (mDeviceInfo->arrayHeight - 1) - 1000;
 }
 
-status_t Camera2Client::updateEntry(camera_metadata_t *buffer,
-        uint32_t tag, const void *data, size_t data_count) {
-    camera_metadata_entry_t entry;
-    status_t res;
-    res = find_camera_metadata_entry(buffer, tag, &entry);
-    if (res == NAME_NOT_FOUND) {
-        res = add_camera_metadata_entry(buffer,
-                tag, data, data_count);
-    } else if (res == OK) {
-        res = update_camera_metadata_entry(buffer,
-                entry.index, data, data_count, NULL);
-    }
-
-    if (res != OK) {
-        ALOGE("%s: Unable to update metadata entry %s.%s (%x): %s (%d)",
-                __FUNCTION__, get_camera_metadata_section_name(tag),
-                get_camera_metadata_tag_name(tag), tag, strerror(-res), res);
-    }
-    return res;
-}
-
-status_t Camera2Client::deleteEntry(camera_metadata_t *buffer, uint32_t tag) {
-    camera_metadata_entry_t entry;
-    status_t res;
-    res = find_camera_metadata_entry(buffer, tag, &entry);
-    if (res == NAME_NOT_FOUND) {
-        return OK;
-    } else if (res != OK) {
-        ALOGE("%s: Error looking for entry %s.%s (%x): %s %d",
-                __FUNCTION__,
-                get_camera_metadata_section_name(tag),
-                get_camera_metadata_tag_name(tag), tag, strerror(-res), res);
-        return res;
-    }
-    res = delete_camera_metadata_entry(buffer, entry.index);
-    if (res != OK) {
-        ALOGE("%s: Error deleting entry %s.%s (%x): %s %d",
-                __FUNCTION__,
-                get_camera_metadata_section_name(tag),
-                get_camera_metadata_tag_name(tag), tag, strerror(-res), res);
-    }
-    return res;
-}
-
 int Camera2Client::formatStringToEnum(const char *format) {
     return
         !strcmp(format, CameraParameters::PIXEL_FORMAT_YUV422SP) ?
@@ -4125,7 +4125,7 @@
             return width * height * 2;
         case HAL_PIXEL_FORMAT_YV12: {      // YV12
             size_t ySize = stride * height;
-            size_t uvStride = (stride / 2 + 0xF) & ~0x10;
+            size_t uvStride = (stride / 2 + 0xF) & ~0xF;
             size_t uvSize = uvStride * height / 2;
             return ySize + uvSize * 2;
         }
diff --git a/services/camera/libcameraservice/Camera2Client.h b/services/camera/libcameraservice/Camera2Client.h
index 028d458..a3ab128 100644
--- a/services/camera/libcameraservice/Camera2Client.h
+++ b/services/camera/libcameraservice/Camera2Client.h
@@ -33,8 +33,7 @@
  */
 class Camera2Client :
         public CameraService::Client,
-        public Camera2Device::NotificationListener,
-        public Camera2Device::FrameListener
+        public Camera2Device::NotificationListener
 {
 public:
     // ICamera interface (see ICamera for details)
@@ -83,7 +82,6 @@
     virtual void notifyAutoExposure(uint8_t newState, int triggerId);
     virtual void notifyAutoWhitebalance(uint8_t newState, int triggerId);
 
-    virtual void onNewFrameAvailable();
 private:
     enum State {
         DISCONNECTED,
@@ -153,10 +151,10 @@
         int64_t gpsTimestamp;
         String8 gpsProcessingMethod;
 
-        int wbMode;
-        int effectMode;
-        int antibandingMode;
-        int sceneMode;
+        uint8_t wbMode;
+        uint8_t effectMode;
+        uint8_t antibandingMode;
+        uint8_t sceneMode;
 
         enum flashMode_t {
             FLASH_MODE_OFF = 0,
@@ -298,14 +296,34 @@
     // Used with stream IDs
     static const int NO_STREAM = -1;
 
-    /* Output frame metadata processing methods */
+    /* Output frame metadata processing thread.  This thread waits for new
+     * frames from the device, and analyzes them as necessary.
+     */
+    class FrameProcessor: public Thread {
+      public:
+        FrameProcessor(wp<Camera2Client> client);
+        ~FrameProcessor();
 
-    status_t processFrameFaceDetect(camera_metadata_t *frame);
+        void dump(int fd, const Vector<String16>& args);
+      private:
+        static const nsecs_t kWaitDuration = 10000000; // 10 ms
+        wp<Camera2Client> mClient;
+
+        virtual bool threadLoop();
+
+        void processNewFrames(sp<Camera2Client> &client);
+        status_t processFaceDetect(const CameraMetadata &frame,
+                sp<Camera2Client> &client);
+
+        CameraMetadata mLastFrame;
+    };
+
+    sp<FrameProcessor> mFrameProcessor;
 
     /* Preview related members */
 
     int mPreviewStreamId;
-    camera_metadata_t *mPreviewRequest;
+    CameraMetadata mPreviewRequest;
     sp<IBinder> mPreviewSurface;
     sp<ANativeWindow> mPreviewWindow;
 
@@ -351,7 +369,7 @@
         Camera2Client *mParent;
     };
     sp<CaptureWaiter>  mCaptureWaiter;
-    camera_metadata_t *mCaptureRequest;
+    CameraMetadata mCaptureRequest;
     sp<Camera2Heap>    mCaptureHeap;
     // Handle captured image buffers
     void onCaptureAvailable();
@@ -375,7 +393,7 @@
         Camera2Client *mParent;
     };
     sp<RecordingWaiter>  mRecordingWaiter;
-    camera_metadata_t *mRecordingRequest;
+    CameraMetadata mRecordingRequest;
     sp<Camera2Heap> mRecordingHeap;
 
     static const size_t kDefaultRecordingHeapCount = 8;
@@ -431,8 +449,8 @@
     // checking the number of values in the entry. 0 for max/minCount means to
     // do no bounds check in that direction. In case of error, the entry data
     // pointer is null and the count is 0.
-    camera_metadata_entry_t staticInfo(uint32_t tag,
-            size_t minCount=0, size_t maxCount=0);
+    camera_metadata_ro_entry_t staticInfo(uint32_t tag,
+            size_t minCount=0, size_t maxCount=0) const;
 
     // Extract frequently-used camera static information into mDeviceInfo
     status_t buildDeviceInfo();
@@ -441,24 +459,16 @@
     status_t buildDefaultParameters();
 
     // Update parameters all requests use, based on mParameters
-    status_t updateRequestCommon(camera_metadata_t *request, const Parameters &params);
+    status_t updateRequestCommon(CameraMetadata *request, const Parameters &params);
 
     // Map from sensor active array pixel coordinates to normalized camera
     // parameter coordinates. The former are (0,0)-(array width - 1, array height
     // - 1), the latter from (-1000,-1000)-(1000,1000)
+    int normalizedXToArray(int x) const;
+    int normalizedYToArray(int y) const;
     int arrayXToNormalized(int width) const;
     int arrayYToNormalized(int height) const;
 
-    // Update specific metadata entry with new values. Adds entry if it does not
-    // exist, which will invalidate sorting
-    static status_t updateEntry(camera_metadata_t *buffer,
-            uint32_t tag, const void *data, size_t data_count);
-
-    // Remove metadata entry. Will invalidate sorting. If entry does not exist,
-    // does nothing.
-    static status_t deleteEntry(camera_metadata_t *buffer,
-            uint32_t tag);
-
     // Convert camera1 preview format string to camera2 enum
     static int formatStringToEnum(const char *format);
     static const char *formatEnumToString(int format);
diff --git a/services/camera/libcameraservice/Camera2Device.cpp b/services/camera/libcameraservice/Camera2Device.cpp
index 583701d..35c4e74 100644
--- a/services/camera/libcameraservice/Camera2Device.cpp
+++ b/services/camera/libcameraservice/Camera2Device.cpp
@@ -33,12 +33,12 @@
         mId(id),
         mDevice(NULL)
 {
-    ALOGV("%s: E", __FUNCTION__);
+    ALOGV("%s: Created device for camera %d", __FUNCTION__, id);
 }
 
 Camera2Device::~Camera2Device()
 {
-    ALOGV("%s: E", __FUNCTION__);
+    ALOGV("%s: Shutting down device for camera %d", __FUNCTION__, mId);
     if (mDevice) {
         status_t res;
         res = mDevice->common.close(&mDevice->common);
@@ -49,11 +49,12 @@
         }
         mDevice = NULL;
     }
+    ALOGV("%s: Shutdown complete", __FUNCTION__);
 }
 
 status_t Camera2Device::initialize(camera_module_t *module)
 {
-    ALOGV("%s: E", __FUNCTION__);
+    ALOGV("%s: Initializing device for camera %d", __FUNCTION__, mId);
 
     status_t res;
     char name[10];
@@ -155,25 +156,28 @@
     return res;
 }
 
-camera_metadata_t *Camera2Device::info() {
+const CameraMetadata& Camera2Device::info() const {
     ALOGVV("%s: E", __FUNCTION__);
 
     return mDeviceInfo;
 }
 
-status_t Camera2Device::capture(camera_metadata_t* request) {
+status_t Camera2Device::capture(CameraMetadata &request) {
     ALOGV("%s: E", __FUNCTION__);
 
-    mRequestQueue.enqueue(request);
+    mRequestQueue.enqueue(request.release());
     return OK;
 }
 
 
-status_t Camera2Device::setStreamingRequest(camera_metadata_t* request) {
+status_t Camera2Device::setStreamingRequest(const CameraMetadata &request) {
     ALOGV("%s: E", __FUNCTION__);
+    CameraMetadata streamRequest(request);
+    return mRequestQueue.setStreamSlot(streamRequest.release());
+}
 
-    mRequestQueue.setStreamSlot(request);
-    return OK;
+status_t Camera2Device::clearStreamingRequest() {
+    return mRequestQueue.setStreamSlot(NULL);
 }
 
 status_t Camera2Device::createStream(sp<ANativeWindow> consumer,
@@ -269,10 +273,14 @@
 }
 
 status_t Camera2Device::createDefaultRequest(int templateId,
-        camera_metadata_t **request) {
+        CameraMetadata *request) {
+    status_t err;
     ALOGV("%s: E", __FUNCTION__);
-    return mDevice->ops->construct_default_request(
-        mDevice, templateId, request);
+    camera_metadata_t *rawRequest = NULL;
+    err = mDevice->ops->construct_default_request(
+        mDevice, templateId, &rawRequest);
+    if (err == OK) request->acquire(rawRequest);
+    return err;
 }
 
 status_t Camera2Device::waitUntilDrained() {
@@ -340,12 +348,20 @@
     }
 }
 
-status_t Camera2Device::setFrameListener(FrameListener *listener) {
-    return mFrameQueue.setListener(listener);
+status_t Camera2Device::waitForNextFrame(nsecs_t timeout) {
+    return mFrameQueue.waitForBuffer(timeout);
 }
 
-status_t Camera2Device::getNextFrame(camera_metadata_t **frame) {
-    return mFrameQueue.dequeue(frame);
+status_t Camera2Device::getNextFrame(CameraMetadata *frame) {
+    status_t res;
+    camera_metadata_t *rawFrame = NULL;
+    res = mFrameQueue.dequeue(&rawFrame);
+    if (rawFrame == NULL) {
+        return NOT_ENOUGH_DATA;
+    } else if (res == OK) {
+        frame->acquire(rawFrame);
+    }
+    return res;
 }
 
 status_t Camera2Device::triggerAutofocus(uint32_t id) {
@@ -392,13 +408,6 @@
 }
 
 /**
- * Camera2Device::FrameListener
- */
-
-Camera2Device::FrameListener::~FrameListener() {
-}
-
-/**
  * Camera2Device::MetadataQueue
  */
 
@@ -407,8 +416,7 @@
             mFrameCount(0),
             mCount(0),
             mStreamSlotCount(0),
-            mSignalConsumer(true),
-            mListener(NULL)
+            mSignalConsumer(true)
 {
     camera2_request_queue_src_ops::dequeue_request = consumer_dequeue;
     camera2_request_queue_src_ops::request_count = consumer_buffer_count;
@@ -526,12 +534,6 @@
     return OK;
 }
 
-status_t Camera2Device::MetadataQueue::setListener(FrameListener *listener) {
-    Mutex::Autolock l(mMutex);
-    mListener = listener;
-    return OK;
-}
-
 status_t Camera2Device::MetadataQueue::setStreamSlot(camera_metadata_t *buf)
 {
     ALOGV("%s: E", __FUNCTION__);
@@ -644,13 +646,6 @@
         res = mDevice->ops->notify_request_queue_not_empty(mDevice);
         mMutex.lock();
     }
-    if (mListener != NULL) {
-        FrameListener *listener = mListener;
-        mMutex.unlock();
-        ALOGVV("%s: Signaling listener", __FUNCTION__);
-        listener->onNewFrameAvailable();
-        mMutex.lock();
-    }
     return res;
 }
 
diff --git a/services/camera/libcameraservice/Camera2Device.h b/services/camera/libcameraservice/Camera2Device.h
index 790b946..223d77a 100644
--- a/services/camera/libcameraservice/Camera2Device.h
+++ b/services/camera/libcameraservice/Camera2Device.h
@@ -27,6 +27,7 @@
 #include <utils/Vector.h>
 
 #include "hardware/camera2.h"
+#include "CameraMetadata.h"
 
 namespace android {
 
@@ -41,21 +42,26 @@
     status_t dump(int fd, const Vector<String16>& args);
 
     /**
-     * Get a pointer to the device's static characteristics metadata buffer
+     * The device's static characteristics metadata buffer
      */
-    camera_metadata_t* info();
+    const CameraMetadata& info() const;
 
     /**
      * Submit request for capture. The Camera2Device takes ownership of the
      * passed-in buffer.
      */
-    status_t capture(camera_metadata_t *request);
+    status_t capture(CameraMetadata &request);
 
     /**
      * Submit request for streaming. The Camera2Device makes a copy of the
      * passed-in buffer and the caller retains ownership.
      */
-    status_t setStreamingRequest(camera_metadata_t *request);
+    status_t setStreamingRequest(const CameraMetadata &request);
+
+    /**
+     * Clear the streaming request slot.
+     */
+    status_t clearStreamingRequest();
 
     /**
      * Create an output stream of the requested size and format.
@@ -92,8 +98,7 @@
      * Create a metadata buffer with fields that the HAL device believes are
      * best for the given use case
      */
-    status_t createDefaultRequest(int templateId,
-            camera_metadata_t **request);
+    status_t createDefaultRequest(int templateId, CameraMetadata *request);
 
     /**
      * Wait until all requests have been processed. Returns INVALID_OPERATION if
@@ -124,25 +129,16 @@
     status_t setNotifyCallback(NotificationListener *listener);
 
     /**
-     * Abstract class for HAL frame available notifications
+     * Wait for a new frame to be produced, with timeout in nanoseconds.
+     * Returns TIMED_OUT when no frame is produced within the specified duration.
      */
-    class FrameListener {
-      public:
-        virtual void onNewFrameAvailable() = 0;
-      protected:
-        virtual ~FrameListener();
-    };
-
-    /**
-     * Set a frame listener to be notified about new frames.
-     */
-    status_t setFrameListener(FrameListener *listener);
+    status_t waitForNextFrame(nsecs_t timeout);
 
     /**
      * Get next metadata frame from the frame queue. Returns NULL if the queue
      * is empty; caller takes ownership of the metadata buffer.
      */
-    status_t getNextFrame(camera_metadata_t **frame);
+    status_t getNextFrame(CameraMetadata *frame);
 
     /**
      * Trigger auto-focus. The latest ID used in a trigger autofocus or cancel
@@ -170,7 +166,7 @@
     const int mId;
     camera2_device_t *mDevice;
 
-    camera_metadata_t *mDeviceInfo;
+    CameraMetadata mDeviceInfo;
     vendor_tag_query_ops_t *mVendorTagOps;
 
     /**
@@ -201,7 +197,6 @@
         status_t dequeue(camera_metadata_t **buf, bool incrementCount = true);
         int      getBufferCount();
         status_t waitForBuffer(nsecs_t timeout);
-        status_t setListener(FrameListener *listener);
 
         // Set repeating buffer(s); if the queue is empty on a dequeue call, the
         // queue copies the contents of the stream slot into the queue, and then
@@ -230,7 +225,6 @@
         List<camera_metadata_t*> mStreamSlot;
 
         bool mSignalConsumer;
-        FrameListener *mListener;
 
         static MetadataQueue* getInstance(
             const camera2_frame_queue_dst_ops_t *q);
diff --git a/services/camera/libcameraservice/CameraMetadata.cpp b/services/camera/libcameraservice/CameraMetadata.cpp
new file mode 100644
index 0000000..b402115
--- /dev/null
+++ b/services/camera/libcameraservice/CameraMetadata.cpp
@@ -0,0 +1,290 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "CameraMetadata"
+#include <utils/Log.h>
+#include <utils/Errors.h>
+
+#include "CameraMetadata.h"
+
+namespace android {
+
+CameraMetadata::CameraMetadata() :
+        mBuffer(NULL) {
+}
+
+CameraMetadata::CameraMetadata(size_t entryCapacity, size_t dataCapacity)
+{
+    mBuffer = allocate_camera_metadata(entryCapacity, dataCapacity);
+}
+
+CameraMetadata::CameraMetadata(const CameraMetadata &other) {
+    mBuffer = clone_camera_metadata(other.mBuffer);
+}
+
+CameraMetadata &CameraMetadata::operator=(const CameraMetadata &other) {
+    return operator=(other.mBuffer);
+}
+
+CameraMetadata &CameraMetadata::operator=(const camera_metadata_t *buffer) {
+    if (CC_LIKELY(buffer != mBuffer)) {
+        camera_metadata_t *newBuffer = clone_camera_metadata(buffer);
+        clear();
+        mBuffer = newBuffer;
+    }
+    return *this;
+}
+
+CameraMetadata::~CameraMetadata() {
+    clear();
+}
+
+camera_metadata_t* CameraMetadata::release() {
+    camera_metadata_t *released = mBuffer;
+    mBuffer = NULL;
+    return released;
+}
+
+void CameraMetadata::clear() {
+    if (mBuffer) {
+        free_camera_metadata(mBuffer);
+        mBuffer = NULL;
+    }
+}
+
+void CameraMetadata::acquire(camera_metadata_t *buffer) {
+    clear();
+    mBuffer = buffer;
+}
+
+void CameraMetadata::acquire(CameraMetadata &other) {
+    acquire(other.release());
+}
+
+status_t CameraMetadata::append(const CameraMetadata &other) {
+    return append_camera_metadata(mBuffer, other.mBuffer);
+}
+
+size_t CameraMetadata::entryCount() const {
+    return (mBuffer == NULL) ? 0 :
+            get_camera_metadata_entry_count(mBuffer);
+}
+
+status_t CameraMetadata::sort() {
+    return sort_camera_metadata(mBuffer);
+}
+
+status_t CameraMetadata::checkType(uint32_t tag, uint8_t expectedType) {
+    int tagType = get_camera_metadata_tag_type(tag);
+    if ( CC_UNLIKELY(tagType == -1)) {
+        ALOGE("Update metadata entry: Unknown tag %d", tag);
+        return INVALID_OPERATION;
+    }
+    if ( CC_UNLIKELY(tagType != expectedType) ) {
+        ALOGE("Mismatched tag type when updating entry %s (%d) of type %s; "
+                "got type %s data instead ",
+                get_camera_metadata_tag_name(tag), tag,
+                camera_metadata_type_names[tagType],
+                camera_metadata_type_names[expectedType]);
+        return INVALID_OPERATION;
+    }
+    return OK;
+}
+
+status_t CameraMetadata::update(uint32_t tag,
+        const int32_t *data, size_t data_count) {
+    status_t res;
+    if ( (res = checkType(tag, TYPE_INT32)) != OK) {
+        return res;
+    }
+    return update(tag, (const void*)data, data_count);
+}
+
+status_t CameraMetadata::update(uint32_t tag,
+        const uint8_t *data, size_t data_count) {
+    status_t res;
+    if ( (res = checkType(tag, TYPE_BYTE)) != OK) {
+        return res;
+    }
+    return update(tag, (const void*)data, data_count);
+}
+
+status_t CameraMetadata::update(uint32_t tag,
+        const float *data, size_t data_count) {
+    status_t res;
+    if ( (res = checkType(tag, TYPE_FLOAT)) != OK) {
+        return res;
+    }
+    return update(tag, (const void*)data, data_count);
+}
+
+status_t CameraMetadata::update(uint32_t tag,
+        const int64_t *data, size_t data_count) {
+    status_t res;
+    if ( (res = checkType(tag, TYPE_INT64)) != OK) {
+        return res;
+    }
+    return update(tag, (const void*)data, data_count);
+}
+
+status_t CameraMetadata::update(uint32_t tag,
+        const double *data, size_t data_count) {
+    status_t res;
+    if ( (res = checkType(tag, TYPE_DOUBLE)) != OK) {
+        return res;
+    }
+    return update(tag, (const void*)data, data_count);
+}
+
+status_t CameraMetadata::update(uint32_t tag,
+        const camera_metadata_rational_t *data, size_t data_count) {
+    status_t res;
+    if ( (res = checkType(tag, TYPE_RATIONAL)) != OK) {
+        return res;
+    }
+    return update(tag, (const void*)data, data_count);
+}
+
+status_t CameraMetadata::update(uint32_t tag,
+        const String8 &string) {
+    status_t res;
+    if ( (res = checkType(tag, TYPE_BYTE)) != OK) {
+        return res;
+    }
+    return update(tag, (const void*)string.string(), string.size());
+}
+
+status_t CameraMetadata::update(uint32_t tag, const void *data,
+        size_t data_count) {
+    status_t res;
+    int type = get_camera_metadata_tag_type(tag);
+    if (type == -1) {
+        ALOGE("%s: Tag %d not found", __FUNCTION__, tag);
+        return BAD_VALUE;
+    }
+    size_t data_size = calculate_camera_metadata_entry_data_size(type,
+            data_count);
+
+    res = resizeIfNeeded(1, data_size);
+
+    if (res == OK) {
+        camera_metadata_entry_t entry;
+        res = find_camera_metadata_entry(mBuffer, tag, &entry);
+        if (res == NAME_NOT_FOUND) {
+            res = add_camera_metadata_entry(mBuffer,
+                    tag, data, data_count);
+        } else if (res == OK) {
+            res = update_camera_metadata_entry(mBuffer,
+                    entry.index, data, data_count, NULL);
+        }
+    }
+
+    if (res != OK) {
+        ALOGE("%s: Unable to update metadata entry %s.%s (%x): %s (%d)",
+                __FUNCTION__, get_camera_metadata_section_name(tag),
+                get_camera_metadata_tag_name(tag), tag, strerror(-res), res);
+    }
+    return res;
+}
+
+camera_metadata_entry_t CameraMetadata::find(uint32_t tag) {
+    status_t res;
+    camera_metadata_entry entry;
+    res = find_camera_metadata_entry(mBuffer, tag, &entry);
+    if (CC_UNLIKELY( res != OK )) {
+        entry.count = 0;
+        entry.data.u8 = NULL;
+    }
+    return entry;
+}
+
+camera_metadata_ro_entry_t CameraMetadata::find(uint32_t tag) const {
+    status_t res;
+    camera_metadata_ro_entry entry;
+    res = find_camera_metadata_ro_entry(mBuffer, tag, &entry);
+    if (CC_UNLIKELY( res != OK )) {
+        entry.count = 0;
+        entry.data.u8 = NULL;
+    }
+    return entry;
+}
+
+status_t CameraMetadata::erase(uint32_t tag) {
+    camera_metadata_entry_t entry;
+    status_t res;
+    res = find_camera_metadata_entry(mBuffer, tag, &entry);
+    if (res == NAME_NOT_FOUND) {
+        return OK;
+    } else if (res != OK) {
+        ALOGE("%s: Error looking for entry %s.%s (%x): %s %d",
+                __FUNCTION__,
+                get_camera_metadata_section_name(tag),
+                get_camera_metadata_tag_name(tag), tag, strerror(-res), res);
+        return res;
+    }
+    res = delete_camera_metadata_entry(mBuffer, entry.index);
+    if (res != OK) {
+        ALOGE("%s: Error deleting entry %s.%s (%x): %s %d",
+                __FUNCTION__,
+                get_camera_metadata_section_name(tag),
+                get_camera_metadata_tag_name(tag), tag, strerror(-res), res);
+    }
+    return res;
+}
+
+void CameraMetadata::dump(int fd, int verbosity, int indentation) const {
+    dump_indented_camera_metadata(mBuffer, fd, verbosity, indentation);
+}
+
+status_t CameraMetadata::resizeIfNeeded(size_t extraEntries, size_t extraData) {
+    if (mBuffer == NULL) {
+        mBuffer = allocate_camera_metadata(extraEntries * 2, extraData * 2);
+        if (mBuffer == NULL) {
+            ALOGE("%s: Can't allocate initial metadata buffer", __FUNCTION__);
+            return NO_MEMORY;
+        }
+    } else {
+        size_t currentEntryCount = get_camera_metadata_entry_count(mBuffer);
+        size_t currentEntryCap = get_camera_metadata_entry_capacity(mBuffer);
+        size_t newEntryCount = currentEntryCount +
+                extraEntries;
+        newEntryCount = (newEntryCount > currentEntryCap) ?
+                newEntryCount * 2 : currentEntryCap;
+
+        size_t currentDataCount = get_camera_metadata_data_count(mBuffer);
+        size_t currentDataCap = get_camera_metadata_data_capacity(mBuffer);
+        size_t newDataCount = currentDataCount +
+                extraData;
+        newDataCount = (newDataCount > currentDataCap) ?
+                newDataCount * 2 : currentDataCap;
+
+        if (newEntryCount > currentEntryCap ||
+                newDataCount > currentDataCap) {
+            camera_metadata_t *oldBuffer = mBuffer;
+            mBuffer = allocate_camera_metadata(newEntryCount,
+                    newDataCount);
+            if (mBuffer == NULL) {
+                ALOGE("%s: Can't allocate larger metadata buffer", __FUNCTION__);
+                return NO_MEMORY;
+            }
+            append_camera_metadata(mBuffer, oldBuffer);
+            free_camera_metadata(oldBuffer);
+        }
+    }
+    return OK;
+}
+
+}; // namespace android
diff --git a/services/camera/libcameraservice/CameraMetadata.h b/services/camera/libcameraservice/CameraMetadata.h
new file mode 100644
index 0000000..afb8318
--- /dev/null
+++ b/services/camera/libcameraservice/CameraMetadata.h
@@ -0,0 +1,166 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_SERVERS_CAMERA_METADATA_CPP
+#define ANDROID_SERVERS_CAMERA_METADATA_CPP
+
+#include "system/camera_metadata.h"
+#include <utils/String8.h>
+#include <utils/Vector.h>
+
+namespace android {
+
+/**
+ * A convenience wrapper around the C-based camera_metadata_t library.
+ */
+class CameraMetadata {
+  public:
+    /** Creates an empty object; best used when expecting to acquire contents
+     * from elsewhere */
+    CameraMetadata();
+    /** Creates an object with space for entryCapacity entries, with
+     * dataCapacity extra storage */
+    CameraMetadata(size_t entryCapacity, size_t dataCapacity = 10);
+
+    ~CameraMetadata();
+
+    /** Takes ownership of passed-in buffer */
+    CameraMetadata(camera_metadata_t *buffer);
+    /** Clones the metadata */
+    CameraMetadata(const CameraMetadata &other);
+
+    /**
+     * Assignment clones metadata buffer.
+     */
+    CameraMetadata &operator=(const CameraMetadata &other);
+    CameraMetadata &operator=(const camera_metadata_t *buffer);
+
+    /**
+     * Release a raw metadata buffer to the caller. After this call,
+     * CameraMetadata no longer references the buffer, and the caller takes
+     * responsibility for freeing the raw metadata buffer (using
+     * free_camera_metadata()), or for handing it to another CameraMetadata
+     * instance.
+     */
+    camera_metadata_t* release();
+
+    /**
+     * Clear the metadata buffer and free all storage used by it
+     */
+    void clear();
+
+    /**
+     * Acquire a raw metadata buffer from the caller. After this call,
+     * the caller no longer owns the raw buffer, and must not free or manipulate it.
+     * If CameraMetadata already contains metadata, it is freed.
+     */
+    void acquire(camera_metadata_t* buffer);
+
+    /**
+     * Acquires raw buffer from other CameraMetadata object. After the call, the argument
+     * object no longer has any metadata.
+     */
+    void acquire(CameraMetadata &other);
+
+    /**
+     * Append metadata from another CameraMetadata object.
+     */
+    status_t append(const CameraMetadata &other);
+
+    /**
+     * Number of metadata entries.
+     */
+    size_t entryCount() const;
+
+    /**
+     * Sort metadata buffer for faster find
+     */
+    status_t sort();
+
+    /**
+     * Update metadata entry. Will create entry if it doesn't exist already, and
+     * will reallocate the buffer if insufficient space exists. Overloaded for
+     * the various types of valid data.
+     */
+    status_t update(uint32_t tag,
+            const uint8_t *data, size_t data_count);
+    status_t update(uint32_t tag,
+            const int32_t *data, size_t data_count);
+    status_t update(uint32_t tag,
+            const float *data, size_t data_count);
+    status_t update(uint32_t tag,
+            const int64_t *data, size_t data_count);
+    status_t update(uint32_t tag,
+            const double *data, size_t data_count);
+    status_t update(uint32_t tag,
+            const camera_metadata_rational_t *data, size_t data_count);
+    status_t update(uint32_t tag,
+            const String8 &string);
+
+    template<typename T>
+    status_t update(uint32_t tag, Vector<T> data) {
+        return update(tag, data.array(), data.size());
+    }
+
+    /**
+     * Get metadata entry by tag id
+     */
+    camera_metadata_entry find(uint32_t tag);
+
+    /**
+     * Get metadata entry by tag id, with no editing
+     */
+    camera_metadata_ro_entry find(uint32_t tag) const;
+
+    /**
+     * Delete metadata entry by tag
+     */
+    status_t erase(uint32_t tag);
+
+    /**
+     * Dump contents into FD for debugging. The verbosity levels are
+     * 0: Tag entry information only, no data values
+     * 1: Level 0 plus at most 16 data values per entry
+     * 2: All information
+     *
+     * The indentation parameter sets the number of spaces to add to the start
+     * of each line of output.
+     */
+    void dump(int fd, int verbosity = 1, int indentation = 0) const;
+
+  private:
+    camera_metadata_t *mBuffer;
+
+    /**
+     * Check if tag has a given type
+     */
+    status_t checkType(uint32_t tag, uint8_t expectedType);
+
+    /**
+     * Base update entry method
+     */
+    status_t update(uint32_t tag, const void *data, size_t data_count);
+
+    /**
+     * Resize metadata buffer if needed by reallocating it and copying it over.
+     */
+    status_t resizeIfNeeded(size_t extraEntries, size_t extraData);
+
+};
+
+}; // namespace android
+
+#endif