CameraService: Remove device HALv2 support, other cleanup
HALv2 only ever shipped with Nexus 10, and has been fully superseded by
HALv3. Remove it to allow for various code simplifications and cleanup.
- Remove Camera2Device
- Remove various special-case codepaths for supporting Camera2Device
- Remove CameraDeviceFactory, since it only creates Camera3Devices now
- Remove BurstCapture and associated CaptureSequence/Parameters code
- Remove the old ZslProcessor and collapse the ZslProcessor hierarchy to
  just ZslProcessor3, which is renamed to ZslProcessor
- Add service-init-time check for unsupported device versions
- Fix assorted compiler warnings, some old, some new
- Remove references to HALv2 where possible
Bug: 25866588
Change-Id: Ia1063264d315f9b742ec5cdd0483539310894f5e
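
As an illustration of the new init-time guard (the actual change is the
switch added to CameraService::checkCameraCapabilities in the diff below),
here is a minimal standalone sketch. The helper name
isSupportedDeviceVersion() is invented for this sketch; HAL device versions
are encoded as (major << 8) | minor, e.g. 3.0 -> 0x300, as in
camera_common.h.

    #include <cstdint>
    #include <cstdio>

    // Sketch of the version gate: only HALv1 and HALv3.x pass; HALv2.x and
    // anything unknown are rejected when the service enumerates devices.
    static bool isSupportedDeviceVersion(uint32_t deviceVersion) {
        switch (deviceVersion) {
            case 0x100: // CAMERA_DEVICE_API_VERSION_1_0
            case 0x300: // CAMERA_DEVICE_API_VERSION_3_0
            case 0x301: // CAMERA_DEVICE_API_VERSION_3_1
            case 0x302: // CAMERA_DEVICE_API_VERSION_3_2
            case 0x303: // CAMERA_DEVICE_API_VERSION_3_3
                return true;
            case 0x200: // CAMERA_DEVICE_API_VERSION_2_0 (no longer supported)
            case 0x201: // CAMERA_DEVICE_API_VERSION_2_1 (no longer supported)
            default:
                return false;
        }
    }

    int main() {
        std::printf("HALv2.0 supported? %d\n", isSupportedDeviceVersion(0x200)); // prints 0
        std::printf("HALv3.2 supported? %d\n", isSupportedDeviceVersion(0x302)); // prints 1
        return 0;
    }
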
diff --git a/camera/tests/CameraBinderTests.cpp b/camera/tests/CameraBinderTests.cpp
index 572fb72..78a1b58 100644
--- a/camera/tests/CameraBinderTests.cpp
+++ b/camera/tests/CameraBinderTests.cpp
@@ -160,6 +160,7 @@
virtual void onDeviceError(CameraErrorCode errorCode,
const CaptureResultExtras& resultExtras) {
+ (void) resultExtras;
ALOGE("%s: onDeviceError occurred with: %d", __FUNCTION__, static_cast<int>(errorCode));
Mutex::Autolock l(mLock);
mError = true;
@@ -177,6 +178,8 @@
virtual void onCaptureStarted(const CaptureResultExtras& resultExtras,
int64_t timestamp) {
+ (void) resultExtras;
+ (void) timestamp;
Mutex::Autolock l(mLock);
mLastStatus = RUNNING;
mStatusesHit.push_back(mLastStatus);
@@ -186,6 +189,8 @@
virtual void onResultReceived(const CameraMetadata& metadata,
const CaptureResultExtras& resultExtras) {
+ (void) metadata;
+ (void) resultExtras;
Mutex::Autolock l(mLock);
mLastStatus = SENT_RESULT;
mStatusesHit.push_back(mLastStatus);
@@ -193,6 +198,7 @@
}
virtual void onPrepared(int streamId) {
+ (void) streamId;
Mutex::Autolock l(mLock);
mLastStatus = PREPARED;
mStatusesHit.push_back(mLastStatus);
@@ -465,6 +471,7 @@
callbacks->clearStatus();
int requestId3 = device->submitRequestList(requestList, /*streaming*/false,
/*out*/&lastFrameNumber);
+ EXPECT_LE(0, requestId3);
EXPECT_TRUE(callbacks->waitForStatus(TestCameraDeviceCallbacks::SENT_RESULT));
EXPECT_TRUE(callbacks->waitForIdle());
EXPECT_LE(lastFrameNumberPrev, lastFrameNumber);
diff --git a/services/camera/libcameraservice/Android.mk b/services/camera/libcameraservice/Android.mk
index 45900c4..9ba8f3f 100644
--- a/services/camera/libcameraservice/Android.mk
+++ b/services/camera/libcameraservice/Android.mk
@@ -22,7 +22,6 @@
LOCAL_SRC_FILES:= \
CameraService.cpp \
- CameraDeviceFactory.cpp \
CameraFlashlight.cpp \
common/Camera2ClientBase.cpp \
common/CameraDeviceBase.cpp \
@@ -35,14 +34,10 @@
api1/client2/StreamingProcessor.cpp \
api1/client2/JpegProcessor.cpp \
api1/client2/CallbackProcessor.cpp \
- api1/client2/ZslProcessor.cpp \
- api1/client2/ZslProcessorInterface.cpp \
- api1/client2/BurstCapture.cpp \
api1/client2/JpegCompressor.cpp \
api1/client2/CaptureSequencer.cpp \
- api1/client2/ZslProcessor3.cpp \
+ api1/client2/ZslProcessor.cpp \
api2/CameraDeviceClient.cpp \
- device2/Camera2Device.cpp \
device3/Camera3Device.cpp \
device3/Camera3Stream.cpp \
device3/Camera3IOStreamBase.cpp \
diff --git a/services/camera/libcameraservice/CameraDeviceFactory.cpp b/services/camera/libcameraservice/CameraDeviceFactory.cpp
deleted file mode 100644
index 6589e27..0000000
--- a/services/camera/libcameraservice/CameraDeviceFactory.cpp
+++ /dev/null
@@ -1,74 +0,0 @@
-/*
- * Copyright (C) 2013 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-// #define LOG_NDEBUG 0
-#define LOG_TAG "CameraDeviceFactory"
-#include <utils/Log.h>
-
-#include "CameraService.h"
-#include "CameraDeviceFactory.h"
-#include "common/CameraDeviceBase.h"
-#include "device2/Camera2Device.h"
-#include "device3/Camera3Device.h"
-
-namespace android {
-
-wp<CameraService> CameraDeviceFactory::sService;
-
-sp<CameraDeviceBase> CameraDeviceFactory::createDevice(int cameraId) {
-
- sp<CameraService> svc = sService.promote();
- if (svc == 0) {
- ALOGE("%s: No service registered", __FUNCTION__);
- return NULL;
- }
-
- int deviceVersion = svc->getDeviceVersion(cameraId, /*facing*/NULL);
-
- sp<CameraDeviceBase> device;
-
- switch (deviceVersion) {
- case CAMERA_DEVICE_API_VERSION_2_0:
- case CAMERA_DEVICE_API_VERSION_2_1:
- device = new Camera2Device(cameraId);
- break;
- case CAMERA_DEVICE_API_VERSION_3_0:
- case CAMERA_DEVICE_API_VERSION_3_1:
- case CAMERA_DEVICE_API_VERSION_3_2:
- case CAMERA_DEVICE_API_VERSION_3_3:
- device = new Camera3Device(cameraId);
- break;
- default:
- ALOGE("%s: Camera %d: Unknown HAL device version %d",
- __FUNCTION__, cameraId, deviceVersion);
- device = NULL;
- break;
- }
-
- ALOGV_IF(device != 0, "Created a new camera device for version %d",
- deviceVersion);
-
- return device;
-}
-
-void CameraDeviceFactory::registerService(wp<CameraService> service) {
- ALOGV("%s: Registered service %p", __FUNCTION__,
- service.promote().get());
-
- sService = service;
-}
-
-}; // namespace android
diff --git a/services/camera/libcameraservice/CameraDeviceFactory.h b/services/camera/libcameraservice/CameraDeviceFactory.h
deleted file mode 100644
index 236dc56..0000000
--- a/services/camera/libcameraservice/CameraDeviceFactory.h
+++ /dev/null
@@ -1,45 +0,0 @@
-/*
- * Copyright (C) 2013 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ANDROID_SERVERS_CAMERA_CAMERADEVICEFACTORY_H
-#define ANDROID_SERVERS_CAMERA_CAMERADEVICEFACTORY_H
-
-#include <utils/RefBase.h>
-
-namespace android {
-
-class CameraDeviceBase;
-class CameraService;
-
-/**
- * Create the right instance of Camera2Device or Camera3Device
- * automatically based on the device version.
- */
-class CameraDeviceFactory : public virtual RefBase {
- public:
- static void registerService(wp<CameraService> service);
-
- // Prerequisite: Call registerService.
- static sp<CameraDeviceBase> createDevice(int cameraId);
- private:
- CameraDeviceFactory(wp<CameraService> service);
-
- static wp<CameraService> sService;
-};
-
-}; // namespace android
-
-#endif
diff --git a/services/camera/libcameraservice/CameraFlashlight.cpp b/services/camera/libcameraservice/CameraFlashlight.cpp
index 406c1c4..0afd945 100644
--- a/services/camera/libcameraservice/CameraFlashlight.cpp
+++ b/services/camera/libcameraservice/CameraFlashlight.cpp
@@ -27,7 +27,7 @@
#include "gui/IGraphicBufferConsumer.h"
#include "gui/BufferQueue.h"
#include "camera/camera2/CaptureRequest.h"
-#include "CameraDeviceFactory.h"
+#include "device3/Camera3Device.h"
namespace android {
@@ -78,7 +78,7 @@
deviceVersion = info.device_version;
}
- if (deviceVersion >= CAMERA_DEVICE_API_VERSION_2_0) {
+ if (deviceVersion >= CAMERA_DEVICE_API_VERSION_3_0) {
CameraDeviceClientFlashControl *flashControl =
new CameraDeviceClientFlashControl(*mCameraModule,
*mCallbacks);
@@ -193,8 +193,6 @@
}
bool CameraFlashlight::hasFlashUnit(const String8& cameraId) {
- status_t res;
-
Mutex::Autolock l(mLock);
return hasFlashUnitLocked(cameraId);
}
@@ -302,7 +300,8 @@
/////////////////////////////////////////////////////////////////////
ModuleFlashControl::ModuleFlashControl(CameraModule& cameraModule,
const camera_module_callbacks_t& callbacks) :
- mCameraModule(&cameraModule) {
+ mCameraModule(&cameraModule) {
+ (void) callbacks;
}
ModuleFlashControl::~ModuleFlashControl() {
@@ -478,7 +477,7 @@
}
sp<CameraDeviceBase> device =
- CameraDeviceFactory::createDevice(atoi(cameraId.string()));
+ new Camera3Device(atoi(cameraId.string()));
if (device == NULL) {
return NO_MEMORY;
}
diff --git a/services/camera/libcameraservice/CameraService.cpp b/services/camera/libcameraservice/CameraService.cpp
index ebf6779..846d790 100644
--- a/services/camera/libcameraservice/CameraService.cpp
+++ b/services/camera/libcameraservice/CameraService.cpp
@@ -55,7 +55,6 @@
#include "api1/Camera2Client.h"
#include "api2/CameraDeviceClient.h"
#include "utils/CameraTraces.h"
-#include "CameraDeviceFactory.h"
namespace android {
@@ -246,8 +245,6 @@
mModule->setCallbacks(this);
}
- CameraDeviceFactory::registerService(this);
-
CameraService::pingCameraServiceProxy();
}
@@ -364,7 +361,8 @@
res = setTorchStatusLocked(cameraId, newStatus);
if (res) {
- ALOGE("%s: Failed to set the torch status", __FUNCTION__, (uint32_t)newStatus);
+ ALOGE("%s: Failed to set the torch status to %d: %s (%d)", __FUNCTION__,
+ (uint32_t)newStatus, strerror(-res), res);
return;
}
@@ -481,7 +479,6 @@
Vector<Size> sizes;
Vector<Size> jpegSizes;
Vector<int32_t> formats;
- const char* supportedPreviewFormats;
{
shimParams.getSupportedPreviewSizes(/*out*/sizes);
shimParams.getSupportedPreviewFormats(/*out*/formats);
@@ -559,7 +556,7 @@
int facing;
status_t ret = OK;
if (mModule->getModuleApiVersion() < CAMERA_MODULE_API_VERSION_2_0 ||
- getDeviceVersion(cameraId, &facing) <= CAMERA_DEVICE_API_VERSION_2_1 ) {
+ getDeviceVersion(cameraId, &facing) < CAMERA_DEVICE_API_VERSION_3_0) {
/**
* Backwards compatibility mode for old HALs:
* - Convert CameraInfo into static CameraMetadata properties.
@@ -725,8 +722,6 @@
return -EOPNOTSUPP;
}
break;
- case CAMERA_DEVICE_API_VERSION_2_0:
- case CAMERA_DEVICE_API_VERSION_2_1:
case CAMERA_DEVICE_API_VERSION_3_0:
case CAMERA_DEVICE_API_VERSION_3_1:
case CAMERA_DEVICE_API_VERSION_3_2:
@@ -1306,7 +1301,6 @@
// update the link to client's death
Mutex::Autolock al(mTorchClientMapMutex);
ssize_t index = mTorchClientMap.indexOfKey(id);
- BatteryNotifier& notifier(BatteryNotifier::getInstance());
if (enabled) {
if (index == NAME_NOT_FOUND) {
mTorchClientMap.add(id, clientBinder);
@@ -1463,8 +1457,6 @@
switch(deviceVersion) {
case CAMERA_DEVICE_API_VERSION_1_0:
- case CAMERA_DEVICE_API_VERSION_2_0:
- case CAMERA_DEVICE_API_VERSION_2_1:
case CAMERA_DEVICE_API_VERSION_3_0:
case CAMERA_DEVICE_API_VERSION_3_1:
if (apiVersion == API_VERSION_2) {
@@ -1555,9 +1547,29 @@
/**
* Check camera capabilities, such as support for basic color operation
+ * Also check that the device HAL version is still supported
*/
int CameraService::checkCameraCapabilities(int id, camera_info info, int *latestStrangeCameraId) {
+ // Verify the device version is in the supported range
+ switch (info.device_version) {
+ case CAMERA_DEVICE_API_VERSION_1_0:
+ case CAMERA_DEVICE_API_VERSION_3_0:
+ case CAMERA_DEVICE_API_VERSION_3_1:
+ case CAMERA_DEVICE_API_VERSION_3_2:
+ case CAMERA_DEVICE_API_VERSION_3_3:
+ // in supported range
+ break;
+ case CAMERA_DEVICE_API_VERSION_2_0:
+ case CAMERA_DEVICE_API_VERSION_2_1:
+ // no longer supported
+ default:
+ ALOGE("%s: Device %d has HAL version %x, which is not supported",
+ __FUNCTION__, id, info.device_version);
+ logServiceError("Unsupported device HAL version", NO_INIT);
+ return NO_INIT;
+ }
+
// Assume all devices pre-v3.3 are backward-compatible
bool isBackwardCompatible = true;
if (mModule->getModuleApiVersion() >= CAMERA_MODULE_API_VERSION_2_0
@@ -1591,10 +1603,10 @@
ALOGE("%s: Normal camera ID %d higher than strange camera ID %d. "
"This is not allowed due backward-compatibility requirements",
__FUNCTION__, id, *latestStrangeCameraId);
- logServiceError("Invalid order of camera devices", ENODEV);
+ logServiceError("Invalid order of camera devices", NO_INIT);
mNumberOfCameras = 0;
mNumberOfNormalCameras = 0;
- return INVALID_OPERATION;
+ return NO_INIT;
}
}
return OK;
@@ -1752,7 +1764,7 @@
void CameraService::logServiceError(const char* msg, int errorCode) {
String8 curTime = getFormattedCurrentTime();
- logEvent(String8::format("SERVICE ERROR: %s : %d (%s)", msg, errorCode, strerror(errorCode)));
+ logEvent(String8::format("SERVICE ERROR: %s : %d (%s)", msg, errorCode, strerror(-errorCode)));
}
status_t CameraService::onTransact(uint32_t code, const Parcel& data, Parcel* reply,
@@ -2073,6 +2085,8 @@
void CameraService::Client::notifyError(ICameraDeviceCallbacks::CameraErrorCode errorCode,
const CaptureResultExtras& resultExtras) {
+ (void) errorCode;
+ (void) resultExtras;
if (mRemoteCallback != NULL) {
mRemoteCallback->notifyCallback(CAMERA_MSG_ERROR, CAMERA_ERROR_RELEASED, 0);
} else {
@@ -2340,7 +2354,7 @@
result.appendFormat(" Resource Cost: %d\n", state.second->getCost());
result.appendFormat(" Conflicting Devices:");
for (auto& id : conflicting) {
- result.appendFormat(" %s", cameraId.string());
+ result.appendFormat(" %s", id.string());
}
if (conflicting.size() == 0) {
result.appendFormat(" NONE");
@@ -2348,7 +2362,7 @@
result.appendFormat("\n");
result.appendFormat(" Device version: %#x\n", deviceVersion);
- if (deviceVersion >= CAMERA_DEVICE_API_VERSION_2_0) {
+ if (deviceVersion >= CAMERA_DEVICE_API_VERSION_3_0) {
result.appendFormat(" Device static metadata:\n");
write(fd, result.string(), result.size());
dump_indented_camera_metadata(info.static_camera_characteristics,
diff --git a/services/camera/libcameraservice/CameraService.h b/services/camera/libcameraservice/CameraService.h
index 4b0eeb7..c5fe69f 100644
--- a/services/camera/libcameraservice/CameraService.h
+++ b/services/camera/libcameraservice/CameraService.h
@@ -620,6 +620,7 @@
/**
* Add a event log message that a serious service-level error has occured
+ * The errorCode should be one of the Android error codes (see utils/Errors.h)
*/
void logServiceError(const char* msg, int errorCode);
diff --git a/services/camera/libcameraservice/api1/Camera2Client.cpp b/services/camera/libcameraservice/api1/Camera2Client.cpp
index 4338d64..175920f 100644
--- a/services/camera/libcameraservice/api1/Camera2Client.cpp
+++ b/services/camera/libcameraservice/api1/Camera2Client.cpp
@@ -32,7 +32,6 @@
#include "api1/client2/CaptureSequencer.h"
#include "api1/client2/CallbackProcessor.h"
#include "api1/client2/ZslProcessor.h"
-#include "api1/client2/ZslProcessor3.h"
#define ALOG1(...) ALOGD_IF(gLogLevel >= 1, __VA_ARGS__);
#define ALOG2(...) ALOGD_IF(gLogLevel >= 2, __VA_ARGS__);
@@ -111,30 +110,11 @@
mCameraId);
mJpegProcessor->run(threadName.string());
- switch (mDeviceVersion) {
- case CAMERA_DEVICE_API_VERSION_2_0: {
- sp<ZslProcessor> zslProc =
- new ZslProcessor(this, mCaptureSequencer);
- mZslProcessor = zslProc;
- mZslProcessorThread = zslProc;
- break;
- }
- case CAMERA_DEVICE_API_VERSION_3_0:
- case CAMERA_DEVICE_API_VERSION_3_1:
- case CAMERA_DEVICE_API_VERSION_3_2:
- case CAMERA_DEVICE_API_VERSION_3_3: {
- sp<ZslProcessor3> zslProc =
- new ZslProcessor3(this, mCaptureSequencer);
- mZslProcessor = zslProc;
- mZslProcessorThread = zslProc;
- break;
- }
- default:
- break;
- }
+ mZslProcessor = new ZslProcessor(this, mCaptureSequencer);
+
threadName = String8::format("C2-%d-ZslProc",
mCameraId);
- mZslProcessorThread->run(threadName.string());
+ mZslProcessor->run(threadName.string());
mCallbackProcessor = new CallbackProcessor(this);
threadName = String8::format("C2-%d-CallbkProc",
@@ -414,7 +394,7 @@
mFrameProcessor->requestExit();
mCaptureSequencer->requestExit();
mJpegProcessor->requestExit();
- mZslProcessorThread->requestExit();
+ mZslProcessor->requestExit();
mCallbackProcessor->requestExit();
ALOGV("Camera %d: Waiting for threads", mCameraId);
@@ -428,7 +408,7 @@
mFrameProcessor->join();
mCaptureSequencer->join();
mJpegProcessor->join();
- mZslProcessorThread->join();
+ mZslProcessor->join();
mCallbackProcessor->join();
mBinderSerializationLock.lock();
@@ -442,9 +422,6 @@
mCallbackProcessor->deleteStream();
mZslProcessor->deleteStream();
- // Remove all ZSL stream state before disconnect; needed to work around b/15408128.
- mZslProcessor->disconnect();
-
ALOGV("Camera %d: Disconnecting device", mCameraId);
mDevice->disconnect();
@@ -761,8 +738,8 @@
// We could wait to create the JPEG output stream until first actual use
// (first takePicture call). However, this would substantially increase the
- // first capture latency on HAL3 devices, and potentially on some HAL2
- // devices. So create it unconditionally at preview start. As a drawback,
+ // first capture latency on HAL3 devices.
+ // So create it unconditionally at preview start. As a drawback,
// this increases gralloc memory consumption for applications that don't
// ever take a picture. Do not enter this mode when jpeg stream will slow
// down preview.
@@ -1069,35 +1046,33 @@
}
}
- // On current HALs, clean up ZSL before transitioning into recording
- if (mDeviceVersion != CAMERA_DEVICE_API_VERSION_2_0) {
- if (mZslProcessor->getStreamId() != NO_STREAM) {
- ALOGV("%s: Camera %d: Clearing out zsl stream before "
- "creating recording stream", __FUNCTION__, mCameraId);
- res = mStreamingProcessor->stopStream();
- if (res != OK) {
- ALOGE("%s: Camera %d: Can't stop streaming to delete callback stream",
- __FUNCTION__, mCameraId);
- return res;
- }
- res = mDevice->waitUntilDrained();
- if (res != OK) {
- ALOGE("%s: Camera %d: Waiting to stop streaming failed: %s (%d)",
- __FUNCTION__, mCameraId, strerror(-res), res);
- }
- res = mZslProcessor->clearZslQueue();
- if (res != OK) {
- ALOGE("%s: Camera %d: Can't clear zsl queue",
- __FUNCTION__, mCameraId);
- return res;
- }
- res = mZslProcessor->deleteStream();
- if (res != OK) {
- ALOGE("%s: Camera %d: Unable to delete zsl stream before "
- "record: %s (%d)", __FUNCTION__, mCameraId,
- strerror(-res), res);
- return res;
- }
+ // Clean up ZSL before transitioning into recording
+ if (mZslProcessor->getStreamId() != NO_STREAM) {
+ ALOGV("%s: Camera %d: Clearing out zsl stream before "
+ "creating recording stream", __FUNCTION__, mCameraId);
+ res = mStreamingProcessor->stopStream();
+ if (res != OK) {
+ ALOGE("%s: Camera %d: Can't stop streaming to delete callback stream",
+ __FUNCTION__, mCameraId);
+ return res;
+ }
+ res = mDevice->waitUntilDrained();
+ if (res != OK) {
+ ALOGE("%s: Camera %d: Waiting to stop streaming failed: %s (%d)",
+ __FUNCTION__, mCameraId, strerror(-res), res);
+ }
+ res = mZslProcessor->clearZslQueue();
+ if (res != OK) {
+ ALOGE("%s: Camera %d: Can't clear zsl queue",
+ __FUNCTION__, mCameraId);
+ return res;
+ }
+ res = mZslProcessor->deleteStream();
+ if (res != OK) {
+ ALOGE("%s: Camera %d: Unable to delete zsl stream before "
+ "record: %s (%d)", __FUNCTION__, mCameraId,
+ strerror(-res), res);
+ return res;
}
}
@@ -1105,56 +1080,43 @@
// and we can't fail record start without stagefright asserting.
params.previewCallbackFlags = 0;
- if (mDeviceVersion != CAMERA_DEVICE_API_VERSION_2_0) {
- // For newer devices, may need to reconfigure video snapshot JPEG sizes
- // during recording startup, so need a more complex sequence here to
- // ensure an early stream reconfiguration doesn't happen
- bool recordingStreamNeedsUpdate;
- res = mStreamingProcessor->recordingStreamNeedsUpdate(params, &recordingStreamNeedsUpdate);
+ // May need to reconfigure video snapshot JPEG sizes
+ // during recording startup, so need a more complex sequence here to
+ // ensure an early stream reconfiguration doesn't happen
+ bool recordingStreamNeedsUpdate;
+ res = mStreamingProcessor->recordingStreamNeedsUpdate(params, &recordingStreamNeedsUpdate);
+ if (res != OK) {
+ ALOGE("%s: Camera %d: Can't query recording stream",
+ __FUNCTION__, mCameraId);
+ return res;
+ }
+
+ if (recordingStreamNeedsUpdate) {
+ // Need to stop stream here so updateProcessorStream won't trigger configureStream
+ // Right now camera device cannot handle configureStream failure gracefully
+ // when device is streaming
+ res = mStreamingProcessor->stopStream();
if (res != OK) {
- ALOGE("%s: Camera %d: Can't query recording stream",
- __FUNCTION__, mCameraId);
+ ALOGE("%s: Camera %d: Can't stop streaming to update record "
+ "stream", __FUNCTION__, mCameraId);
return res;
}
-
- if (recordingStreamNeedsUpdate) {
- // Need to stop stream here so updateProcessorStream won't trigger configureStream
- // Right now camera device cannot handle configureStream failure gracefully
- // when device is streaming
- res = mStreamingProcessor->stopStream();
- if (res != OK) {
- ALOGE("%s: Camera %d: Can't stop streaming to update record "
- "stream", __FUNCTION__, mCameraId);
- return res;
- }
- res = mDevice->waitUntilDrained();
- if (res != OK) {
- ALOGE("%s: Camera %d: Waiting to stop streaming failed: "
- "%s (%d)", __FUNCTION__, mCameraId,
- strerror(-res), res);
- }
-
- res = updateProcessorStream<
- StreamingProcessor,
- &StreamingProcessor::updateRecordingStream>(
- mStreamingProcessor,
- params);
- if (res != OK) {
- ALOGE("%s: Camera %d: Unable to update recording stream: "
- "%s (%d)", __FUNCTION__, mCameraId,
- strerror(-res), res);
- return res;
- }
- }
- } else {
- // Maintain call sequencing for HALv2 devices.
- res = updateProcessorStream<
- StreamingProcessor,
- &StreamingProcessor::updateRecordingStream>(mStreamingProcessor,
- params);
+ res = mDevice->waitUntilDrained();
if (res != OK) {
- ALOGE("%s: Camera %d: Unable to update recording stream: %s (%d)",
- __FUNCTION__, mCameraId, strerror(-res), res);
+ ALOGE("%s: Camera %d: Waiting to stop streaming failed: "
+ "%s (%d)", __FUNCTION__, mCameraId,
+ strerror(-res), res);
+ }
+
+ res = updateProcessorStream<
+ StreamingProcessor,
+ &StreamingProcessor::updateRecordingStream>(
+ mStreamingProcessor,
+ params);
+ if (res != OK) {
+ ALOGE("%s: Camera %d: Unable to update recording stream: "
+ "%s (%d)", __FUNCTION__, mCameraId,
+ strerror(-res), res);
return res;
}
}
diff --git a/services/camera/libcameraservice/api1/Camera2Client.h b/services/camera/libcameraservice/api1/Camera2Client.h
index d50bf63..e1e18c9 100644
--- a/services/camera/libcameraservice/api1/Camera2Client.h
+++ b/services/camera/libcameraservice/api1/Camera2Client.h
@@ -24,7 +24,7 @@
#include "api1/client2/FrameProcessor.h"
//#include "api1/client2/StreamingProcessor.h"
//#include "api1/client2/JpegProcessor.h"
-//#include "api1/client2/ZslProcessorInterface.h"
+//#include "api1/client2/ZslProcessor.h"
//#include "api1/client2/CaptureSequencer.h"
//#include "api1/client2/CallbackProcessor.h"
@@ -34,7 +34,7 @@
class StreamingProcessor;
class JpegProcessor;
-class ZslProcessorInterface;
+class ZslProcessor;
class CaptureSequencer;
class CallbackProcessor;
@@ -43,7 +43,7 @@
class IMemory;
/**
* Interface between android.hardware.Camera API and Camera HAL device for versions
- * CAMERA_DEVICE_API_VERSION_2_0 and 3_0.
+ * CAMERA_DEVICE_API_VERSION_3_0 and above.
*/
class Camera2Client :
public Camera2ClientBase<CameraService::Client>
@@ -204,12 +204,7 @@
sp<camera2::CaptureSequencer> mCaptureSequencer;
sp<camera2::JpegProcessor> mJpegProcessor;
- sp<camera2::ZslProcessorInterface> mZslProcessor;
- sp<Thread> mZslProcessorThread;
-
- /** Notification-related members */
-
- bool mAfInMotion;
+ sp<camera2::ZslProcessor> mZslProcessor;
/** Utility members */
bool mLegacyMode;
diff --git a/services/camera/libcameraservice/api1/client2/BurstCapture.cpp b/services/camera/libcameraservice/api1/client2/BurstCapture.cpp
deleted file mode 100644
index 5502dcb..0000000
--- a/services/camera/libcameraservice/api1/client2/BurstCapture.cpp
+++ /dev/null
@@ -1,113 +0,0 @@
-/*
- * Copyright (C) 2012 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "Camera2-BurstCapture"
-
-#include <utils/Log.h>
-#include <utils/Trace.h>
-
-#include "BurstCapture.h"
-
-#include "api1/Camera2Client.h"
-#include "api1/client2/JpegCompressor.h"
-
-namespace android {
-namespace camera2 {
-
-BurstCapture::BurstCapture(wp<Camera2Client> client, wp<CaptureSequencer> sequencer):
- mCaptureStreamId(NO_STREAM),
- mClient(client),
- mSequencer(sequencer)
-{
-}
-
-BurstCapture::~BurstCapture() {
-}
-
-status_t BurstCapture::start(Vector<CameraMetadata> &/*metadatas*/,
- int32_t /*firstCaptureId*/) {
- ALOGE("Not completely implemented");
- return INVALID_OPERATION;
-}
-
-void BurstCapture::onFrameAvailable(const BufferItem &/*item*/) {
- ALOGV("%s", __FUNCTION__);
- Mutex::Autolock l(mInputMutex);
- if(!mInputChanged) {
- mInputChanged = true;
- mInputSignal.signal();
- }
-}
-
-bool BurstCapture::threadLoop() {
- status_t res;
- {
- Mutex::Autolock l(mInputMutex);
- while(!mInputChanged) {
- res = mInputSignal.waitRelative(mInputMutex, kWaitDuration);
- if(res == TIMED_OUT) return true;
- }
- mInputChanged = false;
- }
-
- do {
- sp<Camera2Client> client = mClient.promote();
- if(client == 0) return false;
- ALOGV("%s: Calling processFrameAvailable()", __FUNCTION__);
- res = processFrameAvailable(client);
- } while(res == OK);
-
- return true;
-}
-
-CpuConsumer::LockedBuffer* BurstCapture::jpegEncode(
- CpuConsumer::LockedBuffer *imgBuffer,
- int /*quality*/)
-{
- ALOGV("%s", __FUNCTION__);
-
- CpuConsumer::LockedBuffer *imgEncoded = new CpuConsumer::LockedBuffer;
- uint8_t *data = new uint8_t[ANDROID_JPEG_MAX_SIZE];
- imgEncoded->data = data;
- imgEncoded->width = imgBuffer->width;
- imgEncoded->height = imgBuffer->height;
- imgEncoded->stride = imgBuffer->stride;
-
- Vector<CpuConsumer::LockedBuffer*> buffers;
- buffers.push_back(imgBuffer);
- buffers.push_back(imgEncoded);
-
- sp<JpegCompressor> jpeg = new JpegCompressor();
- jpeg->start(buffers, 1);
-
- bool success = jpeg->waitForDone(10 * 1e9);
- if(success) {
- return buffers[1];
- }
- else {
- ALOGE("%s: JPEG encode timed out", __FUNCTION__);
- return NULL; // TODO: maybe change function return value to status_t
- }
-}
-
-status_t BurstCapture::processFrameAvailable(sp<Camera2Client> &/*client*/) {
- ALOGE("Not implemented");
- return INVALID_OPERATION;
-}
-
-} // namespace camera2
-} // namespace android
diff --git a/services/camera/libcameraservice/api1/client2/BurstCapture.h b/services/camera/libcameraservice/api1/client2/BurstCapture.h
deleted file mode 100644
index c3b7722..0000000
--- a/services/camera/libcameraservice/api1/client2/BurstCapture.h
+++ /dev/null
@@ -1,72 +0,0 @@
-/*
- * Copyright (C) 2012 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ANDROID_SERVERS_CAMERA_BURST_CAPTURE_H
-#define ANDROID_SERVERS_CAMERA_BURST_CAPTURE_H
-
-#include <camera/CameraMetadata.h>
-#include <binder/MemoryBase.h>
-#include <binder/MemoryHeapBase.h>
-#include <gui/CpuConsumer.h>
-
-#include "device2/Camera2Device.h"
-
-namespace android {
-
-class Camera2Client;
-
-namespace camera2 {
-
-class CaptureSequencer;
-
-class BurstCapture : public virtual Thread,
- public virtual CpuConsumer::FrameAvailableListener
-{
-public:
- BurstCapture(wp<Camera2Client> client, wp<CaptureSequencer> sequencer);
- virtual ~BurstCapture();
-
- virtual void onFrameAvailable(const BufferItem& item);
- virtual status_t start(Vector<CameraMetadata> &metadatas, int32_t firstCaptureId);
-
-protected:
- Mutex mInputMutex;
- bool mInputChanged;
- Condition mInputSignal;
- int mCaptureStreamId;
- wp<Camera2Client> mClient;
- wp<CaptureSequencer> mSequencer;
-
- // Should only be accessed by processing thread
- enum {
- NO_STREAM = -1
- };
-
- CpuConsumer::LockedBuffer* jpegEncode(
- CpuConsumer::LockedBuffer *imgBuffer,
- int quality);
-
- virtual status_t processFrameAvailable(sp<Camera2Client> &client);
-
-private:
- virtual bool threadLoop();
- static const nsecs_t kWaitDuration = 10000000; // 10 ms
-};
-
-} // namespace camera2
-} // namespace android
-
-#endif
diff --git a/services/camera/libcameraservice/api1/client2/CallbackProcessor.h b/services/camera/libcameraservice/api1/client2/CallbackProcessor.h
index a290536..a22442f 100644
--- a/services/camera/libcameraservice/api1/client2/CallbackProcessor.h
+++ b/services/camera/libcameraservice/api1/client2/CallbackProcessor.h
@@ -33,7 +33,7 @@
namespace camera2 {
-class Parameters;
+struct Parameters;
/***
* Still image capture output image processing
@@ -75,7 +75,6 @@
sp<CpuConsumer> mCallbackConsumer;
sp<Surface> mCallbackWindow;
sp<Camera2Heap> mCallbackHeap;
- int mCallbackHeapId;
size_t mCallbackHeapHead, mCallbackHeapFree;
virtual bool threadLoop();
diff --git a/services/camera/libcameraservice/api1/client2/CaptureSequencer.cpp b/services/camera/libcameraservice/api1/client2/CaptureSequencer.cpp
index 5f7fd74..61e1442 100644
--- a/services/camera/libcameraservice/api1/client2/CaptureSequencer.cpp
+++ b/services/camera/libcameraservice/api1/client2/CaptureSequencer.cpp
@@ -26,9 +26,8 @@
#include "api1/Camera2Client.h"
#include "api1/client2/CaptureSequencer.h"
-#include "api1/client2/BurstCapture.h"
#include "api1/client2/Parameters.h"
-#include "api1/client2/ZslProcessorInterface.h"
+#include "api1/client2/ZslProcessor.h"
namespace android {
namespace camera2 {
@@ -59,7 +58,7 @@
ALOGV("%s: Exit", __FUNCTION__);
}
-void CaptureSequencer::setZslProcessor(wp<ZslProcessorInterface> processor) {
+void CaptureSequencer::setZslProcessor(wp<ZslProcessor> processor) {
Mutex::Autolock l(mInputMutex);
mZslProcessor = processor;
}
@@ -111,6 +110,7 @@
void CaptureSequencer::notifyShutter(const CaptureResultExtras& resultExtras,
nsecs_t timestamp) {
ATRACE_CALL();
+ (void) timestamp;
Mutex::Autolock l(mInputMutex);
if (!mHalNotifiedShutter && resultExtras.requestId == mShutterCaptureId) {
mHalNotifiedShutter = true;
@@ -174,8 +174,6 @@
"STANDARD_PRECAPTURE_WAIT",
"STANDARD_CAPTURE",
"STANDARD_CAPTURE_WAIT",
- "BURST_CAPTURE_START",
- "BURST_CAPTURE_WAIT",
"DONE",
"ERROR",
"UNKNOWN"
@@ -192,8 +190,6 @@
&CaptureSequencer::manageStandardPrecaptureWait,
&CaptureSequencer::manageStandardCapture,
&CaptureSequencer::manageStandardCaptureWait,
- &CaptureSequencer::manageBurstCaptureStart,
- &CaptureSequencer::manageBurstCaptureWait,
&CaptureSequencer::manageDone,
};
@@ -293,7 +289,7 @@
}
takePictureCounter = l.mParameters.takePictureCounter;
}
- sp<ZslProcessorInterface> processor = mZslProcessor.promote();
+ sp<ZslProcessor> processor = mZslProcessor.promote();
if (processor != 0) {
ALOGV("%s: Memory optimization, clearing ZSL queue",
__FUNCTION__);
@@ -336,10 +332,6 @@
return DONE;
}
- if(l.mParameters.lightFx != Parameters::LIGHTFX_NONE &&
- l.mParameters.state == Parameters::STILL_CAPTURE) {
- nextState = BURST_CAPTURE_START;
- }
else if (l.mParameters.zslMode &&
l.mParameters.state == Parameters::STILL_CAPTURE &&
l.mParameters.flashMode != Parameters::FLASH_MODE_ON) {
@@ -361,7 +353,7 @@
sp<Camera2Client> &client) {
ALOGV("%s", __FUNCTION__);
status_t res;
- sp<ZslProcessorInterface> processor = mZslProcessor.promote();
+ sp<ZslProcessor> processor = mZslProcessor.promote();
if (processor == 0) {
ALOGE("%s: No ZSL queue to use!", __FUNCTION__);
return DONE;
@@ -664,76 +656,6 @@
return STANDARD_CAPTURE_WAIT;
}
-CaptureSequencer::CaptureState CaptureSequencer::manageBurstCaptureStart(
- sp<Camera2Client> &client) {
- ALOGV("%s", __FUNCTION__);
- status_t res;
- ATRACE_CALL();
-
- // check which burst mode is set, create respective burst object
- {
- SharedParameters::Lock l(client->getParameters());
-
- res = updateCaptureRequest(l.mParameters, client);
- if(res != OK) {
- return DONE;
- }
-
- //
- // check for burst mode type in mParameters here
- //
- mBurstCapture = new BurstCapture(client, this);
- }
-
- res = mCaptureRequest.update(ANDROID_REQUEST_ID, &mCaptureId, 1);
- if (res == OK) {
- res = mCaptureRequest.sort();
- }
- if (res != OK) {
- ALOGE("%s: Camera %d: Unable to set up still capture request: %s (%d)",
- __FUNCTION__, client->getCameraId(), strerror(-res), res);
- return DONE;
- }
-
- CameraMetadata captureCopy = mCaptureRequest;
- if (captureCopy.entryCount() == 0) {
- ALOGE("%s: Camera %d: Unable to copy capture request for HAL device",
- __FUNCTION__, client->getCameraId());
- return DONE;
- }
-
- Vector<CameraMetadata> requests;
- requests.push(mCaptureRequest);
- res = mBurstCapture->start(requests, mCaptureId);
- mTimeoutCount = kMaxTimeoutsForCaptureEnd * 10;
- return BURST_CAPTURE_WAIT;
-}
-
-CaptureSequencer::CaptureState CaptureSequencer::manageBurstCaptureWait(
- sp<Camera2Client> &/*client*/) {
- status_t res;
- ATRACE_CALL();
- while (!mNewCaptureReceived) {
- res = mNewCaptureSignal.waitRelative(mInputMutex, kWaitDuration);
- if (res == TIMED_OUT) {
- mTimeoutCount--;
- break;
- }
- }
-
- if (mTimeoutCount <= 0) {
- ALOGW("Timed out waiting for burst capture to complete");
- return DONE;
- }
- if (mNewCaptureReceived) {
- mNewCaptureReceived = false;
- // TODO: update mCaptureId to last burst's capture ID + 1?
- return DONE;
- }
-
- return BURST_CAPTURE_WAIT;
-}
-
status_t CaptureSequencer::updateCaptureRequest(const Parameters ¶ms,
sp<Camera2Client> &client) {
ATRACE_CALL();
diff --git a/services/camera/libcameraservice/api1/client2/CaptureSequencer.h b/services/camera/libcameraservice/api1/client2/CaptureSequencer.h
index 10252fb..b05207e 100644
--- a/services/camera/libcameraservice/api1/client2/CaptureSequencer.h
+++ b/services/camera/libcameraservice/api1/client2/CaptureSequencer.h
@@ -34,8 +34,7 @@
namespace camera2 {
-class ZslProcessorInterface;
-class BurstCapture;
+class ZslProcessor;
/**
* Manages the still image capture process for
@@ -49,7 +48,7 @@
~CaptureSequencer();
// Get reference to the ZslProcessor, which holds the ZSL buffers and frames
- void setZslProcessor(wp<ZslProcessorInterface> processor);
+ void setZslProcessor(wp<ZslProcessor> processor);
// Begin still image capture
status_t startCapture(int msgType);
@@ -113,8 +112,7 @@
static const int kMaxTimeoutsForCaptureEnd = 40; // 4 sec
wp<Camera2Client> mClient;
- wp<ZslProcessorInterface> mZslProcessor;
- sp<BurstCapture> mBurstCapture;
+ wp<ZslProcessor> mZslProcessor;
enum CaptureState {
IDLE,
@@ -126,8 +124,6 @@
STANDARD_PRECAPTURE_WAIT,
STANDARD_CAPTURE,
STANDARD_CAPTURE_WAIT,
- BURST_CAPTURE_START,
- BURST_CAPTURE_WAIT,
DONE,
ERROR,
NUM_CAPTURE_STATES
@@ -165,9 +161,6 @@
CaptureState manageStandardCapture(sp<Camera2Client> &client);
CaptureState manageStandardCaptureWait(sp<Camera2Client> &client);
- CaptureState manageBurstCaptureStart(sp<Camera2Client> &client);
- CaptureState manageBurstCaptureWait(sp<Camera2Client> &client);
-
CaptureState manageDone(sp<Camera2Client> &client);
// Utility methods
diff --git a/services/camera/libcameraservice/api1/client2/FrameProcessor.cpp b/services/camera/libcameraservice/api1/client2/FrameProcessor.cpp
index 40d53b3..6490682 100644
--- a/services/camera/libcameraservice/api1/client2/FrameProcessor.cpp
+++ b/services/camera/libcameraservice/api1/client2/FrameProcessor.cpp
@@ -170,7 +170,7 @@
entry = frame.find(ANDROID_SCALER_CROP_REGION);
if (entry.count < 4) {
- ALOGE("%s: Camera %d: Unable to read crop region (count = %d)",
+ ALOGE("%s: Camera %d: Unable to read crop region (count = %zu)",
__FUNCTION__, client->getCameraId(), entry.count);
return res;
}
diff --git a/services/camera/libcameraservice/api1/client2/JpegProcessor.cpp b/services/camera/libcameraservice/api1/client2/JpegProcessor.cpp
index bd9786f..3923853 100644
--- a/services/camera/libcameraservice/api1/client2/JpegProcessor.cpp
+++ b/services/camera/libcameraservice/api1/client2/JpegProcessor.cpp
@@ -108,7 +108,7 @@
return NO_MEMORY;
}
}
- ALOGV("%s: Camera %d: JPEG capture heap now %d bytes; requested %d bytes",
+ ALOGV("%s: Camera %d: JPEG capture heap now %zu bytes; requested %zd bytes",
__FUNCTION__, mId, mCaptureHeap->getSize(), maxJpegSize);
if (mCaptureStreamId != NO_STREAM) {
diff --git a/services/camera/libcameraservice/api1/client2/JpegProcessor.h b/services/camera/libcameraservice/api1/client2/JpegProcessor.h
index fbdae11..ac6f5c7 100644
--- a/services/camera/libcameraservice/api1/client2/JpegProcessor.h
+++ b/services/camera/libcameraservice/api1/client2/JpegProcessor.h
@@ -35,7 +35,7 @@
namespace camera2 {
class CaptureSequencer;
-class Parameters;
+struct Parameters;
/***
* Still image capture output image processing
diff --git a/services/camera/libcameraservice/api1/client2/Parameters.cpp b/services/camera/libcameraservice/api1/client2/Parameters.cpp
index fc5ebac..f901dda 100644
--- a/services/camera/libcameraservice/api1/client2/Parameters.cpp
+++ b/services/camera/libcameraservice/api1/client2/Parameters.cpp
@@ -913,8 +913,6 @@
ALOGI("%s: zslMode: %d slowJpegMode %d", __FUNCTION__, zslMode, slowJpegMode);
- lightFx = LIGHTFX_NONE;
-
state = STOPPED;
paramsFlattened = params.flatten();
@@ -1864,10 +1862,6 @@
ALOGE("%s: Video stabilization not supported", __FUNCTION__);
}
- // LIGHTFX
- validatedParams.lightFx = lightFxStringToEnum(
- newParams.get(CameraParameters::KEY_LIGHTFX));
-
/** Update internal parameters */
*this = validatedParams;
@@ -1959,7 +1953,7 @@
if (res != OK) return res;
// android.hardware.Camera requires that when face detect is enabled, the
- // camera is in a face-priority mode. HAL2 splits this into separate parts
+ // camera is in a face-priority mode. HAL3.x splits this into separate parts
// (face detection statistics and face priority scene mode). Map from other
// to the other.
bool sceneModeActive =
@@ -2501,18 +2495,6 @@
}
}
-Parameters::Parameters::lightFxMode_t Parameters::lightFxStringToEnum(
- const char *lightFxMode) {
- return
- !lightFxMode ?
- Parameters::LIGHTFX_NONE :
- !strcmp(lightFxMode, CameraParameters::LIGHTFX_LOWLIGHT) ?
- Parameters::LIGHTFX_LOWLIGHT :
- !strcmp(lightFxMode, CameraParameters::LIGHTFX_HDR) ?
- Parameters::LIGHTFX_HDR :
- Parameters::LIGHTFX_NONE;
-}
-
status_t Parameters::parseAreas(const char *areasCStr,
Vector<Parameters::Area> *areas) {
static const size_t NUM_FIELDS = 5;
diff --git a/services/camera/libcameraservice/api1/client2/Parameters.h b/services/camera/libcameraservice/api1/client2/Parameters.h
index 972d007..c5bbf63 100644
--- a/services/camera/libcameraservice/api1/client2/Parameters.h
+++ b/services/camera/libcameraservice/api1/client2/Parameters.h
@@ -136,12 +136,6 @@
bool recordingHint;
bool videoStabilization;
- enum lightFxMode_t {
- LIGHTFX_NONE = 0,
- LIGHTFX_LOWLIGHT,
- LIGHTFX_HDR
- } lightFx;
-
CameraParameters2 params;
String8 paramsFlattened;
@@ -307,7 +301,6 @@
static const char* flashModeEnumToString(flashMode_t flashMode);
static focusMode_t focusModeStringToEnum(const char *focusMode);
static const char* focusModeEnumToString(focusMode_t focusMode);
- static lightFxMode_t lightFxStringToEnum(const char *lightFxMode);
static status_t parseAreas(const char *areasCStr,
Vector<Area> *areas);
@@ -330,7 +323,7 @@
static const int kFpsToApiScale = 1000;
// Transform from (-1000,-1000)-(1000,1000) normalized coords from camera
- // API to HAL2 (0,0)-(activePixelArray.width/height) coordinates
+ // API to HAL3 (0,0)-(activePixelArray.width/height) coordinates
int normalizedXToArray(int x) const;
int normalizedYToArray(int y) const;
@@ -350,7 +343,7 @@
private:
// Convert from viewfinder crop-region relative array coordinates
- // to HAL2 sensor array coordinates
+ // to HAL3 sensor array coordinates
int cropXToArray(int x) const;
int cropYToArray(int y) const;
diff --git a/services/camera/libcameraservice/api1/client2/StreamingProcessor.h b/services/camera/libcameraservice/api1/client2/StreamingProcessor.h
index e0cad3a..0b17eae 100644
--- a/services/camera/libcameraservice/api1/client2/StreamingProcessor.h
+++ b/services/camera/libcameraservice/api1/client2/StreamingProcessor.h
@@ -31,7 +31,7 @@
namespace camera2 {
-class Parameters;
+struct Parameters;
class Camera2Heap;
/**
diff --git a/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp b/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp
index 0b79b31..b127472 100644
--- a/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp
+++ b/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp
@@ -1,5 +1,5 @@
/*
- * Copyright (C) 2012 The Android Open Source Project
+ * Copyright (C) 2013 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -22,7 +22,7 @@
#ifdef LOG_NNDEBUG
#define ALOGVV(...) ALOGV(__VA_ARGS__)
#else
-#define ALOGVV(...) ((void)0)
+#define ALOGVV(...) if (0) ALOGV(__VA_ARGS__)
#endif
#include <inttypes.h>
@@ -35,6 +35,7 @@
#include "api1/Camera2Client.h"
#include "api1/client2/CaptureSequencer.h"
#include "api1/client2/ZslProcessor.h"
+#include "device3/Camera3Device.h"
namespace android {
namespace camera2 {
@@ -43,35 +44,55 @@
sp<Camera2Client> client,
wp<CaptureSequencer> sequencer):
Thread(false),
+ mLatestClearedBufferTimestamp(0),
mState(RUNNING),
mClient(client),
- mDevice(client->getCameraDevice()),
mSequencer(sequencer),
mId(client->getCameraId()),
- mDeleted(false),
- mZslBufferAvailable(false),
mZslStreamId(NO_STREAM),
- mZslReprocessStreamId(NO_STREAM),
mFrameListHead(0),
- mZslQueueHead(0),
- mZslQueueTail(0) {
- mZslQueue.insertAt(0, kZslBufferDepth);
- mFrameList.insertAt(0, kFrameListDepth);
+ mHasFocuser(false) {
+ // Initialize buffer queue and frame list based on pipeline max depth.
+ size_t pipelineMaxDepth = kDefaultMaxPipelineDepth;
+ if (client != 0) {
+ sp<Camera3Device> device =
+ static_cast<Camera3Device*>(client->getCameraDevice().get());
+ if (device != 0) {
+ camera_metadata_ro_entry_t entry =
+ device->info().find(ANDROID_REQUEST_PIPELINE_MAX_DEPTH);
+ if (entry.count == 1) {
+ pipelineMaxDepth = entry.data.u8[0];
+ } else {
+ ALOGW("%s: Unable to find the android.request.pipelineMaxDepth,"
+ " use default pipeline max depth %d", __FUNCTION__,
+ kDefaultMaxPipelineDepth);
+ }
+
+ entry = device->info().find(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE);
+ if (entry.count > 0 && entry.data.f[0] != 0.) {
+ mHasFocuser = true;
+ }
+ }
+ }
+
+ ALOGV("%s: Initialize buffer queue and frame list depth based on max pipeline depth (%zu)",
+ __FUNCTION__, pipelineMaxDepth);
+ // Need to keep the buffer queue longer than the metadata queue because a buffer sometimes
+ // arrives earlier than its metadata, which would cause the buffer corresponding to the
+ // oldest metadata to be removed.
+ mFrameListDepth = pipelineMaxDepth;
+ mBufferQueueDepth = mFrameListDepth + 1;
+
+
+ mZslQueue.insertAt(0, mBufferQueueDepth);
+ mFrameList.insertAt(0, mFrameListDepth);
sp<CaptureSequencer> captureSequencer = mSequencer.promote();
if (captureSequencer != 0) captureSequencer->setZslProcessor(this);
}
ZslProcessor::~ZslProcessor() {
ALOGV("%s: Exit", __FUNCTION__);
- disconnect();
-}
-
-void ZslProcessor::onFrameAvailable(const BufferItem& /*item*/) {
- Mutex::Autolock l(mInputMutex);
- if (!mZslBufferAvailable) {
- mZslBufferAvailable = true;
- mZslBufferAvailableSignal.signal();
- }
+ deleteStream();
}
void ZslProcessor::onResultAvailable(const CaptureResult &result) {
@@ -81,35 +102,27 @@
camera_metadata_ro_entry_t entry;
entry = result.mMetadata.find(ANDROID_SENSOR_TIMESTAMP);
nsecs_t timestamp = entry.data.i64[0];
- (void)timestamp;
- ALOGVV("Got preview frame for timestamp %" PRId64, timestamp);
+ if (entry.count == 0) {
+ ALOGE("%s: metadata doesn't have timestamp, skip this result", __FUNCTION__);
+ return;
+ }
+
+ entry = result.mMetadata.find(ANDROID_REQUEST_FRAME_COUNT);
+ if (entry.count == 0) {
+ ALOGE("%s: metadata doesn't have frame number, skip this result", __FUNCTION__);
+ return;
+ }
+ int32_t frameNumber = entry.data.i32[0];
+
+ ALOGVV("Got preview metadata for frame %d with timestamp %" PRId64, frameNumber, timestamp);
if (mState != RUNNING) return;
+ // Corresponding buffer has been cleared. No need to push into mFrameList
+ if (timestamp <= mLatestClearedBufferTimestamp) return;
+
mFrameList.editItemAt(mFrameListHead) = result.mMetadata;
- mFrameListHead = (mFrameListHead + 1) % kFrameListDepth;
-
- findMatchesLocked();
-}
-
-void ZslProcessor::onBufferReleased(buffer_handle_t *handle) {
- Mutex::Autolock l(mInputMutex);
-
- // Verify that the buffer is in our queue
- size_t i = 0;
- for (; i < mZslQueue.size(); i++) {
- if (&(mZslQueue[i].buffer.mGraphicBuffer->handle) == handle) break;
- }
- if (i == mZslQueue.size()) {
- ALOGW("%s: Released buffer %p not found in queue",
- __FUNCTION__, handle);
- }
-
- // Erase entire ZSL queue since we've now completed the capture and preview
- // is stopped.
- clearZslQueueLocked();
-
- mState = RUNNING;
+ mFrameListHead = (mFrameListHead + 1) % mFrameListDepth;
}
status_t ZslProcessor::updateStream(const Parameters ¶ms) {
@@ -124,25 +137,13 @@
ALOGE("%s: Camera %d: Client does not exist", __FUNCTION__, mId);
return INVALID_OPERATION;
}
- sp<CameraDeviceBase> device = mDevice.promote();
+ sp<Camera3Device> device =
+ static_cast<Camera3Device*>(client->getCameraDevice().get());
if (device == 0) {
ALOGE("%s: Camera %d: Device does not exist", __FUNCTION__, mId);
return INVALID_OPERATION;
}
- if (mZslConsumer == 0) {
- // Create CPU buffer queue endpoint
- sp<IGraphicBufferProducer> producer;
- sp<IGraphicBufferConsumer> consumer;
- BufferQueue::createBufferQueue(&producer, &consumer);
- mZslConsumer = new BufferItemConsumer(consumer,
- GRALLOC_USAGE_HW_CAMERA_ZSL,
- kZslBufferDepth);
- mZslConsumer->setFrameAvailableListener(this);
- mZslConsumer->setName(String8("Camera2-ZslConsumer"));
- mZslWindow = new Surface(producer);
- }
-
if (mZslStreamId != NO_STREAM) {
// Check if stream parameters have to change
uint32_t currentWidth, currentHeight;
@@ -151,57 +152,50 @@
if (res != OK) {
ALOGE("%s: Camera %d: Error querying capture output stream info: "
"%s (%d)", __FUNCTION__,
- mId, strerror(-res), res);
+ client->getCameraId(), strerror(-res), res);
return res;
}
if (currentWidth != (uint32_t)params.fastInfo.arrayWidth ||
currentHeight != (uint32_t)params.fastInfo.arrayHeight) {
- res = device->deleteReprocessStream(mZslReprocessStreamId);
- if (res != OK) {
- ALOGE("%s: Camera %d: Unable to delete old reprocess stream "
- "for ZSL: %s (%d)", __FUNCTION__,
- mId, strerror(-res), res);
- return res;
- }
- ALOGV("%s: Camera %d: Deleting stream %d since the buffer dimensions changed",
- __FUNCTION__, mId, mZslStreamId);
+ ALOGV("%s: Camera %d: Deleting stream %d since the buffer "
+ "dimensions changed",
+ __FUNCTION__, client->getCameraId(), mZslStreamId);
res = device->deleteStream(mZslStreamId);
- if (res != OK) {
+ if (res == -EBUSY) {
+ ALOGV("%s: Camera %d: Device is busy, call updateStream again "
+ " after it becomes idle", __FUNCTION__, mId);
+ return res;
+ } else if(res != OK) {
ALOGE("%s: Camera %d: Unable to delete old output stream "
"for ZSL: %s (%d)", __FUNCTION__,
- mId, strerror(-res), res);
+ client->getCameraId(), strerror(-res), res);
return res;
}
mZslStreamId = NO_STREAM;
}
}
- mDeleted = false;
-
if (mZslStreamId == NO_STREAM) {
// Create stream for HAL production
// TODO: Sort out better way to select resolution for ZSL
- int streamType = params.quirks.useZslFormat ?
- (int)CAMERA2_HAL_PIXEL_FORMAT_ZSL :
- (int)HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
- res = device->createStream(mZslWindow,
- params.fastInfo.arrayWidth, params.fastInfo.arrayHeight, streamType,
- HAL_DATASPACE_UNKNOWN, CAMERA3_STREAM_ROTATION_0, &mZslStreamId);
+
+ // Note that the format is specified internally in Camera3ZslStream
+ res = device->createZslStream(
+ params.fastInfo.arrayWidth, params.fastInfo.arrayHeight,
+ mBufferQueueDepth,
+ &mZslStreamId,
+ &mZslStream);
if (res != OK) {
- ALOGE("%s: Camera %d: Can't create output stream for ZSL: "
- "%s (%d)", __FUNCTION__, mId,
+ ALOGE("%s: Camera %d: Can't create ZSL stream: "
+ "%s (%d)", __FUNCTION__, client->getCameraId(),
strerror(-res), res);
return res;
}
- res = device->createReprocessStreamFromStream(mZslStreamId,
- &mZslReprocessStreamId);
- if (res != OK) {
- ALOGE("%s: Camera %d: Can't create reprocess stream for ZSL: "
- "%s (%d)", __FUNCTION__, mId,
- strerror(-res), res);
- return res;
- }
+
+ // Only add the camera3 buffer listener when the stream is created.
+ mZslStream->addBufferListener(this);
}
+
client->registerFrameListener(Camera2Client::kPreviewRequestIdStart,
Camera2Client::kPreviewRequestIdEnd,
this,
@@ -212,47 +206,32 @@
status_t ZslProcessor::deleteStream() {
ATRACE_CALL();
- Mutex::Autolock l(mInputMutex);
- // WAR(b/15408128): do not delete stream unless client is being disconnected.
- mDeleted = true;
- return OK;
-}
-
-status_t ZslProcessor::disconnect() {
- ATRACE_CALL();
status_t res;
Mutex::Autolock l(mInputMutex);
if (mZslStreamId != NO_STREAM) {
- sp<CameraDeviceBase> device = mDevice.promote();
+ sp<Camera2Client> client = mClient.promote();
+ if (client == 0) {
+ ALOGE("%s: Camera %d: Client does not exist", __FUNCTION__, mId);
+ return INVALID_OPERATION;
+ }
+
+ sp<Camera3Device> device =
+ reinterpret_cast<Camera3Device*>(client->getCameraDevice().get());
if (device == 0) {
ALOGE("%s: Camera %d: Device does not exist", __FUNCTION__, mId);
return INVALID_OPERATION;
}
- clearZslQueueLocked();
-
- res = device->deleteReprocessStream(mZslReprocessStreamId);
- if (res != OK) {
- ALOGE("%s: Camera %d: Cannot delete ZSL reprocessing stream %d: "
- "%s (%d)", __FUNCTION__, mId,
- mZslReprocessStreamId, strerror(-res), res);
- return res;
- }
-
- mZslReprocessStreamId = NO_STREAM;
res = device->deleteStream(mZslStreamId);
if (res != OK) {
ALOGE("%s: Camera %d: Cannot delete ZSL output stream %d: "
- "%s (%d)", __FUNCTION__, mId,
+ "%s (%d)", __FUNCTION__, client->getCameraId(),
mZslStreamId, strerror(-res), res);
return res;
}
- mZslWindow.clear();
- mZslConsumer.clear();
-
mZslStreamId = NO_STREAM;
}
return OK;
@@ -263,6 +242,46 @@
return mZslStreamId;
}
+status_t ZslProcessor::updateRequestWithDefaultStillRequest(CameraMetadata &request) const {
+ sp<Camera2Client> client = mClient.promote();
+ if (client == 0) {
+ ALOGE("%s: Camera %d: Client does not exist", __FUNCTION__, mId);
+ return INVALID_OPERATION;
+ }
+ sp<Camera3Device> device =
+ static_cast<Camera3Device*>(client->getCameraDevice().get());
+ if (device == 0) {
+ ALOGE("%s: Camera %d: Device does not exist", __FUNCTION__, mId);
+ return INVALID_OPERATION;
+ }
+
+ CameraMetadata stillTemplate;
+ device->createDefaultRequest(CAMERA3_TEMPLATE_STILL_CAPTURE, &stillTemplate);
+
+ // Find some of the post-processing tags, and assign the value from template to the request.
+ // Only check the aberration mode and noise reduction mode for now, as they are very important
+ // for image quality.
+ uint32_t postProcessingTags[] = {
+ ANDROID_NOISE_REDUCTION_MODE,
+ ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
+ ANDROID_COLOR_CORRECTION_MODE,
+ ANDROID_TONEMAP_MODE,
+ ANDROID_SHADING_MODE,
+ ANDROID_HOT_PIXEL_MODE,
+ ANDROID_EDGE_MODE
+ };
+
+ camera_metadata_entry_t entry;
+ for (size_t i = 0; i < sizeof(postProcessingTags) / sizeof(uint32_t); i++) {
+ entry = stillTemplate.find(postProcessingTags[i]);
+ if (entry.count > 0) {
+ request.update(postProcessingTags[i], entry.data.u8, 1);
+ }
+ }
+
+ return OK;
+}
+
status_t ZslProcessor::pushToReprocess(int32_t requestId) {
ALOGV("%s: Send in reprocess request with id %d",
__FUNCTION__, requestId);
@@ -279,21 +298,30 @@
dumpZslQueue(-1);
}
- if (mZslQueueTail != mZslQueueHead) {
- CameraMetadata request;
- size_t index = mZslQueueTail;
- while (index != mZslQueueHead) {
- if (!mZslQueue[index].frame.isEmpty()) {
- request = mZslQueue[index].frame;
- break;
- }
- index = (index + 1) % kZslBufferDepth;
- }
- if (index == mZslQueueHead) {
- ALOGV("%s: ZSL queue has no valid frames to send yet.",
- __FUNCTION__);
- return NOT_ENOUGH_DATA;
- }
+ size_t metadataIdx;
+ nsecs_t candidateTimestamp = getCandidateTimestampLocked(&metadataIdx);
+
+ if (candidateTimestamp == -1) {
+ ALOGE("%s: Could not find good candidate for ZSL reprocessing",
+ __FUNCTION__);
+ return NOT_ENOUGH_DATA;
+ }
+
+ res = mZslStream->enqueueInputBufferByTimestamp(candidateTimestamp,
+ /*actualTimestamp*/NULL);
+
+ if (res == mZslStream->NO_BUFFER_AVAILABLE) {
+ ALOGV("%s: No ZSL buffers yet", __FUNCTION__);
+ return NOT_ENOUGH_DATA;
+ } else if (res != OK) {
+ ALOGE("%s: Unable to push buffer for reprocessing: %s (%d)",
+ __FUNCTION__, strerror(-res), res);
+ return res;
+ }
+
+ {
+ CameraMetadata request = mFrameList[metadataIdx];
+
// Verify that the frame is reasonable for reprocessing
camera_metadata_entry_t entry;
@@ -310,25 +338,51 @@
return NOT_ENOUGH_DATA;
}
- buffer_handle_t *handle =
- &(mZslQueue[index].buffer.mGraphicBuffer->handle);
-
uint8_t requestType = ANDROID_REQUEST_TYPE_REPROCESS;
res = request.update(ANDROID_REQUEST_TYPE,
&requestType, 1);
+ if (res != OK) {
+ ALOGE("%s: Unable to update request type",
+ __FUNCTION__);
+ return INVALID_OPERATION;
+ }
+
int32_t inputStreams[1] =
- { mZslReprocessStreamId };
- if (res == OK) request.update(ANDROID_REQUEST_INPUT_STREAMS,
+ { mZslStreamId };
+ res = request.update(ANDROID_REQUEST_INPUT_STREAMS,
inputStreams, 1);
+ if (res != OK) {
+ ALOGE("%s: Unable to update request input streams",
+ __FUNCTION__);
+ return INVALID_OPERATION;
+ }
+
+ uint8_t captureIntent =
+ static_cast<uint8_t>(ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE);
+ res = request.update(ANDROID_CONTROL_CAPTURE_INTENT,
+ &captureIntent, 1);
+ if (res != OK ) {
+ ALOGE("%s: Unable to update request capture intent",
+ __FUNCTION__);
+ return INVALID_OPERATION;
+ }
+
+ // TODO: Shouldn't we also update the latest preview frame?
int32_t outputStreams[1] =
{ client->getCaptureStreamId() };
- if (res == OK) request.update(ANDROID_REQUEST_OUTPUT_STREAMS,
+ res = request.update(ANDROID_REQUEST_OUTPUT_STREAMS,
outputStreams, 1);
+ if (res != OK) {
+ ALOGE("%s: Unable to update request output streams",
+ __FUNCTION__);
+ return INVALID_OPERATION;
+ }
+
res = request.update(ANDROID_REQUEST_ID,
&requestId, 1);
-
if (res != OK ) {
- ALOGE("%s: Unable to update frame to a reprocess request", __FUNCTION__);
+ ALOGE("%s: Unable to update frame to a reprocess request",
+ __FUNCTION__);
return INVALID_OPERATION;
}
@@ -336,17 +390,9 @@
if (res != OK) {
ALOGE("%s: Camera %d: Unable to stop preview for ZSL capture: "
"%s (%d)",
- __FUNCTION__, mId, strerror(-res), res);
+ __FUNCTION__, client->getCameraId(), strerror(-res), res);
return INVALID_OPERATION;
}
- // TODO: have push-and-clear be atomic
- res = client->getCameraDevice()->pushReprocessBuffer(mZslReprocessStreamId,
- handle, this);
- if (res != OK) {
- ALOGE("%s: Unable to push buffer for reprocessing: %s (%d)",
- __FUNCTION__, strerror(-res), res);
- return res;
- }
// Update JPEG settings
{
@@ -355,25 +401,30 @@
if (res != OK) {
ALOGE("%s: Camera %d: Unable to update JPEG entries of ZSL "
"capture request: %s (%d)", __FUNCTION__,
- mId,
+ client->getCameraId(),
strerror(-res), res);
return res;
}
}
+ // Update post-processing settings
+ res = updateRequestWithDefaultStillRequest(request);
+ if (res != OK) {
+ ALOGW("%s: Unable to update post-processing tags, the reprocessed image quality "
+ "may be compromised", __FUNCTION__);
+ }
+
mLatestCapturedRequest = request;
res = client->getCameraDevice()->capture(request);
if (res != OK ) {
- ALOGE("%s: Unable to send ZSL reprocess request to capture: %s (%d)",
- __FUNCTION__, strerror(-res), res);
+ ALOGE("%s: Unable to send ZSL reprocess request to capture: %s"
+ " (%d)", __FUNCTION__, strerror(-res), res);
return res;
}
mState = LOCKED;
- } else {
- ALOGV("%s: No ZSL buffers yet", __FUNCTION__);
- return NOT_ENOUGH_DATA;
}
+
return OK;
}
@@ -386,17 +437,20 @@
}
status_t ZslProcessor::clearZslQueueLocked() {
- for (size_t i = 0; i < mZslQueue.size(); i++) {
- if (mZslQueue[i].buffer.mTimestamp != 0) {
- mZslConsumer->releaseBuffer(mZslQueue[i].buffer);
- }
- mZslQueue.replaceAt(i);
+ if (mZslStream != 0) {
+ // clear result metadata list first.
+ clearZslResultQueueLocked();
+ return mZslStream->clearInputRingBuffer(&mLatestClearedBufferTimestamp);
}
- mZslQueueHead = 0;
- mZslQueueTail = 0;
return OK;
}
+void ZslProcessor::clearZslResultQueueLocked() {
+ mFrameList.clear();
+ mFrameListHead = 0;
+ mFrameList.insertAt(0, mFrameListDepth);
+}
+
void ZslProcessor::dump(int fd, const Vector<String16>& /*args*/) const {
Mutex::Autolock l(mInputMutex);
if (!mLatestCapturedRequest.isEmpty()) {
@@ -411,128 +465,9 @@
}
bool ZslProcessor::threadLoop() {
- status_t res;
-
- {
- Mutex::Autolock l(mInputMutex);
- while (!mZslBufferAvailable) {
- res = mZslBufferAvailableSignal.waitRelative(mInputMutex,
- kWaitDuration);
- if (res == TIMED_OUT) return true;
- }
- mZslBufferAvailable = false;
- }
-
- do {
- res = processNewZslBuffer();
- } while (res == OK);
-
- return true;
-}
-
-status_t ZslProcessor::processNewZslBuffer() {
- ATRACE_CALL();
- status_t res;
- sp<BufferItemConsumer> zslConsumer;
- {
- Mutex::Autolock l(mInputMutex);
- if (mZslConsumer == 0) return OK;
- zslConsumer = mZslConsumer;
- }
- ALOGVV("Trying to get next buffer");
- BufferItem item;
- res = zslConsumer->acquireBuffer(&item, 0);
- if (res != OK) {
- if (res != BufferItemConsumer::NO_BUFFER_AVAILABLE) {
- ALOGE("%s: Camera %d: Error receiving ZSL image buffer: "
- "%s (%d)", __FUNCTION__,
- mId, strerror(-res), res);
- } else {
- ALOGVV(" No buffer");
- }
- return res;
- }
-
- Mutex::Autolock l(mInputMutex);
-
- if (mState == LOCKED) {
- ALOGVV("In capture, discarding new ZSL buffers");
- zslConsumer->releaseBuffer(item);
- return OK;
- }
-
- ALOGVV("Got ZSL buffer: head: %d, tail: %d", mZslQueueHead, mZslQueueTail);
-
- if ( (mZslQueueHead + 1) % kZslBufferDepth == mZslQueueTail) {
- ALOGVV("Releasing oldest buffer");
- zslConsumer->releaseBuffer(mZslQueue[mZslQueueTail].buffer);
- mZslQueue.replaceAt(mZslQueueTail);
- mZslQueueTail = (mZslQueueTail + 1) % kZslBufferDepth;
- }
-
- ZslPair &queueHead = mZslQueue.editItemAt(mZslQueueHead);
-
- queueHead.buffer = item;
- queueHead.frame.release();
-
- mZslQueueHead = (mZslQueueHead + 1) % kZslBufferDepth;
-
- ALOGVV(" Acquired buffer, timestamp %" PRId64, queueHead.buffer.mTimestamp);
-
- findMatchesLocked();
-
- return OK;
-}
-
-void ZslProcessor::findMatchesLocked() {
- ALOGVV("Scanning");
- for (size_t i = 0; i < mZslQueue.size(); i++) {
- ZslPair &queueEntry = mZslQueue.editItemAt(i);
- nsecs_t bufferTimestamp = queueEntry.buffer.mTimestamp;
- IF_ALOGV() {
- camera_metadata_entry_t entry;
- nsecs_t frameTimestamp = 0;
- if (!queueEntry.frame.isEmpty()) {
- entry = queueEntry.frame.find(ANDROID_SENSOR_TIMESTAMP);
- frameTimestamp = entry.data.i64[0];
- }
- ALOGVV(" %d: b: %" PRId64 "\tf: %" PRId64, i,
- bufferTimestamp, frameTimestamp );
- }
- if (queueEntry.frame.isEmpty() && bufferTimestamp != 0) {
- // Have buffer, no matching frame. Look for one
- for (size_t j = 0; j < mFrameList.size(); j++) {
- bool match = false;
- CameraMetadata &frame = mFrameList.editItemAt(j);
- if (!frame.isEmpty()) {
- camera_metadata_entry_t entry;
- entry = frame.find(ANDROID_SENSOR_TIMESTAMP);
- if (entry.count == 0) {
- ALOGE("%s: Can't find timestamp in frame!",
- __FUNCTION__);
- continue;
- }
- nsecs_t frameTimestamp = entry.data.i64[0];
- if (bufferTimestamp == frameTimestamp) {
- ALOGVV("%s: Found match %" PRId64, __FUNCTION__,
- frameTimestamp);
- match = true;
- } else {
- int64_t delta = abs(bufferTimestamp - frameTimestamp);
- if ( delta < 1000000) {
- ALOGVV("%s: Found close match %" PRId64 " (delta %" PRId64 ")",
- __FUNCTION__, bufferTimestamp, delta);
- match = true;
- }
- }
- }
- if (match) {
- queueEntry.frame.acquire(frame);
- break;
- }
- }
- }
- }
+ // TODO: remove dependency on thread. For now, shut the thread down right
+ // away.
+ return false;
}
void ZslProcessor::dumpZslQueue(int fd) const {
@@ -567,5 +502,174 @@
}
}
+bool ZslProcessor::isFixedFocusMode(uint8_t afMode) const {
+ switch (afMode) {
+ case ANDROID_CONTROL_AF_MODE_AUTO:
+ case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
+ case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
+ case ANDROID_CONTROL_AF_MODE_MACRO:
+ return false;
+ case ANDROID_CONTROL_AF_MODE_OFF:
+ case ANDROID_CONTROL_AF_MODE_EDOF:
+ return true;
+ default:
+ ALOGE("%s: unknown focus mode %d", __FUNCTION__, afMode);
+ return false;
+ }
+}
+
+nsecs_t ZslProcessor::getCandidateTimestampLocked(size_t* metadataIdx) const {
+ /**
+ * Find the smallest timestamp we know about so far
+ * - ensure that aeState is either converged or locked
+ * - if the device has a focuser and AF isn't fixed-focus, also require a
+ * focused or locked afState
+ */
+
+ size_t idx = 0;
+ nsecs_t minTimestamp = -1;
+
+ size_t emptyCount = mFrameList.size();
+
+ for (size_t j = 0; j < mFrameList.size(); j++) {
+ const CameraMetadata &frame = mFrameList[j];
+ if (!frame.isEmpty()) {
+
+ emptyCount--;
+
+ camera_metadata_ro_entry_t entry;
+ entry = frame.find(ANDROID_SENSOR_TIMESTAMP);
+ if (entry.count == 0) {
+ ALOGE("%s: Can't find timestamp in frame!",
+ __FUNCTION__);
+ continue;
+ }
+ nsecs_t frameTimestamp = entry.data.i64[0];
+ if (minTimestamp > frameTimestamp || minTimestamp == -1) {
+
+ entry = frame.find(ANDROID_CONTROL_AE_STATE);
+
+ if (entry.count == 0) {
+ /**
+ * This is most likely a HAL bug. The aeState field is
+ * mandatory, so it should always be in a metadata packet.
+ */
+ ALOGW("%s: ZSL queue frame has no AE state field!",
+ __FUNCTION__);
+ continue;
+ }
+ if (entry.data.u8[0] != ANDROID_CONTROL_AE_STATE_CONVERGED &&
+ entry.data.u8[0] != ANDROID_CONTROL_AE_STATE_LOCKED) {
+ ALOGVV("%s: ZSL queue frame AE state is %d, need "
+ "full capture", __FUNCTION__, entry.data.u8[0]);
+ continue;
+ }
+
+ entry = frame.find(ANDROID_CONTROL_AF_MODE);
+ if (entry.count == 0) {
+ ALOGW("%s: ZSL queue frame has no AF mode field!",
+ __FUNCTION__);
+ continue;
+ }
+ uint8_t afMode = entry.data.u8[0];
+ if (afMode == ANDROID_CONTROL_AF_MODE_OFF) {
+ // Skip all ZSL buffers for manual AF mode, as we don't really
+ // know the AF state.
+ continue;
+ }
+
+ // Check AF state if device has focuser and focus mode isn't fixed
+ if (mHasFocuser && !isFixedFocusMode(afMode)) {
+ // Make sure the candidate frame has good focus.
+ entry = frame.find(ANDROID_CONTROL_AF_STATE);
+ if (entry.count == 0) {
+ ALOGW("%s: ZSL queue frame has no AF state field!",
+ __FUNCTION__);
+ continue;
+ }
+ uint8_t afState = entry.data.u8[0];
+ if (afState != ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED &&
+ afState != ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED &&
+ afState != ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED) {
+ ALOGVV("%s: ZSL queue frame AF state is %d is not good for capture, skip it",
+ __FUNCTION__, afState);
+ continue;
+ }
+ }
+
+ minTimestamp = frameTimestamp;
+ idx = j;
+ }
+
+ ALOGVV("%s: Saw timestamp %" PRId64, __FUNCTION__, frameTimestamp);
+ }
+ }
+
+ if (emptyCount == mFrameList.size()) {
+ /**
+ * This could be mildly bad: it means our ZSL was triggered before any
+ * frames had been received by the camera framework.
+ *
+ * This is a fairly rare corner case which can happen when:
+ * + the user presses the shutter button very quickly as the camera starts
+ * (startPreview followed immediately by takePicture).
+ * + bursts are captured (hitting the shutter button as fast as possible)
+ *
+ * If this happens in the steady state (preview running for a while, then a
+ * single takePicture call), this might be a framework bug.
+ */
+ ALOGW("%s: ZSL queue has no metadata frames", __FUNCTION__);
+ }
+
+ ALOGV("%s: Candidate timestamp %" PRId64 " (idx %zu), empty frames: %zu",
+ __FUNCTION__, minTimestamp, idx, emptyCount);
+
+ if (metadataIdx) {
+ *metadataIdx = idx;
+ }
+
+ return minTimestamp;
+}
+
+void ZslProcessor::onBufferAcquired(const BufferInfo& /*bufferInfo*/) {
+ // Intentionally left empty
+ // Although theoretically we could use this to get better dump info
+}
+
+void ZslProcessor::onBufferReleased(const BufferInfo& bufferInfo) {
+
+ // ignore output buffers
+ if (bufferInfo.mOutput) {
+ return;
+ }
+
+ // Lock the mutex only once we know this is a returned input buffer, to
+ // avoid a potential deadlock
+ Mutex::Autolock l(mInputMutex);
+ // TODO: Verify that the buffer is in our queue by looking at its timestamp;
+ // theoretically unnecessary unless we change the following assumption:
+ // -- only 1 buffer is reprocessed at a time (which is the case now)
+
+ // Erase entire ZSL queue since we've now completed the capture and preview
+ // is stopped.
+ //
+ // We need to guarantee that if we do two back-to-back captures,
+ // the second won't use a buffer that's older/the same as the first, which
+ // is theoretically possible if we don't clear out the queue and the
+ // selection criteria is something like 'newest'. Clearing out the result
+ // metadata queue on a completed capture ensures we'll only use new timestamp.
+ // Calling clearZslQueueLocked is a guaranteed deadlock because this callback
+ // holds the Camera3Stream internal lock (mLock), and clearZslQueueLocked requires
+ // to hold the same lock.
+ // TODO: need figure out a way to clear the Zsl buffer queue properly. Right now
+ // it is safe not to do so, as back to back ZSL capture requires stop and start
+ // preview, which will flush ZSL queue automatically.
+ ALOGV("%s: Memory optimization, clearing ZSL queue",
+ __FUNCTION__);
+ clearZslResultQueueLocked();
+
+ // Required so we accept more ZSL requests
+ mState = RUNNING;
+}
+
}; // namespace camera2
}; // namespace android
diff --git a/services/camera/libcameraservice/api1/client2/ZslProcessor.h b/services/camera/libcameraservice/api1/client2/ZslProcessor.h
index 5870bd3..86c06c6 100644
--- a/services/camera/libcameraservice/api1/client2/ZslProcessor.h
+++ b/services/camera/libcameraservice/api1/client2/ZslProcessor.h
@@ -1,5 +1,5 @@
/*
- * Copyright (C) 2012 The Android Open Source Project
+ * Copyright (C) 2013 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -25,11 +25,9 @@
#include <gui/BufferItem.h>
#include <gui/BufferItemConsumer.h>
#include <camera/CameraMetadata.h>
-#include <camera/CaptureResult.h>
-#include "common/CameraDeviceBase.h"
-#include "api1/client2/ZslProcessorInterface.h"
#include "api1/client2/FrameProcessor.h"
+#include "device3/Camera3ZslStream.h"
namespace android {
@@ -38,45 +36,66 @@
namespace camera2 {
class CaptureSequencer;
-class Parameters;
+struct Parameters;
/***
- * ZSL queue processing
+ * ZSL queue processing for HALv3.0 or newer
*/
-class ZslProcessor:
+class ZslProcessor :
+ public camera3::Camera3StreamBufferListener,
virtual public Thread,
- virtual public BufferItemConsumer::FrameAvailableListener,
- virtual public FrameProcessor::FilteredListener,
- virtual public CameraDeviceBase::BufferReleasedListener,
- public ZslProcessorInterface {
+ virtual public FrameProcessor::FilteredListener {
public:
ZslProcessor(sp<Camera2Client> client, wp<CaptureSequencer> sequencer);
~ZslProcessor();
- // From mZslConsumer
- virtual void onFrameAvailable(const BufferItem& item);
- // From FrameProcessor
+ // From FrameProcessor::FilteredListener
virtual void onResultAvailable(const CaptureResult &result);
- virtual void onBufferReleased(buffer_handle_t *handle);
-
/**
****************************************
* ZslProcessorInterface implementation *
****************************************
*/
+ // Update the streams by recreating them if the size/format has changed
status_t updateStream(const Parameters ¶ms);
+
+ // Delete the underlying CameraDevice streams
status_t deleteStream();
- status_t disconnect();
+
+ // Get ID for use with android.request.outputStreams / inputStreams
int getStreamId() const;
+ /**
+ * Submits a ZSL capture request (id = requestId)
+ *
+ * An appropriate ZSL buffer is selected by the closest timestamp,
+ * then we push that buffer to be reprocessed by the HAL.
+ * A capture request is created and submitted on behalf of the client.
+ */
status_t pushToReprocess(int32_t requestId);
+
+ // Flush the ZSL buffer queue, freeing up all the buffers
status_t clearZslQueue();
void dump(int fd, const Vector<String16>& args) const;
+
+ protected:
+ /**
+ **********************************************
+ * Camera3StreamBufferListener implementation *
+ **********************************************
+ */
+ typedef camera3::Camera3StreamBufferListener::BufferInfo BufferInfo;
+ // Buffer was acquired by the HAL
+ virtual void onBufferAcquired(const BufferInfo& bufferInfo);
+ // Buffer was released by the HAL
+ virtual void onBufferReleased(const BufferInfo& bufferInfo);
+
private:
static const nsecs_t kWaitDuration = 10000000; // 10 ms
+ nsecs_t mLatestClearedBufferTimestamp;
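+ // Timestamp of the most recently cleared ZSL buffer; older results are not queued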
enum {
RUNNING,
@@ -84,53 +103,52 @@
} mState;
wp<Camera2Client> mClient;
- wp<CameraDeviceBase> mDevice;
wp<CaptureSequencer> mSequencer;
- int mId;
- bool mDeleted;
+ const int mId;
mutable Mutex mInputMutex;
- bool mZslBufferAvailable;
- Condition mZslBufferAvailableSignal;
enum {
NO_STREAM = -1
};
int mZslStreamId;
- int mZslReprocessStreamId;
- sp<BufferItemConsumer> mZslConsumer;
- sp<Surface> mZslWindow;
+ sp<camera3::Camera3ZslStream> mZslStream;
struct ZslPair {
BufferItem buffer;
CameraMetadata frame;
};
- static const size_t kZslBufferDepth = 4;
- static const size_t kFrameListDepth = kZslBufferDepth * 2;
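+ // Default queue depth, used when the HAL doesn't report android.request.pipelineMaxDepth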
+ static const int32_t kDefaultMaxPipelineDepth = 4;
+ size_t mBufferQueueDepth;
+ size_t mFrameListDepth;
Vector<CameraMetadata> mFrameList;
size_t mFrameListHead;
ZslPair mNextPair;
Vector<ZslPair> mZslQueue;
- size_t mZslQueueHead;
- size_t mZslQueueTail;
CameraMetadata mLatestCapturedRequest;
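+ // Whether the device has a focuser (minimum focus distance > 0)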
+ bool mHasFocuser;
+
virtual bool threadLoop();
- status_t processNewZslBuffer();
-
- // Match up entries from frame list to buffers in ZSL queue
- void findMatchesLocked();
-
status_t clearZslQueueLocked();
+ void clearZslResultQueueLocked();
+
void dumpZslQueue(int id) const;
+
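+ // Find the oldest frame in the result queue with acceptable AE (and AF) state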
+ nsecs_t getCandidateTimestampLocked(size_t* metadataIdx) const;
+
+ bool isFixedFocusMode(uint8_t afMode) const;
+
+ // Update the post-processing metadata with the default still capture request template
+ status_t updateRequestWithDefaultStillRequest(CameraMetadata &request) const;
};
diff --git a/services/camera/libcameraservice/api1/client2/ZslProcessor3.cpp b/services/camera/libcameraservice/api1/client2/ZslProcessor3.cpp
deleted file mode 100644
index 69620ac..0000000
--- a/services/camera/libcameraservice/api1/client2/ZslProcessor3.cpp
+++ /dev/null
@@ -1,677 +0,0 @@
-/*
- * Copyright (C) 2013 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#define LOG_TAG "Camera2-ZslProcessor3"
-#define ATRACE_TAG ATRACE_TAG_CAMERA
-//#define LOG_NDEBUG 0
-//#define LOG_NNDEBUG 0
-
-#ifdef LOG_NNDEBUG
-#define ALOGVV(...) ALOGV(__VA_ARGS__)
-#else
-#define ALOGVV(...) ((void)0)
-#endif
-
-#include <inttypes.h>
-
-#include <utils/Log.h>
-#include <utils/Trace.h>
-#include <gui/Surface.h>
-
-#include "common/CameraDeviceBase.h"
-#include "api1/Camera2Client.h"
-#include "api1/client2/CaptureSequencer.h"
-#include "api1/client2/ZslProcessor3.h"
-#include "device3/Camera3Device.h"
-
-namespace android {
-namespace camera2 {
-
-ZslProcessor3::ZslProcessor3(
- sp<Camera2Client> client,
- wp<CaptureSequencer> sequencer):
- Thread(false),
- mLatestClearedBufferTimestamp(0),
- mState(RUNNING),
- mClient(client),
- mSequencer(sequencer),
- mId(client->getCameraId()),
- mZslStreamId(NO_STREAM),
- mFrameListHead(0),
- mZslQueueHead(0),
- mZslQueueTail(0),
- mHasFocuser(false) {
- // Initialize buffer queue and frame list based on pipeline max depth.
- size_t pipelineMaxDepth = kDefaultMaxPipelineDepth;
- if (client != 0) {
- sp<Camera3Device> device =
- static_cast<Camera3Device*>(client->getCameraDevice().get());
- if (device != 0) {
- camera_metadata_ro_entry_t entry =
- device->info().find(ANDROID_REQUEST_PIPELINE_MAX_DEPTH);
- if (entry.count == 1) {
- pipelineMaxDepth = entry.data.u8[0];
- } else {
- ALOGW("%s: Unable to find the android.request.pipelineMaxDepth,"
- " use default pipeline max depth %zu", __FUNCTION__,
- kDefaultMaxPipelineDepth);
- }
-
- entry = device->info().find(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE);
- if (entry.count > 0 && entry.data.f[0] != 0.) {
- mHasFocuser = true;
- }
- }
- }
-
- ALOGV("%s: Initialize buffer queue and frame list depth based on max pipeline depth (%d)",
- __FUNCTION__, pipelineMaxDepth);
- // Need to keep buffer queue longer than metadata queue because sometimes buffer arrives
- // earlier than metadata which causes the buffer corresponding to oldest metadata being
- // removed.
- mFrameListDepth = pipelineMaxDepth;
- mBufferQueueDepth = mFrameListDepth + 1;
-
-
- mZslQueue.insertAt(0, mBufferQueueDepth);
- mFrameList.insertAt(0, mFrameListDepth);
- sp<CaptureSequencer> captureSequencer = mSequencer.promote();
- if (captureSequencer != 0) captureSequencer->setZslProcessor(this);
-}
-
-ZslProcessor3::~ZslProcessor3() {
- ALOGV("%s: Exit", __FUNCTION__);
- deleteStream();
-}
-
-void ZslProcessor3::onResultAvailable(const CaptureResult &result) {
- ATRACE_CALL();
- ALOGV("%s:", __FUNCTION__);
- Mutex::Autolock l(mInputMutex);
- camera_metadata_ro_entry_t entry;
- entry = result.mMetadata.find(ANDROID_SENSOR_TIMESTAMP);
- nsecs_t timestamp = entry.data.i64[0];
- if (entry.count == 0) {
- ALOGE("%s: metadata doesn't have timestamp, skip this result", __FUNCTION__);
- return;
- }
-
- entry = result.mMetadata.find(ANDROID_REQUEST_FRAME_COUNT);
- if (entry.count == 0) {
- ALOGE("%s: metadata doesn't have frame number, skip this result", __FUNCTION__);
- return;
- }
- int32_t frameNumber = entry.data.i32[0];
-
- ALOGVV("Got preview metadata for frame %d with timestamp %" PRId64, frameNumber, timestamp);
-
- if (mState != RUNNING) return;
-
- // Corresponding buffer has been cleared. No need to push into mFrameList
- if (timestamp <= mLatestClearedBufferTimestamp) return;
-
- mFrameList.editItemAt(mFrameListHead) = result.mMetadata;
- mFrameListHead = (mFrameListHead + 1) % mFrameListDepth;
-}
-
-status_t ZslProcessor3::updateStream(const Parameters ¶ms) {
- ATRACE_CALL();
- ALOGV("%s: Configuring ZSL streams", __FUNCTION__);
- status_t res;
-
- Mutex::Autolock l(mInputMutex);
-
- sp<Camera2Client> client = mClient.promote();
- if (client == 0) {
- ALOGE("%s: Camera %d: Client does not exist", __FUNCTION__, mId);
- return INVALID_OPERATION;
- }
- sp<Camera3Device> device =
- static_cast<Camera3Device*>(client->getCameraDevice().get());
- if (device == 0) {
- ALOGE("%s: Camera %d: Device does not exist", __FUNCTION__, mId);
- return INVALID_OPERATION;
- }
-
- if (mZslStreamId != NO_STREAM) {
- // Check if stream parameters have to change
- uint32_t currentWidth, currentHeight;
- res = device->getStreamInfo(mZslStreamId,
- ¤tWidth, ¤tHeight, 0, 0);
- if (res != OK) {
- ALOGE("%s: Camera %d: Error querying capture output stream info: "
- "%s (%d)", __FUNCTION__,
- client->getCameraId(), strerror(-res), res);
- return res;
- }
- if (currentWidth != (uint32_t)params.fastInfo.arrayWidth ||
- currentHeight != (uint32_t)params.fastInfo.arrayHeight) {
- ALOGV("%s: Camera %d: Deleting stream %d since the buffer "
- "dimensions changed",
- __FUNCTION__, client->getCameraId(), mZslStreamId);
- res = device->deleteStream(mZslStreamId);
- if (res == -EBUSY) {
- ALOGV("%s: Camera %d: Device is busy, call updateStream again "
- " after it becomes idle", __FUNCTION__, mId);
- return res;
- } else if(res != OK) {
- ALOGE("%s: Camera %d: Unable to delete old output stream "
- "for ZSL: %s (%d)", __FUNCTION__,
- client->getCameraId(), strerror(-res), res);
- return res;
- }
- mZslStreamId = NO_STREAM;
- }
- }
-
- if (mZslStreamId == NO_STREAM) {
- // Create stream for HAL production
- // TODO: Sort out better way to select resolution for ZSL
-
- // Note that format specified internally in Camera3ZslStream
- res = device->createZslStream(
- params.fastInfo.arrayWidth, params.fastInfo.arrayHeight,
- mBufferQueueDepth,
- &mZslStreamId,
- &mZslStream);
- if (res != OK) {
- ALOGE("%s: Camera %d: Can't create ZSL stream: "
- "%s (%d)", __FUNCTION__, client->getCameraId(),
- strerror(-res), res);
- return res;
- }
-
- // Only add the camera3 buffer listener when the stream is created.
- mZslStream->addBufferListener(this);
- }
-
- client->registerFrameListener(Camera2Client::kPreviewRequestIdStart,
- Camera2Client::kPreviewRequestIdEnd,
- this,
- /*sendPartials*/false);
-
- return OK;
-}
-
-status_t ZslProcessor3::deleteStream() {
- ATRACE_CALL();
- status_t res;
-
- Mutex::Autolock l(mInputMutex);
-
- if (mZslStreamId != NO_STREAM) {
- sp<Camera2Client> client = mClient.promote();
- if (client == 0) {
- ALOGE("%s: Camera %d: Client does not exist", __FUNCTION__, mId);
- return INVALID_OPERATION;
- }
-
- sp<Camera3Device> device =
- reinterpret_cast<Camera3Device*>(client->getCameraDevice().get());
- if (device == 0) {
- ALOGE("%s: Camera %d: Device does not exist", __FUNCTION__, mId);
- return INVALID_OPERATION;
- }
-
- res = device->deleteStream(mZslStreamId);
- if (res != OK) {
- ALOGE("%s: Camera %d: Cannot delete ZSL output stream %d: "
- "%s (%d)", __FUNCTION__, client->getCameraId(),
- mZslStreamId, strerror(-res), res);
- return res;
- }
-
- mZslStreamId = NO_STREAM;
- }
- return OK;
-}
-
-int ZslProcessor3::getStreamId() const {
- Mutex::Autolock l(mInputMutex);
- return mZslStreamId;
-}
-
-status_t ZslProcessor3::updateRequestWithDefaultStillRequest(CameraMetadata &request) const {
- sp<Camera2Client> client = mClient.promote();
- if (client == 0) {
- ALOGE("%s: Camera %d: Client does not exist", __FUNCTION__, mId);
- return INVALID_OPERATION;
- }
- sp<Camera3Device> device =
- static_cast<Camera3Device*>(client->getCameraDevice().get());
- if (device == 0) {
- ALOGE("%s: Camera %d: Device does not exist", __FUNCTION__, mId);
- return INVALID_OPERATION;
- }
-
- CameraMetadata stillTemplate;
- device->createDefaultRequest(CAMERA3_TEMPLATE_STILL_CAPTURE, &stillTemplate);
-
- // Find some of the post-processing tags, and assign the value from template to the request.
- // Only check the aberration mode and noise reduction mode for now, as they are very important
- // for image quality.
- uint32_t postProcessingTags[] = {
- ANDROID_NOISE_REDUCTION_MODE,
- ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
- ANDROID_COLOR_CORRECTION_MODE,
- ANDROID_TONEMAP_MODE,
- ANDROID_SHADING_MODE,
- ANDROID_HOT_PIXEL_MODE,
- ANDROID_EDGE_MODE
- };
-
- camera_metadata_entry_t entry;
- for (size_t i = 0; i < sizeof(postProcessingTags) / sizeof(uint32_t); i++) {
- entry = stillTemplate.find(postProcessingTags[i]);
- if (entry.count > 0) {
- request.update(postProcessingTags[i], entry.data.u8, 1);
- }
- }
-
- return OK;
-}
-
-status_t ZslProcessor3::pushToReprocess(int32_t requestId) {
- ALOGV("%s: Send in reprocess request with id %d",
- __FUNCTION__, requestId);
- Mutex::Autolock l(mInputMutex);
- status_t res;
- sp<Camera2Client> client = mClient.promote();
-
- if (client == 0) {
- ALOGE("%s: Camera %d: Client does not exist", __FUNCTION__, mId);
- return INVALID_OPERATION;
- }
-
- IF_ALOGV() {
- dumpZslQueue(-1);
- }
-
- size_t metadataIdx;
- nsecs_t candidateTimestamp = getCandidateTimestampLocked(&metadataIdx);
-
- if (candidateTimestamp == -1) {
- ALOGE("%s: Could not find good candidate for ZSL reprocessing",
- __FUNCTION__);
- return NOT_ENOUGH_DATA;
- }
-
- res = mZslStream->enqueueInputBufferByTimestamp(candidateTimestamp,
- /*actualTimestamp*/NULL);
-
- if (res == mZslStream->NO_BUFFER_AVAILABLE) {
- ALOGV("%s: No ZSL buffers yet", __FUNCTION__);
- return NOT_ENOUGH_DATA;
- } else if (res != OK) {
- ALOGE("%s: Unable to push buffer for reprocessing: %s (%d)",
- __FUNCTION__, strerror(-res), res);
- return res;
- }
-
- {
- CameraMetadata request = mFrameList[metadataIdx];
-
- // Verify that the frame is reasonable for reprocessing
-
- camera_metadata_entry_t entry;
- entry = request.find(ANDROID_CONTROL_AE_STATE);
- if (entry.count == 0) {
- ALOGE("%s: ZSL queue frame has no AE state field!",
- __FUNCTION__);
- return BAD_VALUE;
- }
- if (entry.data.u8[0] != ANDROID_CONTROL_AE_STATE_CONVERGED &&
- entry.data.u8[0] != ANDROID_CONTROL_AE_STATE_LOCKED) {
- ALOGV("%s: ZSL queue frame AE state is %d, need full capture",
- __FUNCTION__, entry.data.u8[0]);
- return NOT_ENOUGH_DATA;
- }
-
- uint8_t requestType = ANDROID_REQUEST_TYPE_REPROCESS;
- res = request.update(ANDROID_REQUEST_TYPE,
- &requestType, 1);
- if (res != OK) {
- ALOGE("%s: Unable to update request type",
- __FUNCTION__);
- return INVALID_OPERATION;
- }
-
- int32_t inputStreams[1] =
- { mZslStreamId };
- res = request.update(ANDROID_REQUEST_INPUT_STREAMS,
- inputStreams, 1);
- if (res != OK) {
- ALOGE("%s: Unable to update request input streams",
- __FUNCTION__);
- return INVALID_OPERATION;
- }
-
- uint8_t captureIntent =
- static_cast<uint8_t>(ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE);
- res = request.update(ANDROID_CONTROL_CAPTURE_INTENT,
- &captureIntent, 1);
- if (res != OK ) {
- ALOGE("%s: Unable to update request capture intent",
- __FUNCTION__);
- return INVALID_OPERATION;
- }
-
- // TODO: Shouldn't we also update the latest preview frame?
- int32_t outputStreams[1] =
- { client->getCaptureStreamId() };
- res = request.update(ANDROID_REQUEST_OUTPUT_STREAMS,
- outputStreams, 1);
- if (res != OK) {
- ALOGE("%s: Unable to update request output streams",
- __FUNCTION__);
- return INVALID_OPERATION;
- }
-
- res = request.update(ANDROID_REQUEST_ID,
- &requestId, 1);
- if (res != OK ) {
- ALOGE("%s: Unable to update frame to a reprocess request",
- __FUNCTION__);
- return INVALID_OPERATION;
- }
-
- res = client->stopStream();
- if (res != OK) {
- ALOGE("%s: Camera %d: Unable to stop preview for ZSL capture: "
- "%s (%d)",
- __FUNCTION__, client->getCameraId(), strerror(-res), res);
- return INVALID_OPERATION;
- }
-
- // Update JPEG settings
- {
- SharedParameters::Lock l(client->getParameters());
- res = l.mParameters.updateRequestJpeg(&request);
- if (res != OK) {
- ALOGE("%s: Camera %d: Unable to update JPEG entries of ZSL "
- "capture request: %s (%d)", __FUNCTION__,
- client->getCameraId(),
- strerror(-res), res);
- return res;
- }
- }
-
- // Update post-processing settings
- res = updateRequestWithDefaultStillRequest(request);
- if (res != OK) {
- ALOGW("%s: Unable to update post-processing tags, the reprocessed image quality "
- "may be compromised", __FUNCTION__);
- }
-
- mLatestCapturedRequest = request;
- res = client->getCameraDevice()->capture(request);
- if (res != OK ) {
- ALOGE("%s: Unable to send ZSL reprocess request to capture: %s"
- " (%d)", __FUNCTION__, strerror(-res), res);
- return res;
- }
-
- mState = LOCKED;
- }
-
- return OK;
-}
-
-status_t ZslProcessor3::clearZslQueue() {
- Mutex::Autolock l(mInputMutex);
- // If in middle of capture, can't clear out queue
- if (mState == LOCKED) return OK;
-
- return clearZslQueueLocked();
-}
-
-status_t ZslProcessor3::clearZslQueueLocked() {
- if (mZslStream != 0) {
- // clear result metadata list first.
- clearZslResultQueueLocked();
- return mZslStream->clearInputRingBuffer(&mLatestClearedBufferTimestamp);
- }
- return OK;
-}
-
-void ZslProcessor3::clearZslResultQueueLocked() {
- mFrameList.clear();
- mFrameListHead = 0;
- mFrameList.insertAt(0, mFrameListDepth);
-}
-
-void ZslProcessor3::dump(int fd, const Vector<String16>& /*args*/) const {
- Mutex::Autolock l(mInputMutex);
- if (!mLatestCapturedRequest.isEmpty()) {
- String8 result(" Latest ZSL capture request:\n");
- write(fd, result.string(), result.size());
- mLatestCapturedRequest.dump(fd, 2, 6);
- } else {
- String8 result(" Latest ZSL capture request: none yet\n");
- write(fd, result.string(), result.size());
- }
- dumpZslQueue(fd);
-}
-
-bool ZslProcessor3::threadLoop() {
- // TODO: remove dependency on thread. For now, shut thread down right
- // away.
- return false;
-}
-
-void ZslProcessor3::dumpZslQueue(int fd) const {
- String8 header("ZSL queue contents:");
- String8 indent(" ");
- ALOGV("%s", header.string());
- if (fd != -1) {
- header = indent + header + "\n";
- write(fd, header.string(), header.size());
- }
- for (size_t i = 0; i < mZslQueue.size(); i++) {
- const ZslPair &queueEntry = mZslQueue[i];
- nsecs_t bufferTimestamp = queueEntry.buffer.mTimestamp;
- camera_metadata_ro_entry_t entry;
- nsecs_t frameTimestamp = 0;
- int frameAeState = -1;
- if (!queueEntry.frame.isEmpty()) {
- entry = queueEntry.frame.find(ANDROID_SENSOR_TIMESTAMP);
- if (entry.count > 0) frameTimestamp = entry.data.i64[0];
- entry = queueEntry.frame.find(ANDROID_CONTROL_AE_STATE);
- if (entry.count > 0) frameAeState = entry.data.u8[0];
- }
- String8 result =
- String8::format(" %zu: b: %" PRId64 "\tf: %" PRId64 ", AE state: %d", i,
- bufferTimestamp, frameTimestamp, frameAeState);
- ALOGV("%s", result.string());
- if (fd != -1) {
- result = indent + result + "\n";
- write(fd, result.string(), result.size());
- }
-
- }
-}
-
-bool ZslProcessor3::isFixedFocusMode(uint8_t afMode) const {
- switch (afMode) {
- case ANDROID_CONTROL_AF_MODE_AUTO:
- case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
- case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
- case ANDROID_CONTROL_AF_MODE_MACRO:
- return false;
- break;
- case ANDROID_CONTROL_AF_MODE_OFF:
- case ANDROID_CONTROL_AF_MODE_EDOF:
- return true;
- default:
- ALOGE("%s: unknown focus mode %d", __FUNCTION__, afMode);
- return false;
- }
-}
-
-nsecs_t ZslProcessor3::getCandidateTimestampLocked(size_t* metadataIdx) const {
- /**
- * Find the smallest timestamp we know about so far
- * - ensure that aeState is either converged or locked
- */
-
- size_t idx = 0;
- nsecs_t minTimestamp = -1;
-
- size_t emptyCount = mFrameList.size();
-
- for (size_t j = 0; j < mFrameList.size(); j++) {
- const CameraMetadata &frame = mFrameList[j];
- if (!frame.isEmpty()) {
-
- emptyCount--;
-
- camera_metadata_ro_entry_t entry;
- entry = frame.find(ANDROID_SENSOR_TIMESTAMP);
- if (entry.count == 0) {
- ALOGE("%s: Can't find timestamp in frame!",
- __FUNCTION__);
- continue;
- }
- nsecs_t frameTimestamp = entry.data.i64[0];
- if (minTimestamp > frameTimestamp || minTimestamp == -1) {
-
- entry = frame.find(ANDROID_CONTROL_AE_STATE);
-
- if (entry.count == 0) {
- /**
- * This is most likely a HAL bug. The aeState field is
- * mandatory, so it should always be in a metadata packet.
- */
- ALOGW("%s: ZSL queue frame has no AE state field!",
- __FUNCTION__);
- continue;
- }
- if (entry.data.u8[0] != ANDROID_CONTROL_AE_STATE_CONVERGED &&
- entry.data.u8[0] != ANDROID_CONTROL_AE_STATE_LOCKED) {
- ALOGVV("%s: ZSL queue frame AE state is %d, need "
- "full capture", __FUNCTION__, entry.data.u8[0]);
- continue;
- }
-
- entry = frame.find(ANDROID_CONTROL_AF_MODE);
- if (entry.count == 0) {
- ALOGW("%s: ZSL queue frame has no AF mode field!",
- __FUNCTION__);
- continue;
- }
- uint8_t afMode = entry.data.u8[0];
- if (afMode == ANDROID_CONTROL_AF_MODE_OFF) {
- // Skip all the ZSL buffer for manual AF mode, as we don't really
- // know the af state.
- continue;
- }
-
- // Check AF state if device has focuser and focus mode isn't fixed
- if (mHasFocuser && !isFixedFocusMode(afMode)) {
- // Make sure the candidate frame has good focus.
- entry = frame.find(ANDROID_CONTROL_AF_STATE);
- if (entry.count == 0) {
- ALOGW("%s: ZSL queue frame has no AF state field!",
- __FUNCTION__);
- continue;
- }
- uint8_t afState = entry.data.u8[0];
- if (afState != ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED &&
- afState != ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED &&
- afState != ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED) {
- ALOGVV("%s: ZSL queue frame AF state is %d is not good for capture, skip it",
- __FUNCTION__, afState);
- continue;
- }
- }
-
- minTimestamp = frameTimestamp;
- idx = j;
- }
-
- ALOGVV("%s: Saw timestamp %" PRId64, __FUNCTION__, frameTimestamp);
- }
- }
-
- if (emptyCount == mFrameList.size()) {
- /**
- * This could be mildly bad and means our ZSL was triggered before
- * there were any frames yet received by the camera framework.
- *
- * This is a fairly corner case which can happen under:
- * + a user presses the shutter button real fast when the camera starts
- * (startPreview followed immediately by takePicture).
- * + burst capture case (hitting shutter button as fast possible)
- *
- * If this happens in steady case (preview running for a while, call
- * a single takePicture) then this might be a fwk bug.
- */
- ALOGW("%s: ZSL queue has no metadata frames", __FUNCTION__);
- }
-
- ALOGV("%s: Candidate timestamp %" PRId64 " (idx %zu), empty frames: %zu",
- __FUNCTION__, minTimestamp, idx, emptyCount);
-
- if (metadataIdx) {
- *metadataIdx = idx;
- }
-
- return minTimestamp;
-}
-
-void ZslProcessor3::onBufferAcquired(const BufferInfo& /*bufferInfo*/) {
- // Intentionally left empty
- // Although theoretically we could use this to get better dump info
-}
-
-void ZslProcessor3::onBufferReleased(const BufferInfo& bufferInfo) {
-
- // ignore output buffers
- if (bufferInfo.mOutput) {
- return;
- }
-
- // Lock mutex only once we know this is an input buffer returned to avoid
- // potential deadlock
- Mutex::Autolock l(mInputMutex);
- // TODO: Verify that the buffer is in our queue by looking at timestamp
- // theoretically unnecessary unless we change the following assumptions:
- // -- only 1 buffer reprocessed at a time (which is the case now)
-
- // Erase entire ZSL queue since we've now completed the capture and preview
- // is stopped.
- //
- // We need to guarantee that if we do two back-to-back captures,
- // the second won't use a buffer that's older/the same as the first, which
- // is theoretically possible if we don't clear out the queue and the
- // selection criteria is something like 'newest'. Clearing out the result
- // metadata queue on a completed capture ensures we'll only use new timestamp.
- // Calling clearZslQueueLocked is a guaranteed deadlock because this callback
- // holds the Camera3Stream internal lock (mLock), and clearZslQueueLocked requires
- // to hold the same lock.
- // TODO: need figure out a way to clear the Zsl buffer queue properly. Right now
- // it is safe not to do so, as back to back ZSL capture requires stop and start
- // preview, which will flush ZSL queue automatically.
- ALOGV("%s: Memory optimization, clearing ZSL queue",
- __FUNCTION__);
- clearZslResultQueueLocked();
-
- // Required so we accept more ZSL requests
- mState = RUNNING;
-}
-
-}; // namespace camera2
-}; // namespace android
diff --git a/services/camera/libcameraservice/api1/client2/ZslProcessor3.h b/services/camera/libcameraservice/api1/client2/ZslProcessor3.h
deleted file mode 100644
index 2960478..0000000
--- a/services/camera/libcameraservice/api1/client2/ZslProcessor3.h
+++ /dev/null
@@ -1,148 +0,0 @@
-/*
- * Copyright (C) 2013 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ANDROID_SERVERS_CAMERA_CAMERA2_ZSLPROCESSOR3_H
-#define ANDROID_SERVERS_CAMERA_CAMERA2_ZSLPROCESSOR3_H
-
-#include <utils/Thread.h>
-#include <utils/String16.h>
-#include <utils/Vector.h>
-#include <utils/Mutex.h>
-#include <utils/Condition.h>
-#include <gui/BufferItem.h>
-#include <gui/BufferItemConsumer.h>
-#include <camera/CameraMetadata.h>
-
-#include "api1/client2/FrameProcessor.h"
-#include "api1/client2/ZslProcessorInterface.h"
-#include "device3/Camera3ZslStream.h"
-
-namespace android {
-
-class Camera2Client;
-
-namespace camera2 {
-
-class CaptureSequencer;
-class Parameters;
-
-/***
- * ZSL queue processing
- */
-class ZslProcessor3 :
- public ZslProcessorInterface,
- public camera3::Camera3StreamBufferListener,
- virtual public Thread,
- virtual public FrameProcessor::FilteredListener {
- public:
- ZslProcessor3(sp<Camera2Client> client, wp<CaptureSequencer> sequencer);
- ~ZslProcessor3();
-
- // From FrameProcessor::FilteredListener
- virtual void onResultAvailable(const CaptureResult &result);
-
- /**
- ****************************************
- * ZslProcessorInterface implementation *
- ****************************************
- */
-
- virtual status_t updateStream(const Parameters ¶ms);
- virtual status_t deleteStream();
- virtual int getStreamId() const;
-
- virtual status_t pushToReprocess(int32_t requestId);
- virtual status_t clearZslQueue();
-
- void dump(int fd, const Vector<String16>& args) const;
-
- protected:
- /**
- **********************************************
- * Camera3StreamBufferListener implementation *
- **********************************************
- */
- typedef camera3::Camera3StreamBufferListener::BufferInfo BufferInfo;
- // Buffer was acquired by the HAL
- virtual void onBufferAcquired(const BufferInfo& bufferInfo);
- // Buffer was released by the HAL
- virtual void onBufferReleased(const BufferInfo& bufferInfo);
-
- private:
- static const nsecs_t kWaitDuration = 10000000; // 10 ms
- nsecs_t mLatestClearedBufferTimestamp;
-
- enum {
- RUNNING,
- LOCKED
- } mState;
-
- wp<Camera2Client> mClient;
- wp<CaptureSequencer> mSequencer;
-
- const int mId;
-
- mutable Mutex mInputMutex;
-
- enum {
- NO_STREAM = -1
- };
-
- int mZslStreamId;
- sp<camera3::Camera3ZslStream> mZslStream;
-
- struct ZslPair {
- BufferItem buffer;
- CameraMetadata frame;
- };
-
- static const int32_t kDefaultMaxPipelineDepth = 4;
- size_t mBufferQueueDepth;
- size_t mFrameListDepth;
- Vector<CameraMetadata> mFrameList;
- size_t mFrameListHead;
-
- ZslPair mNextPair;
-
- Vector<ZslPair> mZslQueue;
- size_t mZslQueueHead;
- size_t mZslQueueTail;
-
- CameraMetadata mLatestCapturedRequest;
-
- bool mHasFocuser;
-
- virtual bool threadLoop();
-
- status_t clearZslQueueLocked();
-
- void clearZslResultQueueLocked();
-
- void dumpZslQueue(int id) const;
-
- nsecs_t getCandidateTimestampLocked(size_t* metadataIdx) const;
-
- bool isFixedFocusMode(uint8_t afMode) const;
-
- // Update the post-processing metadata with the default still capture request template
- status_t updateRequestWithDefaultStillRequest(CameraMetadata &request) const;
-};
-
-
-}; //namespace camera2
-}; //namespace android
-
-#endif
diff --git a/services/camera/libcameraservice/api1/client2/ZslProcessorInterface.cpp b/services/camera/libcameraservice/api1/client2/ZslProcessorInterface.cpp
deleted file mode 100644
index 9efeaba..0000000
--- a/services/camera/libcameraservice/api1/client2/ZslProcessorInterface.cpp
+++ /dev/null
@@ -1,28 +0,0 @@
-/*
- * Copyright (C) 2014 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "ZslProcessorInterface.h"
-
-namespace android {
-namespace camera2 {
-
-status_t ZslProcessorInterface::disconnect() {
- return OK;
-}
-
-}; //namespace camera2
-}; //namespace android
-
diff --git a/services/camera/libcameraservice/api1/client2/ZslProcessorInterface.h b/services/camera/libcameraservice/api1/client2/ZslProcessorInterface.h
deleted file mode 100644
index 9e266e7..0000000
--- a/services/camera/libcameraservice/api1/client2/ZslProcessorInterface.h
+++ /dev/null
@@ -1,64 +0,0 @@
-/*
- * Copyright (C) 2013 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ANDROID_SERVERS_CAMERA_CAMERA2_ZSLPROCESSORINTERFACE_H
-#define ANDROID_SERVERS_CAMERA_CAMERA2_ZSLPROCESSORINTERFACE_H
-
-#include <utils/Errors.h>
-#include <utils/RefBase.h>
-#include <utils/String16.h>
-#include <utils/Vector.h>
-
-namespace android {
-namespace camera2 {
-
-class Parameters;
-
-class ZslProcessorInterface : virtual public RefBase {
-public:
-
- // Get ID for use with android.request.outputStreams / inputStreams
- virtual int getStreamId() const = 0;
-
- // Update the streams by recreating them if the size/format has changed
- virtual status_t updateStream(const Parameters& params) = 0;
-
- // Delete the underlying CameraDevice streams
- virtual status_t deleteStream() = 0;
-
- // Clear any additional state necessary before the CameraDevice is disconnected
- virtual status_t disconnect();
-
- /**
- * Submits a ZSL capture request (id = requestId)
- *
- * An appropriate ZSL buffer is selected by the closest timestamp,
- * then we push that buffer to be reprocessed by the HAL.
- * A capture request is created and submitted on behalf of the client.
- */
- virtual status_t pushToReprocess(int32_t requestId) = 0;
-
- // Flush the ZSL buffer queue, freeing up all the buffers
- virtual status_t clearZslQueue() = 0;
-
- // (Debugging only) Dump the current state to the specified file descriptor
- virtual void dump(int fd, const Vector<String16>& args) const = 0;
-};
-
-}; //namespace camera2
-}; //namespace android
-
-#endif
diff --git a/services/camera/libcameraservice/common/Camera2ClientBase.cpp b/services/camera/libcameraservice/common/Camera2ClientBase.cpp
index 5732f80..82c8fe9 100644
--- a/services/camera/libcameraservice/common/Camera2ClientBase.cpp
+++ b/services/camera/libcameraservice/common/Camera2ClientBase.cpp
@@ -31,7 +31,7 @@
#include "api2/CameraDeviceClient.h"
-#include "CameraDeviceFactory.h"
+#include "device3/Camera3Device.h"
namespace android {
using namespace camera2;
@@ -62,7 +62,7 @@
String8(clientPackageName).string(), clientPid, clientUid);
mInitialClientPid = clientPid;
- mDevice = CameraDeviceFactory::createDevice(cameraId);
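+ // HALv3 is the only supported device HAL now, so create a Camera3Device directly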
+ mDevice = new Camera3Device(cameraId);
LOG_ALWAYS_FATAL_IF(mDevice == 0, "Device should never be NULL here.");
}
diff --git a/services/camera/libcameraservice/common/Camera2ClientBase.h b/services/camera/libcameraservice/common/Camera2ClientBase.h
index 220c5ad..53122dc 100644
--- a/services/camera/libcameraservice/common/Camera2ClientBase.h
+++ b/services/camera/libcameraservice/common/Camera2ClientBase.h
@@ -125,7 +125,7 @@
// that mBinderSerializationLock is locked when they're called
mutable Mutex mBinderSerializationLock;
- /** CameraDeviceBase instance wrapping HAL2+ entry */
+ /** CameraDeviceBase instance wrapping HAL3+ entry */
const int mDeviceVersion;
sp<CameraDeviceBase> mDevice;
diff --git a/services/camera/libcameraservice/common/CameraModule.cpp b/services/camera/libcameraservice/common/CameraModule.cpp
index 16b8aba..d7a1568 100644
--- a/services/camera/libcameraservice/common/CameraModule.cpp
+++ b/services/camera/libcameraservice/common/CameraModule.cpp
@@ -27,11 +27,6 @@
void CameraModule::deriveCameraCharacteristicsKeys(
uint32_t deviceVersion, CameraMetadata &chars) {
ATRACE_CALL();
- // HAL1 devices should not reach here
- if (deviceVersion < CAMERA_DEVICE_API_VERSION_2_0) {
- ALOGV("%s: Cannot derive keys for HAL version < 2.0");
- return;
- }
// Keys added in HAL3.3
if (deviceVersion < CAMERA_DEVICE_API_VERSION_3_3) {
@@ -211,7 +206,7 @@
return ret;
}
int deviceVersion = rawInfo.device_version;
- if (deviceVersion < CAMERA_DEVICE_API_VERSION_2_0) {
+ if (deviceVersion < CAMERA_DEVICE_API_VERSION_3_0) {
// static_camera_characteristics is invalid
*info = rawInfo;
return ret;
diff --git a/services/camera/libcameraservice/device2/Camera2Device.cpp b/services/camera/libcameraservice/device2/Camera2Device.cpp
deleted file mode 100644
index d74f976..0000000
--- a/services/camera/libcameraservice/device2/Camera2Device.cpp
+++ /dev/null
@@ -1,1618 +0,0 @@
-/*
- * Copyright (C) 2012 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#define LOG_TAG "Camera2-Device"
-#define ATRACE_TAG ATRACE_TAG_CAMERA
-//#define LOG_NDEBUG 0
-//#define LOG_NNDEBUG 0 // Per-frame verbose logging
-
-#ifdef LOG_NNDEBUG
-#define ALOGVV(...) ALOGV(__VA_ARGS__)
-#else
-#define ALOGVV(...) ((void)0)
-#endif
-
-#include <inttypes.h>
-#include <utils/Log.h>
-#include <utils/Trace.h>
-#include <utils/Timers.h>
-#include "Camera2Device.h"
-#include "CameraService.h"
-
-namespace android {
-
-Camera2Device::Camera2Device(int id):
- mId(id),
- mHal2Device(NULL)
-{
- ATRACE_CALL();
- ALOGV("%s: Created device for camera %d", __FUNCTION__, id);
-}
-
-Camera2Device::~Camera2Device()
-{
- ATRACE_CALL();
- ALOGV("%s: Tearing down for camera id %d", __FUNCTION__, mId);
- disconnect();
-}
-
-int Camera2Device::getId() const {
- return mId;
-}
-
-status_t Camera2Device::initialize(CameraModule *module)
-{
- ATRACE_CALL();
- ALOGV("%s: Initializing device for camera %d", __FUNCTION__, mId);
- if (mHal2Device != NULL) {
- ALOGE("%s: Already initialized!", __FUNCTION__);
- return INVALID_OPERATION;
- }
-
- status_t res;
- char name[10];
- snprintf(name, sizeof(name), "%d", mId);
-
- camera2_device_t *device;
-
- res = module->open(name, reinterpret_cast<hw_device_t**>(&device));
-
- if (res != OK) {
- ALOGE("%s: Could not open camera %d: %s (%d)", __FUNCTION__,
- mId, strerror(-res), res);
- return res;
- }
-
- if (device->common.version != CAMERA_DEVICE_API_VERSION_2_0) {
- ALOGE("%s: Could not open camera %d: "
- "Camera device is not version %x, reports %x instead",
- __FUNCTION__, mId, CAMERA_DEVICE_API_VERSION_2_0,
- device->common.version);
- device->common.close(&device->common);
- return BAD_VALUE;
- }
-
- camera_info info;
- res = module->getCameraInfo(mId, &info);
- if (res != OK ) return res;
-
- if (info.device_version != device->common.version) {
- ALOGE("%s: HAL reporting mismatched camera_info version (%x)"
- " and device version (%x).", __FUNCTION__,
- device->common.version, info.device_version);
- device->common.close(&device->common);
- return BAD_VALUE;
- }
-
- res = mRequestQueue.setConsumerDevice(device);
- if (res != OK) {
- ALOGE("%s: Camera %d: Unable to connect request queue to device: %s (%d)",
- __FUNCTION__, mId, strerror(-res), res);
- device->common.close(&device->common);
- return res;
- }
- res = mFrameQueue.setProducerDevice(device);
- if (res != OK) {
- ALOGE("%s: Camera %d: Unable to connect frame queue to device: %s (%d)",
- __FUNCTION__, mId, strerror(-res), res);
- device->common.close(&device->common);
- return res;
- }
-
- res = device->ops->set_notify_callback(device, notificationCallback,
- NULL);
- if (res != OK) {
- ALOGE("%s: Camera %d: Unable to initialize notification callback!",
- __FUNCTION__, mId);
- device->common.close(&device->common);
- return res;
- }
-
- mDeviceInfo = info.static_camera_characteristics;
- mHal2Device = device;
- mDeviceVersion = device->common.version;
-
- return OK;
-}
-
-status_t Camera2Device::disconnect() {
- ATRACE_CALL();
- status_t res = OK;
- if (mHal2Device) {
- ALOGV("%s: Closing device for camera %d", __FUNCTION__, mId);
-
- int inProgressCount = mHal2Device->ops->get_in_progress_count(mHal2Device);
- if (inProgressCount > 0) {
- ALOGW("%s: Closing camera device %d with %d requests in flight!",
- __FUNCTION__, mId, inProgressCount);
- }
- mReprocessStreams.clear();
- mStreams.clear();
- res = mHal2Device->common.close(&mHal2Device->common);
- if (res != OK) {
- ALOGE("%s: Could not close camera %d: %s (%d)",
- __FUNCTION__,
- mId, strerror(-res), res);
- }
- mHal2Device = NULL;
- ALOGV("%s: Shutdown complete", __FUNCTION__);
- }
- return res;
-}
-
-status_t Camera2Device::dump(int fd, const Vector<String16>& args) {
- ATRACE_CALL();
- String8 result;
- int detailLevel = 0;
- int n = args.size();
- String16 detailOption("-d");
- for (int i = 0; i + 1 < n; i++) {
- if (args[i] == detailOption) {
- String8 levelStr(args[i+1]);
- detailLevel = atoi(levelStr.string());
- }
- }
-
- result.appendFormat(" Camera2Device[%d] dump (detail level %d):\n",
- mId, detailLevel);
-
- if (detailLevel > 0) {
- result = " Request queue contents:\n";
- write(fd, result.string(), result.size());
- mRequestQueue.dump(fd, args);
-
- result = " Frame queue contents:\n";
- write(fd, result.string(), result.size());
- mFrameQueue.dump(fd, args);
- }
-
- result = " Active streams:\n";
- write(fd, result.string(), result.size());
- for (StreamList::iterator s = mStreams.begin(); s != mStreams.end(); s++) {
- (*s)->dump(fd, args);
- }
-
- result = " HAL device dump:\n";
- write(fd, result.string(), result.size());
-
- status_t res;
- res = mHal2Device->ops->dump(mHal2Device, fd);
-
- return res;
-}
-
-const CameraMetadata& Camera2Device::info() const {
- ALOGVV("%s: E", __FUNCTION__);
-
- return mDeviceInfo;
-}
-
-status_t Camera2Device::capture(CameraMetadata &request, int64_t* /*lastFrameNumber*/) {
- ATRACE_CALL();
- ALOGV("%s: E", __FUNCTION__);
-
- mRequestQueue.enqueue(request.release());
- return OK;
-}
-
-status_t Camera2Device::captureList(const List<const CameraMetadata> &requests,
- int64_t* /*lastFrameNumber*/) {
- ATRACE_CALL();
- ALOGE("%s: Camera2Device burst capture not implemented", __FUNCTION__);
- return INVALID_OPERATION;
-}
-
-status_t Camera2Device::setStreamingRequest(const CameraMetadata &request,
- int64_t* /*lastFrameNumber*/) {
- ATRACE_CALL();
- ALOGV("%s: E", __FUNCTION__);
- CameraMetadata streamRequest(request);
- return mRequestQueue.setStreamSlot(streamRequest.release());
-}
-
-status_t Camera2Device::setStreamingRequestList(const List<const CameraMetadata> &requests,
- int64_t* /*lastFrameNumber*/) {
- ATRACE_CALL();
- ALOGE("%s, Camera2Device streaming burst not implemented", __FUNCTION__);
- return INVALID_OPERATION;
-}
-
-status_t Camera2Device::clearStreamingRequest(int64_t* /*lastFrameNumber*/) {
- ATRACE_CALL();
- return mRequestQueue.setStreamSlot(NULL);
-}
-
-status_t Camera2Device::waitUntilRequestReceived(int32_t requestId, nsecs_t timeout) {
- ATRACE_CALL();
- return mRequestQueue.waitForDequeue(requestId, timeout);
-}
-
-status_t Camera2Device::createStream(sp<Surface> consumer,
- uint32_t width, uint32_t height, int format,
- android_dataspace /*dataSpace*/, camera3_stream_rotation_t rotation, int *id) {
- ATRACE_CALL();
- status_t res;
- ALOGV("%s: E", __FUNCTION__);
-
- sp<StreamAdapter> stream = new StreamAdapter(mHal2Device);
- size_t size = 0;
- if (format == HAL_PIXEL_FORMAT_BLOB) {
- size = getJpegBufferSize(width, height);
- }
- res = stream->connectToDevice(consumer, width, height, format, size);
- if (res != OK) {
- ALOGE("%s: Camera %d: Unable to create stream (%d x %d, format %x):"
- "%s (%d)",
- __FUNCTION__, mId, width, height, format, strerror(-res), res);
- return res;
- }
-
- *id = stream->getId();
-
- mStreams.push_back(stream);
- return OK;
-}
-
-ssize_t Camera2Device::getJpegBufferSize(uint32_t width, uint32_t height) const {
- // Always give the max jpeg buffer size regardless of the actual jpeg resolution.
- camera_metadata_ro_entry jpegBufMaxSize = mDeviceInfo.find(ANDROID_JPEG_MAX_SIZE);
- if (jpegBufMaxSize.count == 0) {
- ALOGE("%s: Camera %d: Can't find maximum JPEG size in static metadata!", __FUNCTION__, mId);
- return BAD_VALUE;
- }
-
- return jpegBufMaxSize.data.i32[0];
-}
-
-status_t Camera2Device::createReprocessStreamFromStream(int outputId, int *id) {
- ATRACE_CALL();
- status_t res;
- ALOGV("%s: E", __FUNCTION__);
-
- bool found = false;
- StreamList::iterator streamI;
- for (streamI = mStreams.begin();
- streamI != mStreams.end(); streamI++) {
- if ((*streamI)->getId() == outputId) {
- found = true;
- break;
- }
- }
- if (!found) {
- ALOGE("%s: Camera %d: Output stream %d doesn't exist; can't create "
- "reprocess stream from it!", __FUNCTION__, mId, outputId);
- return BAD_VALUE;
- }
-
- sp<ReprocessStreamAdapter> stream = new ReprocessStreamAdapter(mHal2Device);
-
- res = stream->connectToDevice((*streamI));
- if (res != OK) {
- ALOGE("%s: Camera %d: Unable to create reprocessing stream from "\
- "stream %d: %s (%d)", __FUNCTION__, mId, outputId,
- strerror(-res), res);
- return res;
- }
-
- *id = stream->getId();
-
- mReprocessStreams.push_back(stream);
- return OK;
-}
-
-
-status_t Camera2Device::getStreamInfo(int id,
- uint32_t *width, uint32_t *height,
- uint32_t *format, android_dataspace *dataSpace) {
- ATRACE_CALL();
- ALOGV("%s: E", __FUNCTION__);
- bool found = false;
- StreamList::iterator streamI;
- for (streamI = mStreams.begin();
- streamI != mStreams.end(); streamI++) {
- if ((*streamI)->getId() == id) {
- found = true;
- break;
- }
- }
- if (!found) {
- ALOGE("%s: Camera %d: Stream %d does not exist",
- __FUNCTION__, mId, id);
- return BAD_VALUE;
- }
-
- if (width) *width = (*streamI)->getWidth();
- if (height) *height = (*streamI)->getHeight();
- if (format) *format = (*streamI)->getFormat();
- if (dataSpace) *dataSpace = HAL_DATASPACE_UNKNOWN;
-
- return OK;
-}
-
-status_t Camera2Device::setStreamTransform(int id,
- int transform) {
- ATRACE_CALL();
- ALOGV("%s: E", __FUNCTION__);
- bool found = false;
- StreamList::iterator streamI;
- for (streamI = mStreams.begin();
- streamI != mStreams.end(); streamI++) {
- if ((*streamI)->getId() == id) {
- found = true;
- break;
- }
- }
- if (!found) {
- ALOGE("%s: Camera %d: Stream %d does not exist",
- __FUNCTION__, mId, id);
- return BAD_VALUE;
- }
-
- return (*streamI)->setTransform(transform);
-}
-
-status_t Camera2Device::deleteStream(int id) {
- ATRACE_CALL();
- ALOGV("%s: E", __FUNCTION__);
- bool found = false;
- for (StreamList::iterator streamI = mStreams.begin();
- streamI != mStreams.end(); streamI++) {
- if ((*streamI)->getId() == id) {
- status_t res = (*streamI)->release();
- if (res != OK) {
- ALOGE("%s: Unable to release stream %d from HAL device: "
- "%s (%d)", __FUNCTION__, id, strerror(-res), res);
- return res;
- }
- mStreams.erase(streamI);
- found = true;
- break;
- }
- }
- if (!found) {
- ALOGE("%s: Camera %d: Unable to find stream %d to delete",
- __FUNCTION__, mId, id);
- return BAD_VALUE;
- }
- return OK;
-}
-
-status_t Camera2Device::deleteReprocessStream(int id) {
- ATRACE_CALL();
- ALOGV("%s: E", __FUNCTION__);
- bool found = false;
- for (ReprocessStreamList::iterator streamI = mReprocessStreams.begin();
- streamI != mReprocessStreams.end(); streamI++) {
- if ((*streamI)->getId() == id) {
- status_t res = (*streamI)->release();
- if (res != OK) {
- ALOGE("%s: Unable to release reprocess stream %d from "
- "HAL device: %s (%d)", __FUNCTION__, id,
- strerror(-res), res);
- return res;
- }
- mReprocessStreams.erase(streamI);
- found = true;
- break;
- }
- }
- if (!found) {
- ALOGE("%s: Camera %d: Unable to find stream %d to delete",
- __FUNCTION__, mId, id);
- return BAD_VALUE;
- }
- return OK;
-}
-
-status_t Camera2Device::configureStreams(bool isConstrainedHighSpeed) {
- ATRACE_CALL();
- ALOGV("%s: E", __FUNCTION__);
-
- /**
- * HAL2 devices do not need to configure streams;
- * streams are created on the fly.
- */
- ALOGW("%s: No-op for HAL2 devices", __FUNCTION__);
-
- return OK;
-}
-
-
-status_t Camera2Device::createDefaultRequest(int templateId,
- CameraMetadata *request) {
- ATRACE_CALL();
- status_t err;
- ALOGV("%s: E", __FUNCTION__);
- camera_metadata_t *rawRequest;
- err = mHal2Device->ops->construct_default_request(
- mHal2Device, templateId, &rawRequest);
- request->acquire(rawRequest);
- return err;
-}
-
-status_t Camera2Device::waitUntilDrained() {
- ATRACE_CALL();
- static const uint32_t kSleepTime = 50000; // 50 ms
- static const uint32_t kMaxSleepTime = 10000000; // 10 s
- ALOGV("%s: Camera %d: Starting wait", __FUNCTION__, mId);
- if (mRequestQueue.getBufferCount() ==
- CAMERA2_REQUEST_QUEUE_IS_BOTTOMLESS) return INVALID_OPERATION;
-
- // TODO: Set up notifications from HAL, instead of sleeping here
- uint32_t totalTime = 0;
- while (mHal2Device->ops->get_in_progress_count(mHal2Device) > 0) {
- usleep(kSleepTime);
- totalTime += kSleepTime;
- if (totalTime > kMaxSleepTime) {
- ALOGE("%s: Waited %d us, %d requests still in flight", __FUNCTION__,
- totalTime, mHal2Device->ops->get_in_progress_count(mHal2Device));
- return TIMED_OUT;
- }
- }
- ALOGV("%s: Camera %d: HAL is idle", __FUNCTION__, mId);
- return OK;
-}
-
-status_t Camera2Device::setNotifyCallback(NotificationListener *listener) {
- ATRACE_CALL();
- status_t res;
- res = mHal2Device->ops->set_notify_callback(mHal2Device, notificationCallback,
- reinterpret_cast<void*>(listener) );
- if (res != OK) {
- ALOGE("%s: Unable to set notification callback!", __FUNCTION__);
- }
- return res;
-}
-
-bool Camera2Device::willNotify3A() {
- return true;
-}
-
-void Camera2Device::notificationCallback(int32_t msg_type,
- int32_t ext1,
- int32_t ext2,
- int32_t ext3,
- void *user) {
- ATRACE_CALL();
- NotificationListener *listener = reinterpret_cast<NotificationListener*>(user);
- ALOGV("%s: Notification %d, arguments %d, %d, %d", __FUNCTION__, msg_type,
- ext1, ext2, ext3);
- if (listener != NULL) {
- switch (msg_type) {
- case CAMERA2_MSG_ERROR:
- // TODO: This needs to be fixed. ext2 and ext3 need to be considered.
- listener->notifyError(
- ((ext1 == CAMERA2_MSG_ERROR_DEVICE)
- || (ext1 == CAMERA2_MSG_ERROR_HARDWARE)) ?
- ICameraDeviceCallbacks::ERROR_CAMERA_DEVICE :
- ICameraDeviceCallbacks::ERROR_CAMERA_SERVICE,
- CaptureResultExtras());
- break;
- case CAMERA2_MSG_SHUTTER: {
- // TODO: Only needed for camera2 API, which is unsupported
- // by HAL2 directly.
- // nsecs_t timestamp = (nsecs_t)ext2 | ((nsecs_t)(ext3) << 32 );
- // listener->notifyShutter(requestId, timestamp);
- break;
- }
- case CAMERA2_MSG_AUTOFOCUS:
- listener->notifyAutoFocus(ext1, ext2);
- break;
- case CAMERA2_MSG_AUTOEXPOSURE:
- listener->notifyAutoExposure(ext1, ext2);
- break;
- case CAMERA2_MSG_AUTOWB:
- listener->notifyAutoWhitebalance(ext1, ext2);
- break;
- default:
- ALOGE("%s: Unknown notification %d (arguments %d, %d, %d)!",
- __FUNCTION__, msg_type, ext1, ext2, ext3);
- }
- }
-}
-
-status_t Camera2Device::waitForNextFrame(nsecs_t timeout) {
- return mFrameQueue.waitForBuffer(timeout);
-}
-
-status_t Camera2Device::getNextResult(CaptureResult *result) {
- ATRACE_CALL();
- ALOGV("%s: get CaptureResult", __FUNCTION__);
- if (result == NULL) {
- ALOGE("%s: result pointer is NULL", __FUNCTION__);
- return BAD_VALUE;
- }
- status_t res;
- camera_metadata_t *rawFrame;
- res = mFrameQueue.dequeue(&rawFrame);
- if (rawFrame == NULL) {
- return NOT_ENOUGH_DATA;
- } else if (res == OK) {
- result->mMetadata.acquire(rawFrame);
- }
-
- return res;
-}
-
-status_t Camera2Device::triggerAutofocus(uint32_t id) {
- ATRACE_CALL();
- status_t res;
- ALOGV("%s: Triggering autofocus, id %d", __FUNCTION__, id);
- res = mHal2Device->ops->trigger_action(mHal2Device,
- CAMERA2_TRIGGER_AUTOFOCUS, id, 0);
- if (res != OK) {
- ALOGE("%s: Error triggering autofocus (id %d)",
- __FUNCTION__, id);
- }
- return res;
-}
-
-status_t Camera2Device::triggerCancelAutofocus(uint32_t id) {
- ATRACE_CALL();
- status_t res;
- ALOGV("%s: Canceling autofocus, id %d", __FUNCTION__, id);
- res = mHal2Device->ops->trigger_action(mHal2Device,
- CAMERA2_TRIGGER_CANCEL_AUTOFOCUS, id, 0);
- if (res != OK) {
- ALOGE("%s: Error canceling autofocus (id %d)",
- __FUNCTION__, id);
- }
- return res;
-}
-
-status_t Camera2Device::triggerPrecaptureMetering(uint32_t id) {
- ATRACE_CALL();
- status_t res;
- ALOGV("%s: Triggering precapture metering, id %d", __FUNCTION__, id);
- res = mHal2Device->ops->trigger_action(mHal2Device,
- CAMERA2_TRIGGER_PRECAPTURE_METERING, id, 0);
- if (res != OK) {
- ALOGE("%s: Error triggering precapture metering (id %d)",
- __FUNCTION__, id);
- }
- return res;
-}
-
-status_t Camera2Device::pushReprocessBuffer(int reprocessStreamId,
- buffer_handle_t *buffer, wp<BufferReleasedListener> listener) {
- ATRACE_CALL();
- ALOGV("%s: E", __FUNCTION__);
- bool found = false;
- status_t res = OK;
- for (ReprocessStreamList::iterator streamI = mReprocessStreams.begin();
- streamI != mReprocessStreams.end(); streamI++) {
- if ((*streamI)->getId() == reprocessStreamId) {
- res = (*streamI)->pushIntoStream(buffer, listener);
- if (res != OK) {
- ALOGE("%s: Unable to push buffer to reprocess stream %d: %s (%d)",
- __FUNCTION__, reprocessStreamId, strerror(-res), res);
- return res;
- }
- found = true;
- break;
- }
- }
- if (!found) {
- ALOGE("%s: Camera %d: Unable to find reprocess stream %d",
- __FUNCTION__, mId, reprocessStreamId);
- res = BAD_VALUE;
- }
- return res;
-}
-
-status_t Camera2Device::flush(int64_t* /*lastFrameNumber*/) {
- ATRACE_CALL();
-
- mRequestQueue.clear();
- return waitUntilDrained();
-}
-
-status_t Camera2Device::prepare(int streamId) {
- ATRACE_CALL();
- ALOGE("%s: Camera %d: unimplemented", __FUNCTION__, mId);
- return NO_INIT;
-}
-
-status_t Camera2Device::tearDown(int streamId) {
- ATRACE_CALL();
- ALOGE("%s: Camera %d: unimplemented", __FUNCTION__, mId);
- return NO_INIT;
-}
-
-status_t Camera2Device::prepare(int maxCount, int streamId) {
- ATRACE_CALL();
- ALOGE("%s: Camera %d: unimplemented", __FUNCTION__, mId);
- return NO_INIT;
-}
-
-uint32_t Camera2Device::getDeviceVersion() {
- ATRACE_CALL();
- return mDeviceVersion;
-}
-
-/**
- * Camera2Device::MetadataQueue
- */
-
-Camera2Device::MetadataQueue::MetadataQueue():
- mHal2Device(NULL),
- mFrameCount(0),
- mLatestRequestId(0),
- mCount(0),
- mStreamSlotCount(0),
- mSignalConsumer(true)
-{
- ATRACE_CALL();
- camera2_request_queue_src_ops::dequeue_request = consumer_dequeue;
- camera2_request_queue_src_ops::request_count = consumer_buffer_count;
- camera2_request_queue_src_ops::free_request = consumer_free;
-
- camera2_frame_queue_dst_ops::dequeue_frame = producer_dequeue;
- camera2_frame_queue_dst_ops::cancel_frame = producer_cancel;
- camera2_frame_queue_dst_ops::enqueue_frame = producer_enqueue;
-}
-
-Camera2Device::MetadataQueue::~MetadataQueue() {
- ATRACE_CALL();
- clear();
-}
-
-// Connect to camera2 HAL as consumer (input requests/reprocessing)
-status_t Camera2Device::MetadataQueue::setConsumerDevice(camera2_device_t *d) {
- ATRACE_CALL();
- status_t res;
- res = d->ops->set_request_queue_src_ops(d,
- this);
- if (res != OK) return res;
- mHal2Device = d;
- return OK;
-}
-
-status_t Camera2Device::MetadataQueue::setProducerDevice(camera2_device_t *d) {
- ATRACE_CALL();
- status_t res;
- res = d->ops->set_frame_queue_dst_ops(d,
- this);
- return res;
-}
-
-// Real interfaces
-status_t Camera2Device::MetadataQueue::enqueue(camera_metadata_t *buf) {
- ATRACE_CALL();
- ALOGVV("%s: E", __FUNCTION__);
- Mutex::Autolock l(mMutex);
-
- mCount++;
- mEntries.push_back(buf);
-
- return signalConsumerLocked();
-}
-
-int Camera2Device::MetadataQueue::getBufferCount() {
- ATRACE_CALL();
- Mutex::Autolock l(mMutex);
- if (mStreamSlotCount > 0) {
- return CAMERA2_REQUEST_QUEUE_IS_BOTTOMLESS;
- }
- return mCount;
-}
-
-status_t Camera2Device::MetadataQueue::dequeue(camera_metadata_t **buf,
- bool incrementCount)
-{
- ATRACE_CALL();
- ALOGVV("%s: E", __FUNCTION__);
- status_t res;
- Mutex::Autolock l(mMutex);
-
- if (mCount == 0) {
- if (mStreamSlotCount == 0) {
- ALOGVV("%s: Empty", __FUNCTION__);
- *buf = NULL;
- mSignalConsumer = true;
- return OK;
- }
- ALOGVV("%s: Streaming %d frames to queue", __FUNCTION__,
- mStreamSlotCount);
-
- for (List<camera_metadata_t*>::iterator slotEntry = mStreamSlot.begin();
- slotEntry != mStreamSlot.end();
- slotEntry++ ) {
- size_t entries = get_camera_metadata_entry_count(*slotEntry);
- size_t dataBytes = get_camera_metadata_data_count(*slotEntry);
-
- camera_metadata_t *copy =
- allocate_camera_metadata(entries, dataBytes);
- append_camera_metadata(copy, *slotEntry);
- mEntries.push_back(copy);
- }
- mCount = mStreamSlotCount;
- }
- ALOGVV("MetadataQueue: deque (%d buffers)", mCount);
- camera_metadata_t *b = *(mEntries.begin());
- mEntries.erase(mEntries.begin());
-
- if (incrementCount) {
- ATRACE_INT("cam2_request", mFrameCount);
- camera_metadata_entry_t frameCount;
- res = find_camera_metadata_entry(b,
- ANDROID_REQUEST_FRAME_COUNT,
- &frameCount);
- if (res != OK) {
- ALOGE("%s: Unable to add frame count: %s (%d)",
- __FUNCTION__, strerror(-res), res);
- } else {
- *frameCount.data.i32 = mFrameCount;
- }
- mFrameCount++;
- }
-
- // Check for request ID, and if present, signal waiters.
- camera_metadata_entry_t requestId;
- res = find_camera_metadata_entry(b,
- ANDROID_REQUEST_ID,
- &requestId);
- if (res == OK) {
- mLatestRequestId = requestId.data.i32[0];
- mNewRequestId.signal();
- }
-
- *buf = b;
- mCount--;
-
- return OK;
-}
-
-status_t Camera2Device::MetadataQueue::waitForBuffer(nsecs_t timeout)
-{
- Mutex::Autolock l(mMutex);
- status_t res;
- while (mCount == 0) {
- res = notEmpty.waitRelative(mMutex,timeout);
- if (res != OK) return res;
- }
- return OK;
-}
-
-status_t Camera2Device::MetadataQueue::waitForDequeue(int32_t id,
- nsecs_t timeout) {
- Mutex::Autolock l(mMutex);
- status_t res;
- while (mLatestRequestId != id) {
- nsecs_t startTime = systemTime();
-
- res = mNewRequestId.waitRelative(mMutex, timeout);
- if (res != OK) return res;
-
- timeout -= (systemTime() - startTime);
- }
-
- return OK;
-}
-
-status_t Camera2Device::MetadataQueue::setStreamSlot(camera_metadata_t *buf)
-{
- ATRACE_CALL();
- ALOGV("%s: E", __FUNCTION__);
- Mutex::Autolock l(mMutex);
- if (buf == NULL) {
- freeBuffers(mStreamSlot.begin(), mStreamSlot.end());
- mStreamSlotCount = 0;
- return OK;
- }
-
- if (mStreamSlotCount > 1) {
- List<camera_metadata_t*>::iterator deleter = ++mStreamSlot.begin();
- freeBuffers(++mStreamSlot.begin(), mStreamSlot.end());
- mStreamSlotCount = 1;
- }
- if (mStreamSlotCount == 1) {
- free_camera_metadata( *(mStreamSlot.begin()) );
- *(mStreamSlot.begin()) = buf;
- } else {
- mStreamSlot.push_front(buf);
- mStreamSlotCount = 1;
- }
- return signalConsumerLocked();
-}
-
-status_t Camera2Device::MetadataQueue::setStreamSlot(
- const List<camera_metadata_t*> &bufs)
-{
- ATRACE_CALL();
- ALOGV("%s: E", __FUNCTION__);
- Mutex::Autolock l(mMutex);
-
- if (mStreamSlotCount > 0) {
- freeBuffers(mStreamSlot.begin(), mStreamSlot.end());
- }
- mStreamSlotCount = 0;
- for (List<camera_metadata_t*>::const_iterator r = bufs.begin();
- r != bufs.end(); r++) {
- mStreamSlot.push_back(*r);
- mStreamSlotCount++;
- }
- return signalConsumerLocked();
-}
-
-status_t Camera2Device::MetadataQueue::clear()
-{
- ATRACE_CALL();
- ALOGV("%s: E", __FUNCTION__);
-
- Mutex::Autolock l(mMutex);
-
- // Clear streaming slot
- freeBuffers(mStreamSlot.begin(), mStreamSlot.end());
- mStreamSlotCount = 0;
-
- // Clear request queue
- freeBuffers(mEntries.begin(), mEntries.end());
- mCount = 0;
- return OK;
-}
-
-status_t Camera2Device::MetadataQueue::dump(int fd,
- const Vector<String16>& /*args*/) {
- ATRACE_CALL();
- String8 result;
- status_t notLocked;
- notLocked = mMutex.tryLock();
- if (notLocked) {
- result.append(" (Unable to lock queue mutex)\n");
- }
- result.appendFormat(" Current frame number: %d\n", mFrameCount);
- if (mStreamSlotCount == 0) {
- result.append(" Stream slot: Empty\n");
- write(fd, result.string(), result.size());
- } else {
- result.appendFormat(" Stream slot: %zu entries\n",
- mStreamSlot.size());
- int i = 0;
- for (List<camera_metadata_t*>::iterator r = mStreamSlot.begin();
- r != mStreamSlot.end(); r++) {
- result = String8::format(" Stream slot buffer %d:\n", i);
- write(fd, result.string(), result.size());
- dump_indented_camera_metadata(*r, fd, 2, 10);
- i++;
- }
- }
- if (mEntries.size() == 0) {
- result = " Main queue is empty\n";
- write(fd, result.string(), result.size());
- } else {
- result = String8::format(" Main queue has %zu entries:\n",
- mEntries.size());
- int i = 0;
- for (List<camera_metadata_t*>::iterator r = mEntries.begin();
- r != mEntries.end(); r++) {
- result = String8::format(" Queue entry %d:\n", i);
- write(fd, result.string(), result.size());
- dump_indented_camera_metadata(*r, fd, 2, 10);
- i++;
- }
- }
-
- if (notLocked == 0) {
- mMutex.unlock();
- }
-
- return OK;
-}
-
-status_t Camera2Device::MetadataQueue::signalConsumerLocked() {
- ATRACE_CALL();
- status_t res = OK;
- notEmpty.signal();
- if (mSignalConsumer && mHal2Device != NULL) {
- mSignalConsumer = false;
-
- mMutex.unlock();
- ALOGV("%s: Signaling consumer", __FUNCTION__);
- res = mHal2Device->ops->notify_request_queue_not_empty(mHal2Device);
- mMutex.lock();
- }
- return res;
-}
-
-status_t Camera2Device::MetadataQueue::freeBuffers(
- List<camera_metadata_t*>::iterator start,
- List<camera_metadata_t*>::iterator end)
-{
- ATRACE_CALL();
- while (start != end) {
- free_camera_metadata(*start);
- start = mStreamSlot.erase(start);
- }
- return OK;
-}
-
-Camera2Device::MetadataQueue* Camera2Device::MetadataQueue::getInstance(
- const camera2_request_queue_src_ops_t *q)
-{
- const MetadataQueue* cmq = static_cast<const MetadataQueue*>(q);
- return const_cast<MetadataQueue*>(cmq);
-}
-
-Camera2Device::MetadataQueue* Camera2Device::MetadataQueue::getInstance(
- const camera2_frame_queue_dst_ops_t *q)
-{
- const MetadataQueue* cmq = static_cast<const MetadataQueue*>(q);
- return const_cast<MetadataQueue*>(cmq);
-}
-
-int Camera2Device::MetadataQueue::consumer_buffer_count(
- const camera2_request_queue_src_ops_t *q)
-{
- MetadataQueue *queue = getInstance(q);
- return queue->getBufferCount();
-}
-
-int Camera2Device::MetadataQueue::consumer_dequeue(
- const camera2_request_queue_src_ops_t *q,
- camera_metadata_t **buffer)
-{
- MetadataQueue *queue = getInstance(q);
- return queue->dequeue(buffer, true);
-}
-
-int Camera2Device::MetadataQueue::consumer_free(
- const camera2_request_queue_src_ops_t *q,
- camera_metadata_t *old_buffer)
-{
- ATRACE_CALL();
- MetadataQueue *queue = getInstance(q);
- (void)queue;
- free_camera_metadata(old_buffer);
- return OK;
-}
-
-int Camera2Device::MetadataQueue::producer_dequeue(
- const camera2_frame_queue_dst_ops_t * /*q*/,
- size_t entries, size_t bytes,
- camera_metadata_t **buffer)
-{
- ATRACE_CALL();
- camera_metadata_t *new_buffer =
- allocate_camera_metadata(entries, bytes);
- if (new_buffer == NULL) return NO_MEMORY;
- *buffer = new_buffer;
- return OK;
-}
-
-int Camera2Device::MetadataQueue::producer_cancel(
- const camera2_frame_queue_dst_ops_t * /*q*/,
- camera_metadata_t *old_buffer)
-{
- ATRACE_CALL();
- free_camera_metadata(old_buffer);
- return OK;
-}
-
-int Camera2Device::MetadataQueue::producer_enqueue(
- const camera2_frame_queue_dst_ops_t *q,
- camera_metadata_t *filled_buffer)
-{
- MetadataQueue *queue = getInstance(q);
- return queue->enqueue(filled_buffer);
-}
-
-/**
- * Camera2Device::StreamAdapter
- */
-
-#ifndef container_of
-#define container_of(ptr, type, member) \
- (type *)((char*)(ptr) - offsetof(type, member))
-#endif
-
-Camera2Device::StreamAdapter::StreamAdapter(camera2_device_t *d):
- mState(RELEASED),
- mHal2Device(d),
- mId(-1),
- mWidth(0), mHeight(0), mFormat(0), mSize(0), mUsage(0),
- mMaxProducerBuffers(0), mMaxConsumerBuffers(0),
- mTotalBuffers(0),
- mFormatRequested(0),
- mActiveBuffers(0),
- mFrameCount(0),
- mLastTimestamp(0)
-{
- camera2_stream_ops::dequeue_buffer = dequeue_buffer;
- camera2_stream_ops::enqueue_buffer = enqueue_buffer;
- camera2_stream_ops::cancel_buffer = cancel_buffer;
- camera2_stream_ops::set_crop = set_crop;
-}
-
-Camera2Device::StreamAdapter::~StreamAdapter() {
- ATRACE_CALL();
- if (mState != RELEASED) {
- release();
- }
-}
-
-status_t Camera2Device::StreamAdapter::connectToDevice(
- sp<ANativeWindow> consumer,
- uint32_t width, uint32_t height, int format, size_t size) {
- ATRACE_CALL();
- status_t res;
- ALOGV("%s: E", __FUNCTION__);
-
- if (mState != RELEASED) return INVALID_OPERATION;
- if (consumer == NULL) {
- ALOGE("%s: Null consumer passed to stream adapter", __FUNCTION__);
- return BAD_VALUE;
- }
-
- ALOGV("%s: New stream parameters %d x %d, format 0x%x, size %zu",
- __FUNCTION__, width, height, format, size);
-
- mConsumerInterface = consumer;
- mWidth = width;
- mHeight = height;
- mSize = (format == HAL_PIXEL_FORMAT_BLOB) ? size : 0;
- mFormatRequested = format;
-
- // Allocate device-side stream interface
-
- uint32_t id;
- uint32_t formatActual;
- uint32_t usage;
- uint32_t maxBuffers = 2;
- res = mHal2Device->ops->allocate_stream(mHal2Device,
- mWidth, mHeight, mFormatRequested, getStreamOps(),
- &id, &formatActual, &usage, &maxBuffers);
- if (res != OK) {
- ALOGE("%s: Device stream allocation failed: %s (%d)",
- __FUNCTION__, strerror(-res), res);
- return res;
- }
-
- ALOGV("%s: Allocated stream id %d, actual format 0x%x, "
- "usage 0x%x, producer wants %d buffers", __FUNCTION__,
- id, formatActual, usage, maxBuffers);
-
- mId = id;
- mFormat = formatActual;
- mUsage = usage;
- mMaxProducerBuffers = maxBuffers;
-
- mState = ALLOCATED;
-
- // Configure consumer-side ANativeWindow interface
- res = native_window_api_connect(mConsumerInterface.get(),
- NATIVE_WINDOW_API_CAMERA);
- if (res != OK) {
- ALOGE("%s: Unable to connect to native window for stream %d",
- __FUNCTION__, mId);
-
- return res;
- }
-
- mState = CONNECTED;
-
- res = native_window_set_usage(mConsumerInterface.get(), mUsage);
- if (res != OK) {
- ALOGE("%s: Unable to configure usage %08x for stream %d",
- __FUNCTION__, mUsage, mId);
- return res;
- }
-
- res = native_window_set_scaling_mode(mConsumerInterface.get(),
- NATIVE_WINDOW_SCALING_MODE_SCALE_TO_WINDOW);
- if (res != OK) {
- ALOGE("%s: Unable to configure stream scaling: %s (%d)",
- __FUNCTION__, strerror(-res), res);
- return res;
- }
-
- res = setTransform(0);
- if (res != OK) {
- return res;
- }
-
- if (mFormat == HAL_PIXEL_FORMAT_BLOB) {
- res = native_window_set_buffers_dimensions(mConsumerInterface.get(),
- mSize, 1);
- if (res != OK) {
- ALOGE("%s: Unable to configure compressed stream buffer dimensions"
- " %d x %d, size %zu for stream %d",
- __FUNCTION__, mWidth, mHeight, mSize, mId);
- return res;
- }
- } else {
- res = native_window_set_buffers_dimensions(mConsumerInterface.get(),
- mWidth, mHeight);
- if (res != OK) {
- ALOGE("%s: Unable to configure stream buffer dimensions"
- " %d x %d for stream %d",
- __FUNCTION__, mWidth, mHeight, mId);
- return res;
- }
- }
-
- res = native_window_set_buffers_format(mConsumerInterface.get(), mFormat);
- if (res != OK) {
- ALOGE("%s: Unable to configure stream buffer format"
- " %#x for stream %d",
- __FUNCTION__, mFormat, mId);
- return res;
- }
-
- int maxConsumerBuffers;
- res = mConsumerInterface->query(mConsumerInterface.get(),
- NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS, &maxConsumerBuffers);
- if (res != OK) {
- ALOGE("%s: Unable to query consumer undequeued"
- " buffer count for stream %d", __FUNCTION__, mId);
- return res;
- }
- mMaxConsumerBuffers = maxConsumerBuffers;
-
- ALOGV("%s: Consumer wants %d buffers", __FUNCTION__,
- mMaxConsumerBuffers);
-
- mTotalBuffers = mMaxConsumerBuffers + mMaxProducerBuffers;
- mActiveBuffers = 0;
- mFrameCount = 0;
- mLastTimestamp = 0;
-
- res = native_window_set_buffer_count(mConsumerInterface.get(),
- mTotalBuffers);
- if (res != OK) {
- ALOGE("%s: Unable to set buffer count for stream %d",
- __FUNCTION__, mId);
- return res;
- }
-
- // Register allocated buffers with HAL device
- buffer_handle_t *buffers = new buffer_handle_t[mTotalBuffers];
- ANativeWindowBuffer **anwBuffers = new ANativeWindowBuffer*[mTotalBuffers];
- uint32_t bufferIdx = 0;
- for (; bufferIdx < mTotalBuffers; bufferIdx++) {
- res = native_window_dequeue_buffer_and_wait(mConsumerInterface.get(),
- &anwBuffers[bufferIdx]);
- if (res != OK) {
- ALOGE("%s: Unable to dequeue buffer %d for initial registration for "
- "stream %d", __FUNCTION__, bufferIdx, mId);
- goto cleanUpBuffers;
- }
-
- buffers[bufferIdx] = anwBuffers[bufferIdx]->handle;
- ALOGV("%s: Buffer %p allocated", __FUNCTION__, (void*)buffers[bufferIdx]);
- }
-
- ALOGV("%s: Registering %d buffers with camera HAL", __FUNCTION__, mTotalBuffers);
- res = mHal2Device->ops->register_stream_buffers(mHal2Device,
- mId,
- mTotalBuffers,
- buffers);
- if (res != OK) {
- ALOGE("%s: Unable to register buffers with HAL device for stream %d",
- __FUNCTION__, mId);
- } else {
- mState = ACTIVE;
- }
-
-cleanUpBuffers:
- ALOGV("%s: Cleaning up %d buffers", __FUNCTION__, bufferIdx);
- for (uint32_t i = 0; i < bufferIdx; i++) {
- res = mConsumerInterface->cancelBuffer(mConsumerInterface.get(),
- anwBuffers[i], -1);
- if (res != OK) {
- ALOGE("%s: Unable to cancel buffer %d after registration",
- __FUNCTION__, i);
- }
- }
- delete[] anwBuffers;
- delete[] buffers;
-
- return res;
-}
-
-status_t Camera2Device::StreamAdapter::release() {
- ATRACE_CALL();
- status_t res;
- ALOGV("%s: Releasing stream %d (%d x %d, format %d)", __FUNCTION__, mId,
- mWidth, mHeight, mFormat);
- if (mState >= ALLOCATED) {
- res = mHal2Device->ops->release_stream(mHal2Device, mId);
- if (res != OK) {
- ALOGE("%s: Unable to release stream %d",
- __FUNCTION__, mId);
- return res;
- }
- }
- if (mState >= CONNECTED) {
- res = native_window_api_disconnect(mConsumerInterface.get(),
- NATIVE_WINDOW_API_CAMERA);
-
- /* this is not an error. if client calling process dies,
- the window will also die and all calls to it will return
- DEAD_OBJECT, thus it's already "disconnected" */
- if (res == DEAD_OBJECT) {
- ALOGW("%s: While disconnecting stream %d from native window, the"
- " native window died from under us", __FUNCTION__, mId);
- }
- else if (res != OK) {
- ALOGE("%s: Unable to disconnect stream %d from native window (error %d %s)",
- __FUNCTION__, mId, res, strerror(-res));
- return res;
- }
- }
- mId = -1;
- mState = RELEASED;
- return OK;
-}
-
-status_t Camera2Device::StreamAdapter::setTransform(int transform) {
- ATRACE_CALL();
- status_t res;
- if (mState < CONNECTED) {
- ALOGE("%s: Cannot set transform on unconnected stream", __FUNCTION__);
- return INVALID_OPERATION;
- }
- res = native_window_set_buffers_transform(mConsumerInterface.get(),
- transform);
- if (res != OK) {
- ALOGE("%s: Unable to configure stream transform to %x: %s (%d)",
- __FUNCTION__, transform, strerror(-res), res);
- }
- return res;
-}
-
-status_t Camera2Device::StreamAdapter::dump(int fd,
- const Vector<String16>& /*args*/) {
- ATRACE_CALL();
- String8 result = String8::format(" Stream %d: %d x %d, format 0x%x\n",
- mId, mWidth, mHeight, mFormat);
- result.appendFormat(" size %zu, usage 0x%x, requested format 0x%x\n",
- mSize, mUsage, mFormatRequested);
- result.appendFormat(" total buffers: %d, dequeued buffers: %d\n",
- mTotalBuffers, mActiveBuffers);
- result.appendFormat(" frame count: %d, last timestamp %" PRId64 "\n",
- mFrameCount, mLastTimestamp);
- write(fd, result.string(), result.size());
- return OK;
-}
-
-const camera2_stream_ops *Camera2Device::StreamAdapter::getStreamOps() {
- return static_cast<camera2_stream_ops *>(this);
-}
-
-ANativeWindow* Camera2Device::StreamAdapter::toANW(
- const camera2_stream_ops_t *w) {
- return static_cast<const StreamAdapter*>(w)->mConsumerInterface.get();
-}
-
-int Camera2Device::StreamAdapter::dequeue_buffer(const camera2_stream_ops_t *w,
- buffer_handle_t** buffer) {
- ATRACE_CALL();
- int res;
- StreamAdapter* stream =
- const_cast<StreamAdapter*>(static_cast<const StreamAdapter*>(w));
- if (stream->mState != ACTIVE) {
- ALOGE("%s: Called when in bad state: %d", __FUNCTION__, stream->mState);
- return INVALID_OPERATION;
- }
-
- ANativeWindow *a = toANW(w);
- ANativeWindowBuffer* anb;
- res = native_window_dequeue_buffer_and_wait(a, &anb);
- if (res != OK) {
- ALOGE("Stream %d dequeue: Error from native_window: %s (%d)", stream->mId,
- strerror(-res), res);
- return res;
- }
-
- *buffer = &(anb->handle);
- stream->mActiveBuffers++;
-
- ALOGVV("Stream %d dequeue: Buffer %p dequeued", stream->mId, (void*)(**buffer));
- return res;
-}
-
-int Camera2Device::StreamAdapter::enqueue_buffer(const camera2_stream_ops_t* w,
- int64_t timestamp,
- buffer_handle_t* buffer) {
- ATRACE_CALL();
- StreamAdapter *stream =
- const_cast<StreamAdapter*>(static_cast<const StreamAdapter*>(w));
- stream->mFrameCount++;
- ALOGVV("Stream %d enqueue: Frame %d (%p) captured at %lld ns",
- stream->mId, stream->mFrameCount, (void*)(*buffer), timestamp);
- int state = stream->mState;
- if (state != ACTIVE) {
- ALOGE("%s: Called when in bad state: %d", __FUNCTION__, state);
- return INVALID_OPERATION;
- }
- ANativeWindow *a = toANW(w);
- status_t err;
-
- err = native_window_set_buffers_timestamp(a, timestamp);
- if (err != OK) {
- ALOGE("%s: Error setting timestamp on native window: %s (%d)",
- __FUNCTION__, strerror(-err), err);
- return err;
- }
- err = a->queueBuffer(a,
- container_of(buffer, ANativeWindowBuffer, handle), -1);
- if (err != OK) {
- ALOGE("%s: Error queueing buffer to native window: %s (%d)",
- __FUNCTION__, strerror(-err), err);
- return err;
- }
-
- stream->mActiveBuffers--;
- stream->mLastTimestamp = timestamp;
- return OK;
-}
-
-int Camera2Device::StreamAdapter::cancel_buffer(const camera2_stream_ops_t* w,
- buffer_handle_t* buffer) {
- ATRACE_CALL();
- StreamAdapter *stream =
- const_cast<StreamAdapter*>(static_cast<const StreamAdapter*>(w));
- ALOGVV("Stream %d cancel: Buffer %p",
- stream->mId, (void*)(*buffer));
- if (stream->mState != ACTIVE) {
- ALOGE("%s: Called when in bad state: %d", __FUNCTION__, stream->mState);
- return INVALID_OPERATION;
- }
-
- ANativeWindow *a = toANW(w);
- int err = a->cancelBuffer(a,
- container_of(buffer, ANativeWindowBuffer, handle), -1);
- if (err != OK) {
- ALOGE("%s: Error canceling buffer to native window: %s (%d)",
- __FUNCTION__, strerror(-err), err);
- return err;
- }
-
- stream->mActiveBuffers--;
- return OK;
-}
-
-int Camera2Device::StreamAdapter::set_crop(const camera2_stream_ops_t* w,
- int left, int top, int right, int bottom) {
- ATRACE_CALL();
- int state = static_cast<const StreamAdapter*>(w)->mState;
- if (state != ACTIVE) {
- ALOGE("%s: Called when in bad state: %d", __FUNCTION__, state);
- return INVALID_OPERATION;
- }
- ANativeWindow *a = toANW(w);
- android_native_rect_t crop = { left, top, right, bottom };
- return native_window_set_crop(a, &crop);
-}
-
-/**
- * Camera2Device::ReprocessStreamAdapter
- */
-
-#ifndef container_of
-#define container_of(ptr, type, member) \
- (type *)((char*)(ptr) - offsetof(type, member))
-#endif
-
-Camera2Device::ReprocessStreamAdapter::ReprocessStreamAdapter(camera2_device_t *d):
- mState(RELEASED),
- mHal2Device(d),
- mId(-1),
- mWidth(0), mHeight(0), mFormat(0),
- mActiveBuffers(0),
- mFrameCount(0)
-{
- ATRACE_CALL();
- camera2_stream_in_ops::acquire_buffer = acquire_buffer;
- camera2_stream_in_ops::release_buffer = release_buffer;
-}
-
-Camera2Device::ReprocessStreamAdapter::~ReprocessStreamAdapter() {
- ATRACE_CALL();
- if (mState != RELEASED) {
- release();
- }
-}
-
-status_t Camera2Device::ReprocessStreamAdapter::connectToDevice(
- const sp<StreamAdapter> &outputStream) {
- ATRACE_CALL();
- status_t res;
- ALOGV("%s: E", __FUNCTION__);
-
- if (mState != RELEASED) return INVALID_OPERATION;
- if (outputStream == NULL) {
- ALOGE("%s: Null base stream passed to reprocess stream adapter",
- __FUNCTION__);
- return BAD_VALUE;
- }
-
- mBaseStream = outputStream;
- mWidth = outputStream->getWidth();
- mHeight = outputStream->getHeight();
- mFormat = outputStream->getFormat();
-
- ALOGV("%s: New reprocess stream parameters %d x %d, format 0x%x",
- __FUNCTION__, mWidth, mHeight, mFormat);
-
- // Allocate device-side stream interface
-
- uint32_t id;
- res = mHal2Device->ops->allocate_reprocess_stream_from_stream(mHal2Device,
- outputStream->getId(), getStreamOps(),
- &id);
- if (res != OK) {
- ALOGE("%s: Device reprocess stream allocation failed: %s (%d)",
- __FUNCTION__, strerror(-res), res);
- return res;
- }
-
- ALOGV("%s: Allocated reprocess stream id %d based on stream %d",
- __FUNCTION__, id, outputStream->getId());
-
- mId = id;
-
- mState = ACTIVE;
-
- return OK;
-}
-
-status_t Camera2Device::ReprocessStreamAdapter::release() {
- ATRACE_CALL();
- status_t res;
- ALOGV("%s: Releasing stream %d", __FUNCTION__, mId);
- if (mState >= ACTIVE) {
- res = mHal2Device->ops->release_reprocess_stream(mHal2Device, mId);
- if (res != OK) {
- ALOGE("%s: Unable to release stream %d",
- __FUNCTION__, mId);
- return res;
- }
- }
-
- List<QueueEntry>::iterator s;
- for (s = mQueue.begin(); s != mQueue.end(); s++) {
- sp<BufferReleasedListener> listener = s->releaseListener.promote();
- if (listener != 0) listener->onBufferReleased(s->handle);
- }
- for (s = mInFlightQueue.begin(); s != mInFlightQueue.end(); s++) {
- sp<BufferReleasedListener> listener = s->releaseListener.promote();
- if (listener != 0) listener->onBufferReleased(s->handle);
- }
- mQueue.clear();
- mInFlightQueue.clear();
-
- mState = RELEASED;
- return OK;
-}
-
-status_t Camera2Device::ReprocessStreamAdapter::pushIntoStream(
- buffer_handle_t *handle, const wp<BufferReleasedListener> &releaseListener) {
- ATRACE_CALL();
- // TODO: Some error checking here would be nice
- ALOGV("%s: Pushing buffer %p to stream", __FUNCTION__, (void*)(*handle));
-
- QueueEntry entry;
- entry.handle = handle;
- entry.releaseListener = releaseListener;
- mQueue.push_back(entry);
- return OK;
-}
-
-status_t Camera2Device::ReprocessStreamAdapter::dump(int fd,
- const Vector<String16>& /*args*/) {
- ATRACE_CALL();
- String8 result =
- String8::format(" Reprocess stream %d: %d x %d, fmt 0x%x\n",
- mId, mWidth, mHeight, mFormat);
- result.appendFormat(" acquired buffers: %d\n",
- mActiveBuffers);
- result.appendFormat(" frame count: %d\n",
- mFrameCount);
- write(fd, result.string(), result.size());
- return OK;
-}
-
-const camera2_stream_in_ops *Camera2Device::ReprocessStreamAdapter::getStreamOps() {
- return static_cast<camera2_stream_in_ops *>(this);
-}
-
-int Camera2Device::ReprocessStreamAdapter::acquire_buffer(
- const camera2_stream_in_ops_t *w,
- buffer_handle_t** buffer) {
- ATRACE_CALL();
-
- ReprocessStreamAdapter* stream =
- const_cast<ReprocessStreamAdapter*>(
- static_cast<const ReprocessStreamAdapter*>(w));
- if (stream->mState != ACTIVE) {
- ALOGE("%s: Called when in bad state: %d", __FUNCTION__, stream->mState);
- return INVALID_OPERATION;
- }
-
- if (stream->mQueue.empty()) {
- *buffer = NULL;
- return OK;
- }
-
- QueueEntry &entry = *(stream->mQueue.begin());
-
- *buffer = entry.handle;
-
- stream->mInFlightQueue.push_back(entry);
- stream->mQueue.erase(stream->mQueue.begin());
-
- stream->mActiveBuffers++;
-
- ALOGV("Stream %d acquire: Buffer %p acquired", stream->mId,
- (void*)(**buffer));
- return OK;
-}
-
-int Camera2Device::ReprocessStreamAdapter::release_buffer(
- const camera2_stream_in_ops_t* w,
- buffer_handle_t* buffer) {
- ATRACE_CALL();
- ReprocessStreamAdapter *stream =
- const_cast<ReprocessStreamAdapter*>(
- static_cast<const ReprocessStreamAdapter*>(w) );
- stream->mFrameCount++;
- ALOGV("Reprocess stream %d release: Frame %d (%p)",
- stream->mId, stream->mFrameCount, (void*)*buffer);
- int state = stream->mState;
- if (state != ACTIVE) {
- ALOGE("%s: Called when in bad state: %d", __FUNCTION__, state);
- return INVALID_OPERATION;
- }
- stream->mActiveBuffers--;
-
- List<QueueEntry>::iterator s;
- for (s = stream->mInFlightQueue.begin(); s != stream->mInFlightQueue.end(); s++) {
- if ( s->handle == buffer ) break;
- }
- if (s == stream->mInFlightQueue.end()) {
- ALOGE("%s: Can't find buffer %p in in-flight list!", __FUNCTION__,
- buffer);
- return INVALID_OPERATION;
- }
-
- sp<BufferReleasedListener> listener = s->releaseListener.promote();
- if (listener != 0) {
- listener->onBufferReleased(s->handle);
- } else {
- ALOGE("%s: Can't free buffer - missing listener", __FUNCTION__);
- }
- stream->mInFlightQueue.erase(s);
-
- return OK;
-}
-
-// camera 2 devices don't support reprocessing
-status_t Camera2Device::createInputStream(
- uint32_t width, uint32_t height, int format, int *id) {
- ALOGE("%s: camera 2 devices don't support reprocessing", __FUNCTION__);
- return INVALID_OPERATION;
-}
-
-// camera 2 devices don't support reprocessing
-status_t Camera2Device::getInputBufferProducer(
- sp<IGraphicBufferProducer> *producer) {
- ALOGE("%s: camera 2 devices don't support reprocessing", __FUNCTION__);
- return INVALID_OPERATION;
-}
-
-}; // namespace android
diff --git a/services/camera/libcameraservice/device2/Camera2Device.h b/services/camera/libcameraservice/device2/Camera2Device.h
deleted file mode 100644
index b4d343c..0000000
--- a/services/camera/libcameraservice/device2/Camera2Device.h
+++ /dev/null
@@ -1,375 +0,0 @@
-/*
- * Copyright (C) 2012 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ANDROID_SERVERS_CAMERA_CAMERA2DEVICE_H
-#define ANDROID_SERVERS_CAMERA_CAMERA2DEVICE_H
-
-#include <utils/Condition.h>
-#include <utils/Errors.h>
-#include <utils/List.h>
-#include <utils/Mutex.h>
-
-#include "common/CameraDeviceBase.h"
-
-namespace android {
-
-/**
- * CameraDevice for HAL devices with version CAMERA_DEVICE_API_VERSION_2_0
- *
- * TODO for camera2 API implementation:
- * Does not produce notifyShutter / notifyIdle callbacks to NotificationListener
- * Use waitUntilDrained for idle.
- */
-class Camera2Device: public CameraDeviceBase {
- public:
- Camera2Device(int id);
-
- virtual ~Camera2Device();
-
- /**
- * CameraDevice interface
- */
- virtual int getId() const;
- virtual status_t initialize(CameraModule *module);
- virtual status_t disconnect();
- virtual status_t dump(int fd, const Vector<String16>& args);
- virtual const CameraMetadata& info() const;
- virtual status_t capture(CameraMetadata &request, int64_t *lastFrameNumber = NULL);
- virtual status_t captureList(const List<const CameraMetadata> &requests,
- int64_t *lastFrameNumber = NULL);
- virtual status_t setStreamingRequest(const CameraMetadata &request,
- int64_t *lastFrameNumber = NULL);
- virtual status_t setStreamingRequestList(const List<const CameraMetadata> &requests,
- int64_t *lastFrameNumber = NULL);
- virtual status_t clearStreamingRequest(int64_t *lastFrameNumber = NULL);
- virtual status_t waitUntilRequestReceived(int32_t requestId, nsecs_t timeout);
- virtual status_t createStream(sp<Surface> consumer,
- uint32_t width, uint32_t height, int format,
- android_dataspace dataSpace, camera3_stream_rotation_t rotation, int *id);
- virtual status_t createInputStream(
- uint32_t width, uint32_t height, int format, int *id);
- virtual status_t createReprocessStreamFromStream(int outputId, int *id);
- virtual status_t getStreamInfo(int id,
- uint32_t *width, uint32_t *height,
- uint32_t *format, android_dataspace *dataSpace);
- virtual status_t setStreamTransform(int id, int transform);
- virtual status_t deleteStream(int id);
- virtual status_t deleteReprocessStream(int id);
- // No-op on HAL2 devices
- virtual status_t configureStreams(bool isConstrainedHighSpeed = false);
- virtual status_t getInputBufferProducer(
- sp<IGraphicBufferProducer> *producer);
- virtual status_t createDefaultRequest(int templateId, CameraMetadata *request);
- virtual status_t waitUntilDrained();
- virtual status_t setNotifyCallback(NotificationListener *listener);
- virtual bool willNotify3A();
- virtual status_t waitForNextFrame(nsecs_t timeout);
- virtual status_t getNextResult(CaptureResult *frame);
- virtual status_t triggerAutofocus(uint32_t id);
- virtual status_t triggerCancelAutofocus(uint32_t id);
- virtual status_t triggerPrecaptureMetering(uint32_t id);
- virtual status_t pushReprocessBuffer(int reprocessStreamId,
- buffer_handle_t *buffer, wp<BufferReleasedListener> listener);
- // Flush implemented as just a wait
- virtual status_t flush(int64_t *lastFrameNumber = NULL);
- // Prepare and tearDown are no-ops
- virtual status_t prepare(int streamId);
- virtual status_t tearDown(int streamId);
- virtual status_t prepare(int maxCount, int streamId);
-
- virtual uint32_t getDeviceVersion();
- virtual ssize_t getJpegBufferSize(uint32_t width, uint32_t height) const;
-
- private:
- const int mId;
- camera2_device_t *mHal2Device;
-
- CameraMetadata mDeviceInfo;
-
- uint32_t mDeviceVersion;
-
- /**
- * Queue class for both sending requests to a camera2 device, and for
- * receiving frames from a camera2 device.
- */
- class MetadataQueue: public camera2_request_queue_src_ops_t,
- public camera2_frame_queue_dst_ops_t {
- public:
- MetadataQueue();
- ~MetadataQueue();
-
- // Interface to camera2 HAL device, either for requests (device is
- // consumer) or for frames (device is producer)
- const camera2_request_queue_src_ops_t* getToConsumerInterface();
- void setFromConsumerInterface(camera2_device_t *d);
-
- // Connect queue consumer endpoint to a camera2 device
- status_t setConsumerDevice(camera2_device_t *d);
- // Connect queue producer endpoint to a camera2 device
- status_t setProducerDevice(camera2_device_t *d);
-
- const camera2_frame_queue_dst_ops_t* getToProducerInterface();
-
- // Real interfaces. On enqueue, queue takes ownership of buffer pointer
- // On dequeue, user takes ownership of buffer pointer.
- status_t enqueue(camera_metadata_t *buf);
- status_t dequeue(camera_metadata_t **buf, bool incrementCount = false);
- int getBufferCount();
- status_t waitForBuffer(nsecs_t timeout);
- // Wait until a buffer with the given ID is dequeued. Will return
- // immediately if the latest buffer dequeued has that ID.
- status_t waitForDequeue(int32_t id, nsecs_t timeout);
-
- // Set repeating buffer(s); if the queue is empty on a dequeue call, the
- // queue copies the contents of the stream slot into the queue, and then
- // dequeues the first new entry. The methods take the ownership of the
- // metadata buffers passed in.
- status_t setStreamSlot(camera_metadata_t *buf);
- status_t setStreamSlot(const List<camera_metadata_t*> &bufs);
-
- // Clear the request queue and the streaming slot
- status_t clear();
-
- status_t dump(int fd, const Vector<String16>& args);
-
- private:
- status_t signalConsumerLocked();
- status_t freeBuffers(List<camera_metadata_t*>::iterator start,
- List<camera_metadata_t*>::iterator end);
-
- camera2_device_t *mHal2Device;
-
- Mutex mMutex;
- Condition notEmpty;
-
- int mFrameCount;
- int32_t mLatestRequestId;
- Condition mNewRequestId;
-
- int mCount;
- List<camera_metadata_t*> mEntries;
- int mStreamSlotCount;
- List<camera_metadata_t*> mStreamSlot;
-
- bool mSignalConsumer;
-
- static MetadataQueue* getInstance(
- const camera2_frame_queue_dst_ops_t *q);
- static MetadataQueue* getInstance(
- const camera2_request_queue_src_ops_t *q);
-
- static int consumer_buffer_count(
- const camera2_request_queue_src_ops_t *q);
-
- static int consumer_dequeue(const camera2_request_queue_src_ops_t *q,
- camera_metadata_t **buffer);
-
- static int consumer_free(const camera2_request_queue_src_ops_t *q,
- camera_metadata_t *old_buffer);
-
- static int producer_dequeue(const camera2_frame_queue_dst_ops_t *q,
- size_t entries, size_t bytes,
- camera_metadata_t **buffer);
-
- static int producer_cancel(const camera2_frame_queue_dst_ops_t *q,
- camera_metadata_t *old_buffer);
-
- static int producer_enqueue(const camera2_frame_queue_dst_ops_t *q,
- camera_metadata_t *filled_buffer);
-
- }; // class MetadataQueue
-
- MetadataQueue mRequestQueue;
- MetadataQueue mFrameQueue;
-
- /**
- * Adapter from an ANativeWindow interface to camera2 device stream ops.
- * Also takes care of allocating/deallocating stream in device interface
- */
- class StreamAdapter: public camera2_stream_ops, public virtual RefBase {
- public:
- StreamAdapter(camera2_device_t *d);
-
- ~StreamAdapter();
-
- /**
- * Create a HAL device stream of the requested size and format.
- *
- * If format is CAMERA2_HAL_PIXEL_FORMAT_OPAQUE, then the HAL device
- * selects an appropriate format; it can be queried with getFormat.
- *
- * If format is HAL_PIXEL_FORMAT_COMPRESSED, the size parameter must
- * be equal to the size in bytes of the buffers to allocate for the
- * stream. For other formats, the size parameter is ignored.
- */
- status_t connectToDevice(sp<ANativeWindow> consumer,
- uint32_t width, uint32_t height, int format, size_t size);
-
- status_t release();
-
- status_t setTransform(int transform);
-
- // Get stream parameters.
- // Only valid after a successful connectToDevice call.
- int getId() const { return mId; }
- uint32_t getWidth() const { return mWidth; }
- uint32_t getHeight() const { return mHeight; }
- uint32_t getFormat() const { return mFormat; }
-
- // Dump stream information
- status_t dump(int fd, const Vector<String16>& args);
-
- private:
- enum {
- ERROR = -1,
- RELEASED = 0,
- ALLOCATED,
- CONNECTED,
- ACTIVE
- } mState;
-
- sp<ANativeWindow> mConsumerInterface;
- camera2_device_t *mHal2Device;
-
- uint32_t mId;
- uint32_t mWidth;
- uint32_t mHeight;
- uint32_t mFormat;
- size_t mSize;
- uint32_t mUsage;
- uint32_t mMaxProducerBuffers;
- uint32_t mMaxConsumerBuffers;
- uint32_t mTotalBuffers;
- int mFormatRequested;
-
- /** Debugging information */
- uint32_t mActiveBuffers;
- uint32_t mFrameCount;
- int64_t mLastTimestamp;
-
- const camera2_stream_ops *getStreamOps();
-
- static ANativeWindow* toANW(const camera2_stream_ops_t *w);
-
- static int dequeue_buffer(const camera2_stream_ops_t *w,
- buffer_handle_t** buffer);
-
- static int enqueue_buffer(const camera2_stream_ops_t* w,
- int64_t timestamp,
- buffer_handle_t* buffer);
-
- static int cancel_buffer(const camera2_stream_ops_t* w,
- buffer_handle_t* buffer);
-
- static int set_crop(const camera2_stream_ops_t* w,
- int left, int top, int right, int bottom);
- }; // class StreamAdapter
-
- typedef List<sp<StreamAdapter> > StreamList;
- StreamList mStreams;
-
- /**
- * Adapter from an ANativeWindow interface to camera2 device stream ops.
- * Also takes care of allocating/deallocating stream in device interface
- */
- class ReprocessStreamAdapter: public camera2_stream_in_ops, public virtual RefBase {
- public:
- ReprocessStreamAdapter(camera2_device_t *d);
-
- ~ReprocessStreamAdapter();
-
- /**
- * Create a HAL device reprocess stream based on an existing output stream.
- */
- status_t connectToDevice(const sp<StreamAdapter> &outputStream);
-
- status_t release();
-
- /**
- * Push buffer into stream for reprocessing. Takes ownership until it notifies
- * that the buffer has been released
- */
- status_t pushIntoStream(buffer_handle_t *handle,
- const wp<BufferReleasedListener> &releaseListener);
-
- /**
- * Get stream parameters.
- * Only valid after a successful connectToDevice call.
- */
- int getId() const { return mId; }
- uint32_t getWidth() const { return mWidth; }
- uint32_t getHeight() const { return mHeight; }
- uint32_t getFormat() const { return mFormat; }
-
- // Dump stream information
- status_t dump(int fd, const Vector<String16>& args);
-
- private:
- enum {
- ERROR = -1,
- RELEASED = 0,
- ACTIVE
- } mState;
-
- sp<ANativeWindow> mConsumerInterface;
- wp<StreamAdapter> mBaseStream;
-
- struct QueueEntry {
- buffer_handle_t *handle;
- wp<BufferReleasedListener> releaseListener;
- };
-
- List<QueueEntry> mQueue;
-
- List<QueueEntry> mInFlightQueue;
-
- camera2_device_t *mHal2Device;
-
- uint32_t mId;
- uint32_t mWidth;
- uint32_t mHeight;
- uint32_t mFormat;
-
- /** Debugging information */
- uint32_t mActiveBuffers;
- uint32_t mFrameCount;
- int64_t mLastTimestamp;
-
- const camera2_stream_in_ops *getStreamOps();
-
- static int acquire_buffer(const camera2_stream_in_ops_t *w,
- buffer_handle_t** buffer);
-
- static int release_buffer(const camera2_stream_in_ops_t* w,
- buffer_handle_t* buffer);
-
- }; // class ReprocessStreamAdapter
-
- typedef List<sp<ReprocessStreamAdapter> > ReprocessStreamList;
- ReprocessStreamList mReprocessStreams;
-
- // Receives HAL notifications and routes them to the NotificationListener
- static void notificationCallback(int32_t msg_type,
- int32_t ext1,
- int32_t ext2,
- int32_t ext3,
- void *user);
-
-}; // class Camera2Device
-
-}; // namespace android
-
-#endif
diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp
index 6220349..7acd150 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Device.cpp
@@ -3302,7 +3302,7 @@
}
if (mNextRequests.size() < batchSize) {
- ALOGE("RequestThread: only get %d out of %d requests. Skipping requests.",
+ ALOGE("RequestThread: only get %zu out of %zu requests. Skipping requests.",
mNextRequests.size(), batchSize);
cleanUpFailedRequests(/*sendRequestError*/true);
}
@@ -3622,7 +3622,7 @@
status_t Camera3Device::RequestThread::addDummyTriggerIds(
const sp<CaptureRequest> &request) {
- // Trigger ID 0 has special meaning in the HAL2 spec, so avoid it here
+ // Trigger ID 0 had special meaning in the HAL2 spec, so avoid it here
static const int32_t dummyTriggerId = 1;
status_t res;
@@ -3716,8 +3716,6 @@
}
status_t Camera3Device::PreparerThread::clear() {
- status_t res;
-
Mutex::Autolock l(mLock);
for (const auto& stream : mPendingStreams) {