Merge "raw: accept any sample rate greater than 0"
diff --git a/camera/CameraBase.cpp b/camera/CameraBase.cpp
index c53e6c3..aecb70a 100644
--- a/camera/CameraBase.cpp
+++ b/camera/CameraBase.cpp
@@ -60,6 +60,13 @@
if (res != OK) return res;
res = parcel->writeInt32(status);
+ if (res != OK) return res;
+
+ std::vector<String16> unavailablePhysicalIds16;
+ for (auto& id8 : unavailablePhysicalIds) {
+ unavailablePhysicalIds16.push_back(String16(id8));
+ }
+ res = parcel->writeString16Vector(unavailablePhysicalIds16);
return res;
}
@@ -70,6 +77,14 @@
cameraId = String8(tempCameraId);
res = parcel->readInt32(&status);
+ if (res != OK) return res;
+
+ std::vector<String16> unavailablePhysicalIds16;
+ res = parcel->readString16Vector(&unavailablePhysicalIds16);
+ if (res != OK) return res;
+ for (auto& id16 : unavailablePhysicalIds16) {
+ unavailablePhysicalIds.push_back(String8(id16));
+ }
return res;
}
diff --git a/camera/aidl/android/hardware/ICameraServiceListener.aidl b/camera/aidl/android/hardware/ICameraServiceListener.aidl
index e9dcbdb..81657fd 100644
--- a/camera/aidl/android/hardware/ICameraServiceListener.aidl
+++ b/camera/aidl/android/hardware/ICameraServiceListener.aidl
@@ -54,6 +54,12 @@
oneway void onStatusChanged(int status, String cameraId);
/**
+ * Notify the registered client about status changes for a physical camera backing
+ * a logical camera.
+ */
+ oneway void onPhysicalCameraStatusChanged(int status, String cameraId, String physicalCameraId);
+
+ /**
* The torch mode status of a camera.
*
* Initial status will be transmitted with onTorchStatusChanged immediately
diff --git a/camera/aidl/android/hardware/camera2/ICameraDeviceUser.aidl b/camera/aidl/android/hardware/camera2/ICameraDeviceUser.aidl
index cdb7ac3..b183ccc 100644
--- a/camera/aidl/android/hardware/camera2/ICameraDeviceUser.aidl
+++ b/camera/aidl/android/hardware/camera2/ICameraDeviceUser.aidl
@@ -83,8 +83,9 @@
* @param operatingMode The kind of session to create; either NORMAL_MODE or
* CONSTRAINED_HIGH_SPEED_MODE. Must be a non-negative value.
* @param sessionParams Session wide camera parameters
+ * @return a list of stream ids that can be used in offline mode via "switchToOffline"
*/
- void endConfigure(int operatingMode, in CameraMetadataNative sessionParams);
+ int[] endConfigure(int operatingMode, in CameraMetadataNative sessionParams);
/**
* Check whether a particular session configuration has camera device
@@ -189,5 +190,5 @@
* @return Offline session object.
*/
ICameraOfflineSession switchToOffline(in ICameraDeviceCallbacks callbacks,
- in Surface[] offlineOutputs);
+ in int[] offlineOutputIds);
}
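
Illustrative sketch, not part of the patch: client-side use of the revised interface, mirroring the calls updated in ACameraDevice.cpp and CameraBinderTests.cpp later in this change. The generated C++ proxy signature of switchToOffline (offline session returned through a trailing out-parameter) is an assumption; `device`, `callbacks`, and `sessionParams` are placeholders.

// endConfigure() now reports which configured streams can be moved to offline
// mode; those ids are what switchToOffline() consumes.
std::vector<int> offlineStreamIds;
binder::Status res = device->endConfigure(
        /*isConstrainedHighSpeed*/ false, sessionParams, &offlineStreamIds);
if (!res.isOk()) {
    // Configuration failed; offlineStreamIds is not meaningful.
}

// Assumed generated proxy signature for the AIDL method above.
sp<hardware::camera2::ICameraOfflineSession> offlineSession;
res = device->switchToOffline(callbacks, offlineStreamIds, &offlineSession);
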
diff --git a/camera/cameraserver/Android.bp b/camera/cameraserver/Android.bp
index 320c499..22e9578 100644
--- a/camera/cameraserver/Android.bp
+++ b/camera/cameraserver/Android.bp
@@ -32,6 +32,7 @@
"android.hardware.camera.common@1.0",
"android.hardware.camera.provider@2.4",
"android.hardware.camera.provider@2.5",
+ "android.hardware.camera.provider@2.6",
"android.hardware.camera.device@1.0",
"android.hardware.camera.device@3.2",
"android.hardware.camera.device@3.4",
diff --git a/camera/include/camera/CameraBase.h b/camera/include/camera/CameraBase.h
index 74a2dce..499b0e6 100644
--- a/camera/include/camera/CameraBase.h
+++ b/camera/include/camera/CameraBase.h
@@ -80,10 +80,16 @@
*/
int32_t status;
+ /**
+ * Unavailable physical camera names for a multi-camera device
+ */
+ std::vector<String8> unavailablePhysicalIds;
+
virtual status_t writeToParcel(android::Parcel* parcel) const;
virtual status_t readFromParcel(const android::Parcel* parcel);
- CameraStatus(String8 id, int32_t s) : cameraId(id), status(s) {}
+ CameraStatus(String8 id, int32_t s, const std::vector<String8>& unavailSubIds) :
+ cameraId(id), status(s), unavailablePhysicalIds(unavailSubIds) {}
CameraStatus() : status(ICameraServiceListener::STATUS_PRESENT) {}
};
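
Illustrative sketch, not part of the patch: round-tripping the extended CameraStatus through a Parcel to show that the new unavailablePhysicalIds field is carried along with the status. Assumes the android namespace is in scope and that CameraStatus lives in android::hardware as in libcamera_client.

void roundTripCameraStatus() {
    // Status for logical camera "0" with two currently unavailable physical cameras.
    hardware::CameraStatus status(String8("0"),
            hardware::ICameraServiceListener::STATUS_NOT_AVAILABLE,
            {String8("2"), String8("3")});

    Parcel parcel;
    status.writeToParcel(&parcel);
    parcel.setDataPosition(0);

    hardware::CameraStatus readBack;
    readBack.readFromParcel(&parcel);
    // readBack.unavailablePhysicalIds now holds {"2", "3"} again.
}
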
diff --git a/camera/ndk/Android.bp b/camera/ndk/Android.bp
index d8220eb..56f209c 100644
--- a/camera/ndk/Android.bp
+++ b/camera/ndk/Android.bp
@@ -57,6 +57,9 @@
"libmediandk",
"libnativewindow",
],
+ header_libs: [
+ "jni_headers",
+ ],
cflags: [
"-fvisibility=hidden",
"-DEXPORT=__attribute__ ((visibility (\"default\")))",
@@ -121,7 +124,6 @@
"android.frameworks.cameraservice.common@2.0",
"android.frameworks.cameraservice.service@2.0",
],
-
static_libs: [
"android.hardware.camera.common@1.0-helper",
"libarect",
diff --git a/camera/ndk/NdkCameraMetadata.cpp b/camera/ndk/NdkCameraMetadata.cpp
index 9a39ed8..99691ed 100644
--- a/camera/ndk/NdkCameraMetadata.cpp
+++ b/camera/ndk/NdkCameraMetadata.cpp
@@ -26,6 +26,68 @@
using namespace android;
+#ifndef __ANDROID_VNDK__
+namespace {
+
+constexpr const char* android_hardware_camera2_CameraMetadata_jniClassName =
+ "android/hardware/camera2/CameraMetadata";
+constexpr const char* android_hardware_camera2_CameraCharacteristics_jniClassName =
+ "android/hardware/camera2/CameraCharacteristics";
+constexpr const char* android_hardware_camera2_CaptureResult_jniClassName =
+ "android/hardware/camera2/CaptureResult";
+
+jclass android_hardware_camera2_CameraCharacteristics_clazz = nullptr;
+jclass android_hardware_camera2_CaptureResult_clazz = nullptr;
+jmethodID android_hardware_camera2_CameraMetadata_getNativeMetadataPtr = nullptr;
+
+// Called at most once to initialize global variables used by JNI.
+bool InitJni(JNIEnv* env) {
+ // From C++11 onward, initialization of a local static variable is guaranteed to run
+ // at most once, even when reached concurrently from multiple threads.
+ static bool ok = [env]() -> bool {
+ const jclass cameraMetadataClazz = env->FindClass(
+ android_hardware_camera2_CameraMetadata_jniClassName);
+ if (cameraMetadataClazz == nullptr) {
+ return false;
+ }
+ android_hardware_camera2_CameraMetadata_getNativeMetadataPtr =
+ env->GetMethodID(cameraMetadataClazz, "getNativeMetadataPtr", "()J");
+ if (android_hardware_camera2_CameraMetadata_getNativeMetadataPtr == nullptr) {
+ return false;
+ }
+
+ android_hardware_camera2_CameraCharacteristics_clazz = env->FindClass(
+ android_hardware_camera2_CameraCharacteristics_jniClassName);
+ if (android_hardware_camera2_CameraCharacteristics_clazz == nullptr) {
+ return false;
+ }
+
+ android_hardware_camera2_CaptureResult_clazz = env->FindClass(
+ android_hardware_camera2_CaptureResult_jniClassName);
+ if (android_hardware_camera2_CaptureResult_clazz == nullptr) {
+ return false;
+ }
+
+ return true;
+ }();
+ return ok;
+}
+
+// Given cameraMetadata, an instance of android.hardware.camera2.CameraMetadata, invokes
+// cameraMetadata.getNativeMetadataPtr() and returns it as a CameraMetadata*.
+CameraMetadata* CameraMetadata_getNativeMetadataPtr(JNIEnv* env, jobject cameraMetadata) {
+ if (cameraMetadata == nullptr) {
+ ALOGE("%s: Invalid Java CameraMetadata object.", __FUNCTION__);
+ return nullptr;
+ }
+ jlong ret = env->CallLongMethod(cameraMetadata,
+ android_hardware_camera2_CameraMetadata_getNativeMetadataPtr);
+ return reinterpret_cast<CameraMetadata *>(ret);
+}
+
+} // namespace
+#endif /* __ANDROID_VNDK__ */
+
EXPORT
camera_status_t ACameraMetadata_getConstEntry(
const ACameraMetadata* acm, uint32_t tag, ACameraMetadata_const_entry* entry) {
@@ -58,7 +120,7 @@
return nullptr;
}
ACameraMetadata* copy = new ACameraMetadata(*src);
- copy->incStrong((void*) ACameraMetadata_copy);
+ copy->incStrong(/*id=*/(void*) ACameraMetadata_copy);
return copy;
}
@@ -86,3 +148,34 @@
return staticMetadata->isLogicalMultiCamera(numPhysicalCameras, physicalCameraIds);
}
+
+#ifndef __ANDROID_VNDK__
+EXPORT
+ACameraMetadata* ACameraMetadata_fromCameraMetadata(JNIEnv* env, jobject cameraMetadata) {
+ ATRACE_CALL();
+
+ const bool ok = InitJni(env);
+ LOG_ALWAYS_FATAL_IF(!ok, "Failed to find CameraMetadata Java classes.");
+
+ if (cameraMetadata == nullptr) {
+ return nullptr;
+ }
+
+ ACameraMetadata::ACAMERA_METADATA_TYPE type;
+ if (env->IsInstanceOf(cameraMetadata,
+ android_hardware_camera2_CameraCharacteristics_clazz)) {
+ type = ACameraMetadata::ACM_CHARACTERISTICS;
+ } else if (env->IsInstanceOf(cameraMetadata,
+ android_hardware_camera2_CaptureResult_clazz)) {
+ type = ACameraMetadata::ACM_RESULT;
+ } else {
+ return nullptr;
+ }
+
+ CameraMetadata* src = CameraMetadata_getNativeMetadataPtr(env,
+ cameraMetadata);
+ ACameraMetadata* output = new ACameraMetadata(src, type);
+ output->incStrong(/*id=*/(void*) ACameraMetadata_fromCameraMetadata);
+ return output;
+}
+#endif /* __ANDROID_VNDK__ */
\ No newline at end of file
diff --git a/camera/ndk/impl/ACameraDevice.cpp b/camera/ndk/impl/ACameraDevice.cpp
index 46a8dae..0d7180a 100644
--- a/camera/ndk/impl/ACameraDevice.cpp
+++ b/camera/ndk/impl/ACameraDevice.cpp
@@ -710,7 +710,8 @@
if ((sessionParameters != nullptr) && (sessionParameters->settings != nullptr)) {
params.append(sessionParameters->settings->getInternalData());
}
- remoteRet = mRemote->endConfigure(/*isConstrainedHighSpeed*/ false, params);
+ std::vector<int> offlineStreamIds;
+ remoteRet = mRemote->endConfigure(/*isConstrainedHighSpeed*/ false, params, &offlineStreamIds);
if (remoteRet.serviceSpecificErrorCode() == hardware::ICameraService::ERROR_ILLEGAL_ARGUMENT) {
ALOGE("Camera device %s cannnot support app output configuration: %s", getId(),
remoteRet.toString8().string());
diff --git a/camera/ndk/impl/ACameraManager.cpp b/camera/ndk/impl/ACameraManager.cpp
index 7a0f63b..4870265 100644
--- a/camera/ndk/impl/ACameraManager.cpp
+++ b/camera/ndk/impl/ACameraManager.cpp
@@ -32,6 +32,7 @@
namespace acam {
// Static member definitions
const char* CameraManagerGlobal::kCameraIdKey = "CameraId";
+const char* CameraManagerGlobal::kPhysicalCameraIdKey = "PhysicalCameraId";
const char* CameraManagerGlobal::kCallbackFpKey = "CallbackFp";
const char* CameraManagerGlobal::kContextKey = "CallbackContext";
Mutex CameraManagerGlobal::sLock;
@@ -220,7 +221,7 @@
if (pair.second) {
for (auto& pair : mDeviceStatusMap) {
const String8& cameraId = pair.first;
- int32_t status = pair.second.status;
+ int32_t status = pair.second.getStatus();
// Don't send initial callbacks for camera ids which don't support
// camera2
if (!pair.second.supportsHAL3) {
@@ -264,9 +265,9 @@
// Needed to make sure we're connected to cameraservice
getCameraServiceLocked();
for(auto& deviceStatus : mDeviceStatusMap) {
- if (deviceStatus.second.status == hardware::ICameraServiceListener::STATUS_NOT_PRESENT ||
- deviceStatus.second.status ==
- hardware::ICameraServiceListener::STATUS_ENUMERATING) {
+ int32_t status = deviceStatus.second.getStatus();
+ if (status == hardware::ICameraServiceListener::STATUS_NOT_PRESENT ||
+ status == hardware::ICameraServiceListener::STATUS_ENUMERATING) {
continue;
}
if (!deviceStatus.second.supportsHAL3) {
@@ -341,6 +342,39 @@
(*cb)(context);
break;
}
+ case kWhatSendSinglePhysicalCameraCallback:
+ {
+ ACameraManager_PhysicalCameraAvailabilityCallback cb;
+ void* context;
+ AString cameraId;
+ AString physicalCameraId;
+ bool found = msg->findPointer(kCallbackFpKey, (void**) &cb);
+ if (!found) {
+ ALOGE("%s: Cannot find camera callback fp!", __FUNCTION__);
+ return;
+ }
+ if (cb == nullptr) {
+ // Physical camera callback is null
+ return;
+ }
+ found = msg->findPointer(kContextKey, &context);
+ if (!found) {
+ ALOGE("%s: Cannot find callback context!", __FUNCTION__);
+ return;
+ }
+ found = msg->findString(kCameraIdKey, &cameraId);
+ if (!found) {
+ ALOGE("%s: Cannot find camera ID!", __FUNCTION__);
+ return;
+ }
+ found = msg->findString(kPhysicalCameraIdKey, &physicalCameraId);
+ if (!found) {
+ ALOGE("%s: Cannot find physical camera ID!", __FUNCTION__);
+ return;
+ }
+ (*cb)(context, cameraId.c_str(), physicalCameraId.c_str());
+ break;
+ }
default:
ALOGE("%s: unknown message type %d", __FUNCTION__, msg->what());
break;
@@ -368,6 +402,17 @@
return binder::Status::ok();
}
+binder::Status CameraManagerGlobal::CameraServiceListener::onPhysicalCameraStatusChanged(
+ int32_t status, const String16& cameraId, const String16& physicalCameraId) {
+ sp<CameraManagerGlobal> cm = mCameraManager.promote();
+ if (cm != nullptr) {
+ cm->onStatusChanged(status, String8(cameraId), String8(physicalCameraId));
+ } else {
+ ALOGE("Cannot deliver physical camera status change. Global camera manager died");
+ }
+ return binder::Status::ok();
+}
+
void CameraManagerGlobal::onCameraAccessPrioritiesChanged() {
Mutex::Autolock _l(mLock);
for (auto cb : mCallbacks) {
@@ -397,7 +442,7 @@
bool firstStatus = (mDeviceStatusMap.count(cameraId) == 0);
int32_t oldStatus = firstStatus ?
status : // first status
- mDeviceStatusMap[cameraId].status;
+ mDeviceStatusMap[cameraId].getStatus();
if (!firstStatus &&
isStatusAvailable(status) == isStatusAvailable(oldStatus)) {
@@ -406,8 +451,14 @@
}
bool supportsHAL3 = supportsCamera2ApiLocked(cameraId);
+ if (firstStatus) {
+ mDeviceStatusMap.emplace(std::piecewise_construct,
+ std::forward_as_tuple(cameraId),
+ std::forward_as_tuple(status, supportsHAL3));
+ } else {
+ mDeviceStatusMap[cameraId].updateStatus(status);
+ }
// Iterate through all registered callbacks
- mDeviceStatusMap[cameraId] = StatusAndHAL3Support(status, supportsHAL3);
if (supportsHAL3) {
for (auto cb : mCallbacks) {
sp<AMessage> msg = new AMessage(kWhatSendSingleCallback, mHandler);
@@ -424,6 +475,81 @@
}
}
+void CameraManagerGlobal::onStatusChanged(
+ int32_t status, const String8& cameraId, const String8& physicalCameraId) {
+ Mutex::Autolock _l(mLock);
+ onStatusChangedLocked(status, cameraId, physicalCameraId);
+}
+
+void CameraManagerGlobal::onStatusChangedLocked(
+ int32_t status, const String8& cameraId, const String8& physicalCameraId) {
+ if (!validStatus(status)) {
+ ALOGE("%s: Invalid status %d", __FUNCTION__, status);
+ return;
+ }
+
+ auto logicalStatus = mDeviceStatusMap.find(cameraId);
+ if (logicalStatus == mDeviceStatusMap.end()) {
+ ALOGE("%s: Physical camera id %s status change on a non-present id %s",
+ __FUNCTION__, physicalCameraId.c_str(), cameraId.c_str());
+ return;
+ }
+ int32_t logicalCamStatus = mDeviceStatusMap[cameraId].getStatus();
+ if (logicalCamStatus != hardware::ICameraServiceListener::STATUS_PRESENT &&
+ logicalCamStatus != hardware::ICameraServiceListener::STATUS_NOT_AVAILABLE) {
+ ALOGE("%s: Physical camera id %s status %d change for an invalid logical camera state %d",
+ __FUNCTION__, physicalCameraId.string(), status, logicalCamStatus);
+ return;
+ }
+
+ bool supportsHAL3 = supportsCamera2ApiLocked(cameraId);
+
+ bool updated = false;
+ if (status == hardware::ICameraServiceListener::STATUS_PRESENT) {
+ updated = mDeviceStatusMap[cameraId].removeUnavailablePhysicalId(physicalCameraId);
+ } else {
+ updated = mDeviceStatusMap[cameraId].addUnavailablePhysicalId(physicalCameraId);
+ }
+
+ // Iterate through all registered callbacks
+ if (supportsHAL3 && updated) {
+ for (auto cb : mCallbacks) {
+ sp<AMessage> msg = new AMessage(kWhatSendSinglePhysicalCameraCallback, mHandler);
+ ACameraManager_PhysicalCameraAvailabilityCallback cbFp = isStatusAvailable(status) ?
+ cb.mPhysicalCamAvailable : cb.mPhysicalCamUnavailable;
+ msg->setPointer(kCallbackFpKey, (void *) cbFp);
+ msg->setPointer(kContextKey, cb.mContext);
+ msg->setString(kCameraIdKey, AString(cameraId));
+ msg->setString(kPhysicalCameraIdKey, AString(physicalCameraId));
+ msg->post();
+ }
+ }
+}
+
+int32_t CameraManagerGlobal::StatusAndHAL3Support::getStatus() {
+ std::lock_guard<std::mutex> lock(mLock);
+ return status;
+}
+
+void CameraManagerGlobal::StatusAndHAL3Support::updateStatus(int32_t newStatus) {
+ std::lock_guard<std::mutex> lock(mLock);
+ status = newStatus;
+}
+
+bool CameraManagerGlobal::StatusAndHAL3Support::addUnavailablePhysicalId(
+ const String8& physicalCameraId) {
+ std::lock_guard<std::mutex> lock(mLock);
+ auto result = unavailablePhysicalIds.insert(physicalCameraId);
+ return result.second;
+}
+
+bool CameraManagerGlobal::StatusAndHAL3Support::removeUnavailablePhysicalId(
+ const String8& physicalCameraId) {
+ std::lock_guard<std::mutex> lock(mLock);
+ auto count = unavailablePhysicalIds.erase(physicalCameraId);
+ return count > 0;
+}
+
} // namespace acam
} // namespace android
diff --git a/camera/ndk/impl/ACameraManager.h b/camera/ndk/impl/ACameraManager.h
index e945ba0..98cd934 100644
--- a/camera/ndk/impl/ACameraManager.h
+++ b/camera/ndk/impl/ACameraManager.h
@@ -85,6 +85,8 @@
public:
explicit CameraServiceListener(CameraManagerGlobal* cm) : mCameraManager(cm) {}
virtual binder::Status onStatusChanged(int32_t status, const String16& cameraId);
+ virtual binder::Status onPhysicalCameraStatusChanged(int32_t status,
+ const String16& cameraId, const String16& physicalCameraId);
// Torch API not implemented yet
virtual binder::Status onTorchStatusChanged(int32_t, const String16&) {
@@ -104,18 +106,24 @@
mAvailable(callback->onCameraAvailable),
mUnavailable(callback->onCameraUnavailable),
mAccessPriorityChanged(nullptr),
+ mPhysicalCamAvailable(nullptr),
+ mPhysicalCamUnavailable(nullptr),
mContext(callback->context) {}
explicit Callback(const ACameraManager_ExtendedAvailabilityCallbacks *callback) :
mAvailable(callback->availabilityCallbacks.onCameraAvailable),
mUnavailable(callback->availabilityCallbacks.onCameraUnavailable),
mAccessPriorityChanged(callback->onCameraAccessPrioritiesChanged),
+ mPhysicalCamAvailable(callback->onPhysicalCameraAvailable),
+ mPhysicalCamUnavailable(callback->onPhysicalCameraUnavailable),
mContext(callback->availabilityCallbacks.context) {}
bool operator == (const Callback& other) const {
return (mAvailable == other.mAvailable &&
mUnavailable == other.mUnavailable &&
mAccessPriorityChanged == other.mAccessPriorityChanged &&
+ mPhysicalCamAvailable == other.mPhysicalCamAvailable &&
+ mPhysicalCamUnavailable == other.mPhysicalCamUnavailable &&
mContext == other.mContext);
}
bool operator != (const Callback& other) const {
@@ -124,6 +132,12 @@
bool operator < (const Callback& other) const {
if (*this == other) return false;
if (mContext != other.mContext) return mContext < other.mContext;
+ if (mPhysicalCamAvailable != other.mPhysicalCamAvailable) {
+ return mPhysicalCamAvailable < other.mPhysicalCamAvailable;
+ }
+ if (mPhysicalCamUnavailable != other.mPhysicalCamUnavailable) {
+ return mPhysicalCamUnavailable < other.mPhysicalCamUnavailable;
+ }
if (mAccessPriorityChanged != other.mAccessPriorityChanged) {
return mAccessPriorityChanged < other.mAccessPriorityChanged;
}
@@ -136,6 +150,8 @@
ACameraManager_AvailabilityCallback mAvailable;
ACameraManager_AvailabilityCallback mUnavailable;
ACameraManager_AccessPrioritiesChangedCallback mAccessPriorityChanged;
+ ACameraManager_PhysicalCameraAvailabilityCallback mPhysicalCamAvailable;
+ ACameraManager_PhysicalCameraAvailabilityCallback mPhysicalCamUnavailable;
void* mContext;
};
std::set<Callback> mCallbacks;
@@ -144,8 +160,10 @@
enum {
kWhatSendSingleCallback,
kWhatSendSingleAccessCallback,
+ kWhatSendSinglePhysicalCameraCallback,
};
static const char* kCameraIdKey;
+ static const char* kPhysicalCameraIdKey;
static const char* kCallbackFpKey;
static const char* kContextKey;
class CallbackHandler : public AHandler {
@@ -160,6 +178,9 @@
void onCameraAccessPrioritiesChanged();
void onStatusChanged(int32_t status, const String8& cameraId);
void onStatusChangedLocked(int32_t status, const String8& cameraId);
+ void onStatusChanged(int32_t status, const String8& cameraId, const String8& physicalCameraId);
+ void onStatusChangedLocked(int32_t status, const String8& cameraId,
+ const String8& physicalCameraId);
// Utils for status
static bool validStatus(int32_t status);
static bool isStatusAvailable(int32_t status);
@@ -187,11 +208,20 @@
};
struct StatusAndHAL3Support {
+ private:
int32_t status = hardware::ICameraServiceListener::STATUS_NOT_PRESENT;
- bool supportsHAL3 = false;
+ mutable std::mutex mLock;
+ std::set<String8> unavailablePhysicalIds;
+ public:
+ const bool supportsHAL3 = false;
StatusAndHAL3Support(int32_t st, bool HAL3support):
status(st), supportsHAL3(HAL3support) { };
StatusAndHAL3Support() = default;
+
+ bool addUnavailablePhysicalId(const String8& physicalCameraId);
+ bool removeUnavailablePhysicalId(const String8& physicalCameraId);
+ int32_t getStatus();
+ void updateStatus(int32_t newStatus);
};
// Map camera_id -> status
diff --git a/camera/ndk/impl/ACameraMetadata.cpp b/camera/ndk/impl/ACameraMetadata.cpp
index e15e1a0..0ff78ab 100644
--- a/camera/ndk/impl/ACameraMetadata.cpp
+++ b/camera/ndk/impl/ACameraMetadata.cpp
@@ -28,7 +28,37 @@
* ACameraMetadata Implementation
*/
ACameraMetadata::ACameraMetadata(camera_metadata_t* buffer, ACAMERA_METADATA_TYPE type) :
- mData(buffer), mType(type) {
+ mData(new CameraMetadata(buffer)),
+ mOwnsData(true),
+ mType(type) {
+ init();
+}
+
+ACameraMetadata::ACameraMetadata(CameraMetadata* cameraMetadata, ACAMERA_METADATA_TYPE type) :
+ mData(cameraMetadata),
+ mOwnsData(false),
+ mType(type) {
+ init();
+}
+
+ACameraMetadata::ACameraMetadata(const ACameraMetadata& other) :
+ mOwnsData(other.mOwnsData),
+ mType(other.mType) {
+ if (other.mOwnsData) {
+ mData = new CameraMetadata(*(other.mData));
+ } else {
+ mData = other.mData;
+ }
+}
+
+ACameraMetadata::~ACameraMetadata() {
+ if (mOwnsData) {
+ delete mData;
+ }
+}
+
+void
+ACameraMetadata::init() {
if (mType == ACM_CHARACTERISTICS) {
filterUnsupportedFeatures();
filterStreamConfigurations();
@@ -61,7 +91,7 @@
void
ACameraMetadata::filterUnsupportedFeatures() {
// Hide unsupported capabilities (reprocessing)
- camera_metadata_entry entry = mData.find(ANDROID_REQUEST_AVAILABLE_CAPABILITIES);
+ camera_metadata_entry entry = mData->find(ANDROID_REQUEST_AVAILABLE_CAPABILITIES);
if (entry.count == 0 || entry.type != TYPE_BYTE) {
ALOGE("%s: malformed available capability key! count %zu, type %d",
__FUNCTION__, entry.count, entry.type);
@@ -80,7 +110,7 @@
}
}
}
- mData.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES, capabilities);
+ mData->update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES, capabilities);
}
void
@@ -118,7 +148,7 @@
const int STREAM_WIDTH_OFFSET = 1;
const int STREAM_HEIGHT_OFFSET = 2;
const int STREAM_DURATION_OFFSET = 3;
- camera_metadata_entry entry = mData.find(tag);
+ camera_metadata_entry entry = mData->find(tag);
if (entry.count == 0 || entry.count % 4 || entry.type != TYPE_INT64) {
ALOGE("%s: malformed duration key %d! count %zu, type %d",
__FUNCTION__, tag, entry.count, entry.type);
@@ -194,7 +224,7 @@
}
}
- mData.update(tag, filteredDurations);
+ mData->update(tag, filteredDurations);
}
void
@@ -204,7 +234,7 @@
const int STREAM_WIDTH_OFFSET = 1;
const int STREAM_HEIGHT_OFFSET = 2;
const int STREAM_IS_INPUT_OFFSET = 3;
- camera_metadata_entry entry = mData.find(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS);
+ camera_metadata_entry entry = mData->find(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS);
if (entry.count > 0 && (entry.count % 4 || entry.type != TYPE_INT32)) {
ALOGE("%s: malformed available stream configuration key! count %zu, type %d",
__FUNCTION__, entry.count, entry.type);
@@ -234,10 +264,10 @@
}
if (filteredStreamConfigs.size() > 0) {
- mData.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS, filteredStreamConfigs);
+ mData->update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS, filteredStreamConfigs);
}
- entry = mData.find(ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS);
+ entry = mData->find(ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS);
if (entry.count > 0 && (entry.count % 4 || entry.type != TYPE_INT32)) {
ALOGE("%s: malformed available depth stream configuration key! count %zu, type %d",
__FUNCTION__, entry.count, entry.type);
@@ -270,11 +300,11 @@
}
if (filteredDepthStreamConfigs.size() > 0) {
- mData.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
+ mData->update(ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
filteredDepthStreamConfigs);
}
- entry = mData.find(ANDROID_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS);
+ entry = mData->find(ANDROID_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS);
Vector<int32_t> filteredHeicStreamConfigs;
filteredHeicStreamConfigs.setCapacity(entry.count);
@@ -297,9 +327,9 @@
filteredHeicStreamConfigs.push_back(height);
filteredHeicStreamConfigs.push_back(isInput);
}
- mData.update(ANDROID_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS, filteredHeicStreamConfigs);
+ mData->update(ANDROID_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS, filteredHeicStreamConfigs);
- entry = mData.find(ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS);
+ entry = mData->find(ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS);
Vector<int32_t> filteredDynamicDepthStreamConfigs;
filteredDynamicDepthStreamConfigs.setCapacity(entry.count);
@@ -322,7 +352,7 @@
filteredDynamicDepthStreamConfigs.push_back(height);
filteredDynamicDepthStreamConfigs.push_back(isInput);
}
- mData.update(ACAMERA_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS,
+ mData->update(ACAMERA_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS,
filteredDynamicDepthStreamConfigs);
}
@@ -343,7 +373,7 @@
Mutex::Autolock _l(mLock);
- camera_metadata_ro_entry rawEntry = mData.find(tag);
+ camera_metadata_ro_entry rawEntry = static_cast<const CameraMetadata*>(mData)->find(tag);
if (rawEntry.count == 0) {
ALOGE("%s: cannot find metadata tag %d", __FUNCTION__, tag);
return ACAMERA_ERROR_METADATA_NOT_FOUND;
@@ -390,9 +420,9 @@
/*out*/const uint32_t** tags) const {
Mutex::Autolock _l(mLock);
if (mTags.size() == 0) {
- size_t entry_count = mData.entryCount();
+ size_t entry_count = mData->entryCount();
mTags.setCapacity(entry_count);
- const camera_metadata_t* rawMetadata = mData.getAndLock();
+ const camera_metadata_t* rawMetadata = mData->getAndLock();
for (size_t i = 0; i < entry_count; i++) {
camera_metadata_ro_entry_t entry;
int ret = get_camera_metadata_ro_entry(rawMetadata, i, &entry);
@@ -405,7 +435,7 @@
mTags.push_back(entry.tag);
}
}
- mData.unlock(rawMetadata);
+ mData->unlock(rawMetadata);
}
*numTags = mTags.size();
@@ -415,7 +445,7 @@
const CameraMetadata&
ACameraMetadata::getInternalData() const {
- return mData;
+ return (*mData);
}
bool
diff --git a/camera/ndk/impl/ACameraMetadata.h b/camera/ndk/impl/ACameraMetadata.h
index 97f7f48..a57b2ff 100644
--- a/camera/ndk/impl/ACameraMetadata.h
+++ b/camera/ndk/impl/ACameraMetadata.h
@@ -36,8 +36,8 @@
using namespace android;
/**
- * ACameraMetadata opaque struct definition
- * Leave outside of android namespace because it's NDK struct
+ * ACameraMetadata is an opaque struct definition.
+ * It is intentionally left outside of the android namespace because it is an NDK struct.
*/
struct ACameraMetadata : public RefBase {
public:
@@ -47,11 +47,20 @@
ACM_RESULT, // Read only
} ACAMERA_METADATA_TYPE;
- // Takes ownership of pass-in buffer
- ACameraMetadata(camera_metadata_t *buffer, ACAMERA_METADATA_TYPE type);
- // Clone
- ACameraMetadata(const ACameraMetadata& other) :
- mData(other.mData), mType(other.mType) {};
+ // Constructs an ACameraMetadata that takes ownership of `buffer`.
+ ACameraMetadata(camera_metadata_t* buffer, ACAMERA_METADATA_TYPE type);
+
+ // Constructs an ACameraMetadata that is a view of `cameraMetadata`.
+ // `cameraMetadata` will not be deleted by ~ACameraMetadata().
+ ACameraMetadata(CameraMetadata* cameraMetadata, ACAMERA_METADATA_TYPE type);
+
+ // Copy constructor.
+ //
+ // If `other` owns its CameraMetadata, then makes a deep copy.
+ // Otherwise, the new instance is also a view of the same data.
+ ACameraMetadata(const ACameraMetadata& other);
+
+ ~ACameraMetadata();
camera_status_t getConstEntry(uint32_t tag, ACameraMetadata_const_entry* entry) const;
@@ -70,6 +79,9 @@
private:
+ // Common code called by constructors.
+ void init();
+
// This function does not check whether the capability passed to it is valid.
// The caller must make sure that it is.
bool isNdkSupportedCapability(const int32_t capability);
@@ -95,11 +107,11 @@
status_t ret = OK;
if (count == 0 && data == nullptr) {
- ret = mData.erase(tag);
+ ret = mData->erase(tag);
} else {
// Here we have to use reinterpret_cast because the NDK data type is
// exact copy of internal data type but they do not inherit from each other
- ret = mData.update(tag, reinterpret_cast<const INTERNAL_T*>(data), count);
+ ret = mData->update(tag, reinterpret_cast<const INTERNAL_T*>(data), count);
}
if (ret == OK) {
@@ -110,10 +122,14 @@
}
}
- // guard access of public APIs: get/update/getTags
- mutable Mutex mLock;
- CameraMetadata mData;
- mutable Vector<uint32_t> mTags; // updated in getTags, cleared by update
+ // Guard access of public APIs: get/update/getTags.
+ mutable Mutex mLock;
+
+ CameraMetadata* mData;
+ // If true, has ownership of mData. Otherwise, mData is a view of an external instance.
+ bool mOwnsData;
+
+ mutable Vector<uint32_t> mTags; // Updated by `getTags()`, cleared by `update()`.
const ACAMERA_METADATA_TYPE mType;
static std::unordered_set<uint32_t> sSystemTags;
diff --git a/camera/ndk/include/camera/NdkCameraManager.h b/camera/ndk/include/camera/NdkCameraManager.h
index 2cc8a97..e2b71bf 100644
--- a/camera/ndk/include/camera/NdkCameraManager.h
+++ b/camera/ndk/include/camera/NdkCameraManager.h
@@ -123,6 +123,21 @@
const char* cameraId);
/**
+ * Definition of physical camera availability callbacks.
+ *
+ * @param context The optional application context provided by user in
+ * {@link ACameraManager_AvailabilityCallbacks}.
+ * @param cameraId The ID of the logical multi-camera device whose physical camera status is
+ * changing. The memory of this argument is owned by the camera framework and
+ * will become invalid immediately after this callback returns.
+ * @param physicalCameraId The ID of the physical camera device whose status is changing. The
+ * memory of this argument is owned by the camera framework and will become
+ * invalid immediately after this callback returns.
+ */
+typedef void (*ACameraManager_PhysicalCameraAvailabilityCallback)(void* context,
+ const char* cameraId, const char* physicalCameraId);
+
+/**
* A listener for camera devices becoming available or unavailable to open.
*
* <p>Cameras become available when they are no longer in use, or when a new
@@ -320,8 +335,15 @@
/// Called when there is camera access permission change
ACameraManager_AccessPrioritiesChangedCallback onCameraAccessPrioritiesChanged;
+ /// Called when a physical camera becomes available
+ ACameraManager_PhysicalCameraAvailabilityCallback onPhysicalCameraAvailable __INTRODUCED_IN(30);
+
+ /// Called when a physical camera becomes unavailable
+ ACameraManager_PhysicalCameraAvailabilityCallback onPhysicalCameraUnavailable
+ __INTRODUCED_IN(30);
+
/// Reserved for future use, please ensure that all entries are set to NULL
- void *reserved[6];
+ void *reserved[4];
} ACameraManager_ExtendedAvailabilityCallbacks;
/**
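
Illustrative sketch, not part of the patch: wiring the new physical-camera callbacks into the existing extended availability registration. The callback bodies are placeholders; ACameraManager_registerExtendedAvailabilityCallback is the pre-existing registration entry point.

#include <camera/NdkCameraManager.h>

static void onAvailable(void* /*context*/, const char* /*cameraId*/) {}
static void onUnavailable(void* /*context*/, const char* /*cameraId*/) {}
static void onAccessPrioritiesChanged(void* /*context*/) {}

static void onPhysicalAvailable(void* /*context*/, const char* cameraId,
                                const char* physicalCameraId) {
    // Both id strings are only valid for the duration of this callback.
    (void)cameraId; (void)physicalCameraId;
}

static void onPhysicalUnavailable(void* /*context*/, const char* cameraId,
                                  const char* physicalCameraId) {
    (void)cameraId; (void)physicalCameraId;
}

void registerPhysicalCameraCallbacks(ACameraManager* manager) {
    ACameraManager_ExtendedAvailabilityCallbacks cbs = {};
    cbs.availabilityCallbacks.context = nullptr;
    cbs.availabilityCallbacks.onCameraAvailable = onAvailable;
    cbs.availabilityCallbacks.onCameraUnavailable = onUnavailable;
    cbs.onCameraAccessPrioritiesChanged = onAccessPrioritiesChanged;
    cbs.onPhysicalCameraAvailable = onPhysicalAvailable;
    cbs.onPhysicalCameraUnavailable = onPhysicalUnavailable;
    ACameraManager_registerExtendedAvailabilityCallback(manager, &cbs);
}
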
diff --git a/camera/ndk/include/camera/NdkCameraMetadata.h b/camera/ndk/include/camera/NdkCameraMetadata.h
index 9bbfb83..ee75610 100644
--- a/camera/ndk/include/camera/NdkCameraMetadata.h
+++ b/camera/ndk/include/camera/NdkCameraMetadata.h
@@ -39,6 +39,12 @@
#include <stdint.h>
#include <sys/cdefs.h>
+#ifndef __ANDROID_VNDK__
+#if __ANDROID_API__ >= 30
+#include "jni.h"
+#endif /* __ANDROID_API__ >= 30 */
+#endif /* __ANDROID_VNDK__ */
+
#include "NdkCameraError.h"
#include "NdkCameraMetadataTags.h"
@@ -255,8 +261,40 @@
#endif /* __ANDROID_API__ >= 29 */
+#ifndef __ANDROID_VNDK__
+#if __ANDROID_API__ >= 30
+
+/**
+ * Return a {@link ACameraMetadata} that references the same data as
+ * {@link cameraMetadata}, which is an instance of
+ * {@link android.hardware.camera2.CameraMetadata} (e.g., a
+ * {@link android.hardware.camera2.CameraCharacteristics} or
+ * {@link android.hardware.camera2.CaptureResult}).
+ *
+ * <p>The returned ACameraMetadata must be freed by the application with {@link ACameraMetadata_free}
+ * after the application is done using it.</p>
+ *
+ * <p>This function does not affect the lifetime of {@link cameraMetadata}. Attempting to use the
+ * returned ACameraMetadata object after {@link cameraMetadata} has been garbage collected is
+ * unsafe. To manage the lifetime beyond the current JNI function call, use
+ * {@code env->NewGlobalRef()} and {@code env->DeleteGlobalRef()}.</p>
+ *
+ * @param env the JNI environment.
+ * @param cameraMetadata the source {@link android.hardware.camera2.CameraMetadata} from which the
+ * returned {@link ACameraMetadata} is a view.
+ *
+ * @return a valid ACameraMetadata pointer or NULL if {@link cameraMetadata} is null or not a valid
+ * instance of {@link android.hardware.camera2.CameraMetadata}.
+ *
+ */
+ACameraMetadata* ACameraMetadata_fromCameraMetadata(JNIEnv* env, jobject cameraMetadata)
+ __INTRODUCED_IN(30);
+
+#endif /* __ANDROID_API__ >= 30 */
+#endif /* __ANDROID_VNDK__ */
+
__END_DECLS
#endif /* _NDK_CAMERA_METADATA_H */
-/** @} */
+/** @} */
\ No newline at end of file
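
Illustrative sketch, not part of the patch: calling the new interop entry point from a JNI native method. The Java class and method names are hypothetical; error handling is minimal.

#include <jni.h>
#include <camera/NdkCameraMetadata.h>

// Hypothetical native method taking an android.hardware.camera2.CameraCharacteristics.
extern "C" JNIEXPORT jint JNICALL
Java_com_example_MetadataJni_sensorOrientation(JNIEnv* env, jclass, jobject characteristics) {
    ACameraMetadata* metadata = ACameraMetadata_fromCameraMetadata(env, characteristics);
    if (metadata == nullptr) {
        return -1;  // null input or not a CameraCharacteristics/CaptureResult
    }
    jint orientation = -1;
    ACameraMetadata_const_entry entry = {};
    if (ACameraMetadata_getConstEntry(metadata, ACAMERA_SENSOR_ORIENTATION, &entry) == ACAMERA_OK
            && entry.count > 0) {
        orientation = entry.data.i32[0];
    }
    // The returned object is a view of the Java-side metadata: free it here and do not
    // keep it past the lifetime of `characteristics`.
    ACameraMetadata_free(metadata);
    return orientation;
}
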
diff --git a/camera/ndk/include/camera/NdkCameraMetadataTags.h b/camera/ndk/include/camera/NdkCameraMetadataTags.h
index 6fefa41..8b371db 100644
--- a/camera/ndk/include/camera/NdkCameraMetadataTags.h
+++ b/camera/ndk/include/camera/NdkCameraMetadataTags.h
@@ -2509,6 +2509,11 @@
* <p><code>p' = Rp</code></p>
* <p>where <code>p</code> is in the device sensor coordinate system, and
* <code>p'</code> is in the camera-oriented coordinate system.</p>
+ * <p>If ACAMERA_LENS_POSE_REFERENCE is UNDEFINED, the quaternion rotation cannot
+ * be accurately represented by the camera device, and will be represented by
+ * default values matching its default facing.</p>
+ *
+ * @see ACAMERA_LENS_POSE_REFERENCE
*/
ACAMERA_LENS_POSE_ROTATION = // float[4]
ACAMERA_LENS_START + 6,
@@ -2549,6 +2554,8 @@
* <p>When ACAMERA_LENS_POSE_REFERENCE is GYROSCOPE, then this position is relative to
* the center of the primary gyroscope on the device. The axis definitions are the same as
* with PRIMARY_CAMERA.</p>
+ * <p>When ACAMERA_LENS_POSE_REFERENCE is UNDEFINED, this position cannot be accurately
+ * represented by the camera device, and will be represented as <code>(0, 0, 0)</code>.</p>
*
* @see ACAMERA_LENS_DISTORTION
* @see ACAMERA_LENS_INTRINSIC_CALIBRATION
@@ -2691,8 +2698,10 @@
ACAMERA_LENS_RADIAL_DISTORTION = // Deprecated! DO NOT USE
ACAMERA_LENS_START + 11,
/**
- * <p>The origin for ACAMERA_LENS_POSE_TRANSLATION.</p>
+ * <p>The origin for ACAMERA_LENS_POSE_TRANSLATION, and the accuracy of
+ * ACAMERA_LENS_POSE_TRANSLATION and ACAMERA_LENS_POSE_ROTATION.</p>
*
+ * @see ACAMERA_LENS_POSE_ROTATION
* @see ACAMERA_LENS_POSE_TRANSLATION
*
* <p>Type: byte (acamera_metadata_enum_android_lens_pose_reference_t)</p>
@@ -7539,6 +7548,19 @@
*/
ACAMERA_LENS_POSE_REFERENCE_GYROSCOPE = 1,
+ /**
+ * <p>The camera device cannot represent the values of ACAMERA_LENS_POSE_TRANSLATION
+ * and ACAMERA_LENS_POSE_ROTATION accurately enough. One such example is a camera device
+ * on the cover of a foldable phone: in order to measure the pose translation and rotation,
+ * some kind of hinge position sensor would be needed.</p>
+ * <p>The value of ACAMERA_LENS_POSE_TRANSLATION must be all zeros, and
+ * ACAMERA_LENS_POSE_ROTATION must be values matching its default facing.</p>
+ *
+ * @see ACAMERA_LENS_POSE_ROTATION
+ * @see ACAMERA_LENS_POSE_TRANSLATION
+ */
+ ACAMERA_LENS_POSE_REFERENCE_UNDEFINED = 2,
+
} acamera_metadata_enum_android_lens_pose_reference_t;
@@ -7919,14 +7941,20 @@
* <p>The camera device is a logical camera backed by two or more physical cameras.</p>
* <p>In API level 28, the physical cameras must also be exposed to the application via
* <a href="https://developer.android.com/reference/android/hardware/camera2/CameraManager.html#getCameraIdList">CameraManager#getCameraIdList</a>.</p>
- * <p>Starting from API level 29, some or all physical cameras may not be independently
- * exposed to the application, in which case the physical camera IDs will not be
- * available in <a href="https://developer.android.com/reference/android/hardware/camera2/CameraManager.html#getCameraIdList">CameraManager#getCameraIdList</a>. But the
+ * <p>Starting from API level 29:</p>
+ * <ul>
+ * <li>Some or all physical cameras may not be independently exposed to the application,
+ * in which case the physical camera IDs will not be available in
+ * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraManager.html#getCameraIdList">CameraManager#getCameraIdList</a>. But the
* application can still query the physical cameras' characteristics by calling
- * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraManager.html#getCameraCharacteristics">CameraManager#getCameraCharacteristics</a>. Additionally,
- * if a physical camera is hidden from camera ID list, the mandatory stream combinations
- * for that physical camera must be supported through the logical camera using physical
- * streams.</p>
+ * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraManager.html#getCameraCharacteristics">CameraManager#getCameraCharacteristics</a>.</li>
+ * <li>If a physical camera is hidden from camera ID list, the mandatory stream
+ * combinations for that physical camera must be supported through the logical camera
+ * using physical streams. One exception is that in API level 30, a physical camera
+ * may become unavailable via
+ * {@link ACameraManager_PhysicalCameraAvailabilityCallback }
+ * callback.</li>
+ * </ul>
* <p>Combinations of logical and physical streams, or physical streams from different
* physical cameras are not guaranteed. However, if the camera device supports
* {@link ACameraDevice_isSessionConfigurationSupported },
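
Illustrative sketch, not part of the patch: a reader that treats the lens pose as unusable when the new UNDEFINED reference is reported. `chars` is assumed to be the camera's characteristics metadata obtained elsewhere.

#include <camera/NdkCameraMetadata.h>

// Copies the lens pose translation into `translation[3]`; returns false when the
// pose reference is UNDEFINED (the values are placeholders) or the entries are missing.
bool getUsableLensPoseTranslation(const ACameraMetadata* chars, float translation[3]) {
    ACameraMetadata_const_entry ref = {};
    if (ACameraMetadata_getConstEntry(chars, ACAMERA_LENS_POSE_REFERENCE, &ref) != ACAMERA_OK
            || ref.count == 0
            || ref.data.u8[0] == ACAMERA_LENS_POSE_REFERENCE_UNDEFINED) {
        return false;  // translation is (0, 0, 0), rotation a default matching the facing
    }
    ACameraMetadata_const_entry pose = {};
    if (ACameraMetadata_getConstEntry(chars, ACAMERA_LENS_POSE_TRANSLATION, &pose) != ACAMERA_OK
            || pose.count < 3) {
        return false;
    }
    translation[0] = pose.data.f[0];
    translation[1] = pose.data.f[1];
    translation[2] = pose.data.f[2];
    return true;
}
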
diff --git a/camera/ndk/libcamera2ndk.map.txt b/camera/ndk/libcamera2ndk.map.txt
index b6f1553..2b630db 100644
--- a/camera/ndk/libcamera2ndk.map.txt
+++ b/camera/ndk/libcamera2ndk.map.txt
@@ -31,6 +31,7 @@
ACameraMetadata_getAllTags;
ACameraMetadata_getConstEntry;
ACameraMetadata_isLogicalMultiCamera; # introduced=29
+ ACameraMetadata_fromCameraMetadata; # introduced=30
ACameraOutputTarget_create;
ACameraOutputTarget_free;
ACaptureRequest_addTarget;
diff --git a/camera/tests/CameraBinderTests.cpp b/camera/tests/CameraBinderTests.cpp
index 9a18b10..571cf59 100644
--- a/camera/tests/CameraBinderTests.cpp
+++ b/camera/tests/CameraBinderTests.cpp
@@ -83,6 +83,12 @@
return binder::Status::ok();
};
+ virtual binder::Status onPhysicalCameraStatusChanged(int32_t /*status*/,
+ const String16& /*cameraId*/, const String16& /*physicalCameraId*/) {
+ // No op
+ return binder::Status::ok();
+ };
+
virtual binder::Status onTorchStatusChanged(int32_t status, const String16& cameraId) {
Mutex::Autolock l(mLock);
mCameraTorchStatuses[cameraId] = status;
@@ -498,7 +504,9 @@
EXPECT_TRUE(res.isOk()) << res;
EXPECT_LE(0, streamId);
CameraMetadata sessionParams;
- res = device->endConfigure(/*isConstrainedHighSpeed*/ false, sessionParams);
+ std::vector<int> offlineStreamIds;
+ res = device->endConfigure(/*isConstrainedHighSpeed*/ false, sessionParams,
+ &offlineStreamIds);
EXPECT_TRUE(res.isOk()) << res;
EXPECT_FALSE(callbacks->hadError());
@@ -609,7 +617,8 @@
EXPECT_TRUE(res.isOk()) << res;
res = device->deleteStream(streamId);
EXPECT_TRUE(res.isOk()) << res;
- res = device->endConfigure(/*isConstrainedHighSpeed*/ false, sessionParams);
+ res = device->endConfigure(/*isConstrainedHighSpeed*/ false, sessionParams,
+ &offlineStreamIds);
EXPECT_TRUE(res.isOk()) << res;
sleep(/*second*/1); // allow some time for errors to show up, if any
diff --git a/drm/mediadrm/plugins/clearkey/hidl/CryptoPlugin.cpp b/drm/mediadrm/plugins/clearkey/hidl/CryptoPlugin.cpp
index f164f28..3ecf6d5 100644
--- a/drm/mediadrm/plugins/clearkey/hidl/CryptoPlugin.cpp
+++ b/drm/mediadrm/plugins/clearkey/hidl/CryptoPlugin.cpp
@@ -136,6 +136,8 @@
return Void();
}
+ base = static_cast<uint8_t *>(static_cast<void *>(destBase->getPointer()));
+
if (destBuffer.offset + destBuffer.size > destBase->getSize()) {
_hidl_cb(Status_V1_2::ERROR_DRM_FRAME_TOO_LARGE, 0, "invalid buffer size");
return Void();
diff --git a/drm/mediadrm/plugins/clearkey/hidl/DrmPlugin.cpp b/drm/mediadrm/plugins/clearkey/hidl/DrmPlugin.cpp
index 942ea7d..546eb3e 100644
--- a/drm/mediadrm/plugins/clearkey/hidl/DrmPlugin.cpp
+++ b/drm/mediadrm/plugins/clearkey/hidl/DrmPlugin.cpp
@@ -829,6 +829,12 @@
// and the drm service. The clearkey implementation consists of:
// count - number of secure stops
// list of fixed length secure stops
+ size_t countBufferSize = sizeof(uint32_t);
+ if (input.size() < countBufferSize) {
+ // SafetyNet logging
+ android_errorWriteLog(0x534e4554, "144766455");
+ return Status::BAD_VALUE;
+ }
uint32_t count = 0;
sscanf(reinterpret_cast<char*>(input.data()), "%04" PRIu32, &count);
diff --git a/media/codec2/sfplugin/CCodec.cpp b/media/codec2/sfplugin/CCodec.cpp
index 5c572e1..39263f9 100644
--- a/media/codec2/sfplugin/CCodec.cpp
+++ b/media/codec2/sfplugin/CCodec.cpp
@@ -48,6 +48,7 @@
#include "C2OMXNode.h"
#include "CCodecBufferChannel.h"
#include "CCodecConfig.h"
+#include "Codec2Mapper.h"
#include "InputSurfaceWrapper.h"
extern "C" android::PersistentSurface *CreateInputSurface();
@@ -717,6 +718,11 @@
encoder = false;
}
+ int32_t flags;
+ if (!msg->findInt32("flags", &flags)) {
+ return BAD_VALUE;
+ }
+
// TODO: read from intf()
if ((!encoder) != (comp->getName().find("encoder") == std::string::npos)) {
return UNKNOWN_ERROR;
@@ -743,6 +749,9 @@
Mutexed<std::unique_ptr<Config>>::Locked configLocked(mConfig);
const std::unique_ptr<Config> &config = *configLocked;
config->mUsingSurface = surface != nullptr;
+ config->mBuffersBoundToCodec = ((flags & CONFIGURE_FLAG_USE_BLOCK_MODEL) == 0);
+ ALOGD("[%s] buffers are %sbound to CCodec for this session",
+ comp->getName().c_str(), config->mBuffersBoundToCodec ? "" : "not ");
// Enforce required parameters
int32_t i32;
@@ -1299,6 +1308,7 @@
sp<AMessage> inputFormat;
sp<AMessage> outputFormat;
status_t err2 = OK;
+ bool buffersBoundToCodec = false;
{
Mutexed<std::unique_ptr<Config>>::Locked configLocked(mConfig);
const std::unique_ptr<Config> &config = *configLocked;
@@ -1307,12 +1317,13 @@
if (config->mInputSurface) {
err2 = config->mInputSurface->start();
}
+ buffersBoundToCodec = config->mBuffersBoundToCodec;
}
if (err2 != OK) {
mCallback->onError(err2, ACTION_CODE_FATAL);
return;
}
- err2 = mChannel->start(inputFormat, outputFormat);
+ err2 = mChannel->start(inputFormat, outputFormat, buffersBoundToCodec);
if (err2 != OK) {
mCallback->onError(err2, ACTION_CODE_FATAL);
return;
@@ -1556,7 +1567,11 @@
return;
}
- (void)mChannel->start(nullptr, nullptr);
+ (void)mChannel->start(nullptr, nullptr, [&]{
+ Mutexed<std::unique_ptr<Config>>::Locked configLocked(mConfig);
+ const std::unique_ptr<Config> &config = *configLocked;
+ return config->mBuffersBoundToCodec;
+ }());
{
Mutexed<State>::Locked state(mState);
@@ -1911,5 +1926,243 @@
inputSurface->getHalInterface()));
}
+static status_t GetCommonAllocatorIds(
+ const std::vector<std::string> &names,
+ C2Allocator::type_t type,
+ std::set<C2Allocator::id_t> *ids) {
+ int poolMask = GetCodec2PoolMask();
+ C2PlatformAllocatorStore::id_t preferredLinearId = GetPreferredLinearAllocatorId(poolMask);
+ C2Allocator::id_t defaultAllocatorId =
+ (type == C2Allocator::LINEAR) ? preferredLinearId : C2PlatformAllocatorStore::GRALLOC;
+
+ ids->clear();
+ if (names.empty()) {
+ return OK;
+ }
+ std::shared_ptr<Codec2Client::Interface> intf{
+ Codec2Client::CreateInterfaceByName(names[0].c_str())};
+ std::vector<std::unique_ptr<C2Param>> params;
+ c2_status_t err = intf->query(
+ {}, {C2PortAllocatorsTuning::input::PARAM_TYPE}, C2_MAY_BLOCK, ¶ms);
+ if (err == C2_OK && params.size() == 1u) {
+ C2PortAllocatorsTuning::input *allocators =
+ C2PortAllocatorsTuning::input::From(params[0].get());
+ if (allocators && allocators->flexCount() > 0) {
+ ids->insert(allocators->m.values, allocators->m.values + allocators->flexCount());
+ }
+ }
+ if (ids->empty()) {
+ // The component does not advertise allocators. Use default.
+ ids->insert(defaultAllocatorId);
+ }
+ for (size_t i = 1; i < names.size(); ++i) {
+ intf = Codec2Client::CreateInterfaceByName(names[i].c_str());
+ err = intf->query(
+ {}, {C2PortAllocatorsTuning::input::PARAM_TYPE}, C2_MAY_BLOCK, ¶ms);
+ bool filtered = false;
+ if (err == C2_OK && params.size() == 1u) {
+ C2PortAllocatorsTuning::input *allocators =
+ C2PortAllocatorsTuning::input::From(params[0].get());
+ if (allocators && allocators->flexCount() > 0) {
+ filtered = true;
+ for (auto it = ids->begin(); it != ids->end(); ) {
+ bool found = false;
+ for (size_t j = 0; j < allocators->flexCount(); ++j) {
+ if (allocators->m.values[j] == *it) {
+ found = true;
+ break;
+ }
+ }
+ if (found) {
+ ++it;
+ } else {
+ it = ids->erase(it);
+ }
+ }
+ }
+ }
+ if (!filtered) {
+ // The component does not advertise supported allocators. Use default.
+ bool containsDefault = (ids->count(defaultAllocatorId) > 0u);
+ if (ids->size() != (containsDefault ? 1 : 0)) {
+ ids->clear();
+ if (containsDefault) {
+ ids->insert(defaultAllocatorId);
+ }
+ }
+ }
+ }
+ // Finally, filter with pool masks
+ for (auto it = ids->begin(); it != ids->end(); ) {
+ if ((poolMask >> *it) & 1) {
+ ++it;
+ } else {
+ it = ids->erase(it);
+ }
+ }
+ return OK;
+}
+
+static status_t CalculateMinMaxUsage(
+ const std::vector<std::string> &names, uint64_t *minUsage, uint64_t *maxUsage) {
+ static C2StreamUsageTuning::input sUsage{0u /* stream id */};
+ *minUsage = 0;
+ *maxUsage = ~0ull;
+ for (const std::string &name : names) {
+ std::shared_ptr<Codec2Client::Interface> intf{
+ Codec2Client::CreateInterfaceByName(name.c_str())};
+ std::vector<C2FieldSupportedValuesQuery> fields;
+ fields.push_back(C2FieldSupportedValuesQuery::Possible(
+ C2ParamField{&sUsage, &sUsage.value}));
+ c2_status_t err = intf->querySupportedValues(fields, C2_MAY_BLOCK);
+ if (err != C2_OK) {
+ continue;
+ }
+ if (fields[0].status != C2_OK) {
+ continue;
+ }
+ const C2FieldSupportedValues &supported = fields[0].values;
+ if (supported.type != C2FieldSupportedValues::FLAGS) {
+ continue;
+ }
+ if (supported.values.empty()) {
+ *maxUsage = 0;
+ continue;
+ }
+ *minUsage |= supported.values[0].u64;
+ int64_t currentMaxUsage = 0;
+ for (const C2Value::Primitive &flags : supported.values) {
+ currentMaxUsage |= flags.u64;
+ }
+ *maxUsage &= currentMaxUsage;
+ }
+ return OK;
+}
+
+// static
+status_t CCodec::CanFetchLinearBlock(
+ const std::vector<std::string> &names, const C2MemoryUsage &usage, bool *isCompatible) {
+ uint64_t minUsage = usage.expected;
+ uint64_t maxUsage = ~0ull;
+ std::set<C2Allocator::id_t> allocators;
+ GetCommonAllocatorIds(names, C2Allocator::LINEAR, &allocators);
+ if (allocators.empty()) {
+ *isCompatible = false;
+ return OK;
+ }
+ CalculateMinMaxUsage(names, &minUsage, &maxUsage);
+ *isCompatible = ((maxUsage & minUsage) == minUsage);
+ return OK;
+}
+
+static std::shared_ptr<C2BlockPool> GetPool(C2Allocator::id_t allocId) {
+ static std::mutex sMutex{};
+ static std::map<C2Allocator::id_t, std::shared_ptr<C2BlockPool>> sPools;
+ std::unique_lock<std::mutex> lock{sMutex};
+ std::shared_ptr<C2BlockPool> pool;
+ auto it = sPools.find(allocId);
+ if (it == sPools.end()) {
+ c2_status_t err = CreateCodec2BlockPool(allocId, nullptr, &pool);
+ if (err == OK) {
+ sPools.emplace(allocId, pool);
+ } else {
+ pool.reset();
+ }
+ } else {
+ pool = it->second;
+ }
+ return pool;
+}
+
+// static
+std::shared_ptr<C2LinearBlock> CCodec::FetchLinearBlock(
+ size_t capacity, const C2MemoryUsage &usage, const std::vector<std::string> &names) {
+ uint64_t minUsage = usage.expected;
+ uint64_t maxUsage = ~0ull;
+ std::set<C2Allocator::id_t> allocators;
+ GetCommonAllocatorIds(names, C2Allocator::LINEAR, &allocators);
+ if (allocators.empty()) {
+ allocators.insert(C2PlatformAllocatorStore::DEFAULT_LINEAR);
+ }
+ CalculateMinMaxUsage(names, &minUsage, &maxUsage);
+ if ((maxUsage & minUsage) != minUsage) {
+ allocators.clear();
+ allocators.insert(C2PlatformAllocatorStore::DEFAULT_LINEAR);
+ }
+ std::shared_ptr<C2LinearBlock> block;
+ for (C2Allocator::id_t allocId : allocators) {
+ std::shared_ptr<C2BlockPool> pool = GetPool(allocId);
+ if (!pool) {
+ continue;
+ }
+ c2_status_t err = pool->fetchLinearBlock(capacity, C2MemoryUsage{minUsage}, &block);
+ if (err != C2_OK || !block) {
+ block.reset();
+ continue;
+ }
+ break;
+ }
+ return block;
+}
+
+// static
+status_t CCodec::CanFetchGraphicBlock(
+ const std::vector<std::string> &names, bool *isCompatible) {
+ uint64_t minUsage = 0;
+ uint64_t maxUsage = ~0ull;
+ std::set<C2Allocator::id_t> allocators;
+ GetCommonAllocatorIds(names, C2Allocator::GRAPHIC, &allocators);
+ if (allocators.empty()) {
+ *isCompatible = false;
+ return OK;
+ }
+ CalculateMinMaxUsage(names, &minUsage, &maxUsage);
+ *isCompatible = ((maxUsage & minUsage) == minUsage);
+ return OK;
+}
+
+// static
+std::shared_ptr<C2GraphicBlock> CCodec::FetchGraphicBlock(
+ int32_t width,
+ int32_t height,
+ int32_t format,
+ uint64_t usage,
+ const std::vector<std::string> &names) {
+ uint32_t halPixelFormat = HAL_PIXEL_FORMAT_YCBCR_420_888;
+ if (!C2Mapper::mapPixelFormatFrameworkToCodec(format, &halPixelFormat)) {
+ ALOGD("Unrecognized pixel format: %d", format);
+ return nullptr;
+ }
+ uint64_t minUsage = 0;
+ uint64_t maxUsage = ~0ull;
+ std::set<C2Allocator::id_t> allocators;
+ GetCommonAllocatorIds(names, C2Allocator::GRAPHIC, &allocators);
+ if (allocators.empty()) {
+ allocators.insert(C2PlatformAllocatorStore::DEFAULT_GRAPHIC);
+ }
+ CalculateMinMaxUsage(names, &minUsage, &maxUsage);
+ minUsage |= usage;
+ if ((maxUsage & minUsage) != minUsage) {
+ allocators.clear();
+ allocators.insert(C2PlatformAllocatorStore::DEFAULT_GRAPHIC);
+ }
+ std::shared_ptr<C2GraphicBlock> block;
+ for (C2Allocator::id_t allocId : allocators) {
+ std::shared_ptr<C2BlockPool> pool;
+ c2_status_t err = CreateCodec2BlockPool(allocId, nullptr, &pool);
+ if (err != C2_OK || !pool) {
+ continue;
+ }
+ err = pool->fetchGraphicBlock(
+ width, height, halPixelFormat, C2MemoryUsage{minUsage}, &block);
+ if (err != C2_OK || !block) {
+ block.reset();
+ continue;
+ }
+ break;
+ }
+ return block;
+}
+
} // namespace android
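
Illustrative sketch, not part of the patch: how a caller might use the new static helpers added above. The component name is a placeholder; headers and namespaces follow CCodec.cpp.

// Probe compatibility first, then fetch a CPU-mappable linear block usable with the
// listed components; callers fall back to codec-bound buffers on failure.
static std::shared_ptr<C2LinearBlock> fetchSharedLinearBlock(size_t capacity) {
    std::vector<std::string> names = {"c2.android.avc.decoder"};  // placeholder component name
    C2MemoryUsage usage{C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE};

    bool compatible = false;
    if (CCodec::CanFetchLinearBlock(names, usage, &compatible) != OK || !compatible) {
        return nullptr;
    }
    std::shared_ptr<C2LinearBlock> block = CCodec::FetchLinearBlock(capacity, usage, names);
    if (block) {
        C2WriteView view = block->map().get();
        if (view.error() != C2_OK) {
            return nullptr;
        }
        // view.data() / view.size() expose the writable region while `view` is alive.
    }
    return block;
}
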
diff --git a/media/codec2/sfplugin/CCodecBufferChannel.cpp b/media/codec2/sfplugin/CCodecBufferChannel.cpp
index 3218491..ae95336 100644
--- a/media/codec2/sfplugin/CCodecBufferChannel.cpp
+++ b/media/codec2/sfplugin/CCodecBufferChannel.cpp
@@ -29,6 +29,7 @@
#include <android/hardware/cas/native/1.0/IDescrambler.h>
#include <android/hardware/drm/1.0/types.h>
#include <android-base/stringprintf.h>
+#include <binder/MemoryBase.h>
#include <binder/MemoryDealer.h>
#include <cutils/properties.h>
#include <gui/Surface.h>
@@ -249,7 +250,7 @@
}
CCodecBufferChannel::~CCodecBufferChannel() {
- if (mCrypto != nullptr && mDealer != nullptr && mHeapSeqNum >= 0) {
+ if (mCrypto != nullptr && mHeapSeqNum >= 0) {
mCrypto->unsetHeap(mHeapSeqNum);
}
}
@@ -409,6 +410,173 @@
return OK;
}
+status_t CCodecBufferChannel::attachBuffer(
+ const std::shared_ptr<C2Buffer> &c2Buffer,
+ const sp<MediaCodecBuffer> &buffer) {
+ if (!buffer->copy(c2Buffer)) {
+ return -ENOSYS;
+ }
+ return OK;
+}
+
+void CCodecBufferChannel::ensureDecryptDestination(size_t size) {
+ if (!mDecryptDestination || mDecryptDestination->size() < size) {
+ sp<IMemoryHeap> heap{new MemoryHeapBase(size * 2)};
+ if (mDecryptDestination && mCrypto && mHeapSeqNum >= 0) {
+ mCrypto->unsetHeap(mHeapSeqNum);
+ }
+ mDecryptDestination = new MemoryBase(heap, 0, size * 2);
+ if (mCrypto) {
+ mHeapSeqNum = mCrypto->setHeap(hardware::fromHeap(heap));
+ }
+ }
+}
+
+int32_t CCodecBufferChannel::getHeapSeqNum(const sp<HidlMemory> &memory) {
+ CHECK(mCrypto);
+ auto it = mHeapSeqNumMap.find(memory);
+ int32_t heapSeqNum = -1;
+ if (it == mHeapSeqNumMap.end()) {
+ heapSeqNum = mCrypto->setHeap(memory);
+ mHeapSeqNumMap.emplace(memory, heapSeqNum);
+ } else {
+ heapSeqNum = it->second;
+ }
+ return heapSeqNum;
+}
+
+status_t CCodecBufferChannel::attachEncryptedBuffer(
+ const sp<hardware::HidlMemory> &memory,
+ bool secure,
+ const uint8_t *key,
+ const uint8_t *iv,
+ CryptoPlugin::Mode mode,
+ CryptoPlugin::Pattern pattern,
+ size_t offset,
+ const CryptoPlugin::SubSample *subSamples,
+ size_t numSubSamples,
+ const sp<MediaCodecBuffer> &buffer) {
+ static const C2MemoryUsage kSecureUsage{C2MemoryUsage::READ_PROTECTED, 0};
+ static const C2MemoryUsage kDefaultReadWriteUsage{
+ C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE};
+
+ size_t size = 0;
+ for (size_t i = 0; i < numSubSamples; ++i) {
+ size += subSamples[i].mNumBytesOfClearData + subSamples[i].mNumBytesOfEncryptedData;
+ }
+ std::shared_ptr<C2BlockPool> pool = mBlockPools.lock()->inputPool;
+ std::shared_ptr<C2LinearBlock> block;
+ c2_status_t err = pool->fetchLinearBlock(
+ size,
+ secure ? kSecureUsage : kDefaultReadWriteUsage,
+ &block);
+ if (err != C2_OK) {
+ return NO_MEMORY;
+ }
+ if (!secure) {
+ ensureDecryptDestination(size);
+ }
+ ssize_t result = -1;
+ ssize_t codecDataOffset = 0;
+ if (mCrypto) {
+ AString errorDetailMsg;
+ int32_t heapSeqNum = getHeapSeqNum(memory);
+ hardware::drm::V1_0::SharedBuffer src{(uint32_t)heapSeqNum, offset, size};
+ hardware::drm::V1_0::DestinationBuffer dst;
+ if (secure) {
+ dst.type = DrmBufferType::NATIVE_HANDLE;
+ dst.secureMemory = hardware::hidl_handle(block->handle());
+ } else {
+ dst.type = DrmBufferType::SHARED_MEMORY;
+ IMemoryToSharedBuffer(
+ mDecryptDestination, mHeapSeqNum, &dst.nonsecureMemory);
+ }
+ result = mCrypto->decrypt(
+ key, iv, mode, pattern, src, 0, subSamples, numSubSamples,
+ dst, &errorDetailMsg);
+ if (result < 0) {
+ return result;
+ }
+ if (dst.type == DrmBufferType::SHARED_MEMORY) {
+ C2WriteView view = block->map().get();
+ if (view.error() != C2_OK) {
+ return UNKNOWN_ERROR;
+ }
+ if (view.size() < result) {
+ return UNKNOWN_ERROR;
+ }
+ memcpy(view.data(), mDecryptDestination->unsecurePointer(), result);
+ }
+ } else {
+ // Here we cast CryptoPlugin::SubSample to hardware::cas::native::V1_0::SubSample
+ // directly; the structure definitions should match, as checked in DescramblerImpl.cpp.
+ hidl_vec<SubSample> hidlSubSamples;
+ hidlSubSamples.setToExternal((SubSample *)subSamples, numSubSamples, false /*own*/);
+
+ hardware::cas::native::V1_0::SharedBuffer src{*memory, offset, size};
+ hardware::cas::native::V1_0::DestinationBuffer dst;
+ if (secure) {
+ dst.type = BufferType::NATIVE_HANDLE;
+ dst.secureMemory = hardware::hidl_handle(block->handle());
+ } else {
+ dst.type = BufferType::SHARED_MEMORY;
+ dst.nonsecureMemory = src;
+ }
+
+ CasStatus status = CasStatus::OK;
+ hidl_string detailedError;
+ ScramblingControl sctrl = ScramblingControl::UNSCRAMBLED;
+
+ if (key != nullptr) {
+ sctrl = (ScramblingControl)key[0];
+ // Adjust for the PES offset
+ codecDataOffset = key[2] | (key[3] << 8);
+ }
+
+ auto returnVoid = mDescrambler->descramble(
+ sctrl,
+ hidlSubSamples,
+ src,
+ 0,
+ dst,
+ 0,
+ [&status, &result, &detailedError] (
+ CasStatus _status, uint32_t _bytesWritten,
+ const hidl_string& _detailedError) {
+ status = _status;
+ result = (ssize_t)_bytesWritten;
+ detailedError = _detailedError;
+ });
+
+ if (!returnVoid.isOk() || status != CasStatus::OK || result < 0) {
+ ALOGI("[%s] descramble failed, trans=%s, status=%d, result=%zd",
+ mName, returnVoid.description().c_str(), status, result);
+ return UNKNOWN_ERROR;
+ }
+
+ if (result < codecDataOffset) {
+ ALOGD("invalid codec data offset: %zd, result %zd", codecDataOffset, result);
+ return BAD_VALUE;
+ }
+ }
+ if (!secure) {
+ C2WriteView view = block->map().get();
+ if (view.error() != C2_OK) {
+ return UNKNOWN_ERROR;
+ }
+ if (view.size() < result) {
+ return UNKNOWN_ERROR;
+ }
+ memcpy(view.data(), mDecryptDestination->unsecurePointer(), result);
+ }
+ std::shared_ptr<C2Buffer> c2Buffer{C2Buffer::CreateLinearBuffer(
+ block->share(codecDataOffset, result - codecDataOffset, C2Fence{}))};
+ if (!buffer->copy(c2Buffer)) {
+ return -ENOSYS;
+ }
+ return OK;
+}
+
status_t CCodecBufferChannel::queueInputBuffer(const sp<MediaCodecBuffer> &buffer) {
QueueGuard guard(mSync);
if (!guard.isRunning()) {
@@ -774,7 +942,9 @@
}
status_t CCodecBufferChannel::start(
- const sp<AMessage> &inputFormat, const sp<AMessage> &outputFormat) {
+ const sp<AMessage> &inputFormat,
+ const sp<AMessage> &outputFormat,
+ bool buffersBoundToCodec) {
C2StreamBufferTypeSetting::input iStreamFormat(0u);
C2StreamBufferTypeSetting::output oStreamFormat(0u);
C2PortReorderBufferDepthTuning::output reorderDepth;
@@ -897,7 +1067,9 @@
input->numSlots = numInputSlots;
input->extraBuffers.flush();
input->numExtraSlots = 0u;
- if (graphic) {
+ if (!buffersBoundToCodec) {
+ input->buffers.reset(new SlotInputBuffers(mName));
+ } else if (graphic) {
if (mInputSurface) {
input->buffers.reset(new DummyInputBuffers(mName));
} else if (mMetaMode == MODE_ANW) {
@@ -1071,7 +1243,7 @@
output->outputDelay = outputDelayValue;
output->numSlots = numOutputSlots;
if (graphic) {
- if (outputSurface) {
+ if (outputSurface || !buffersBoundToCodec) {
output->buffers.reset(new GraphicOutputBuffers(mName));
} else {
output->buffers.reset(new RawGraphicOutputBuffers(numOutputSlots, mName));
@@ -1669,6 +1841,16 @@
}
void CCodecBufferChannel::setCrypto(const sp<ICrypto> &crypto) {
+ if (mCrypto != nullptr) {
+ for (std::pair<wp<HidlMemory>, int32_t> entry : mHeapSeqNumMap) {
+ mCrypto->unsetHeap(entry.second);
+ }
+ mHeapSeqNumMap.clear();
+ if (mHeapSeqNum >= 0) {
+ mCrypto->unsetHeap(mHeapSeqNum);
+ mHeapSeqNum = -1;
+ }
+ }
mCrypto = crypto;
}
diff --git a/media/codec2/sfplugin/CCodecBufferChannel.h b/media/codec2/sfplugin/CCodecBufferChannel.h
index 82fec18..0263211 100644
--- a/media/codec2/sfplugin/CCodecBufferChannel.h
+++ b/media/codec2/sfplugin/CCodecBufferChannel.h
@@ -70,6 +70,20 @@
const CryptoPlugin::SubSample *subSamples,
size_t numSubSamples,
AString *errorDetailMsg) override;
+ virtual status_t attachBuffer(
+ const std::shared_ptr<C2Buffer> &c2Buffer,
+ const sp<MediaCodecBuffer> &buffer) override;
+ virtual status_t attachEncryptedBuffer(
+ const sp<hardware::HidlMemory> &memory,
+ bool secure,
+ const uint8_t *key,
+ const uint8_t *iv,
+ CryptoPlugin::Mode mode,
+ CryptoPlugin::Pattern pattern,
+ size_t offset,
+ const CryptoPlugin::SubSample *subSamples,
+ size_t numSubSamples,
+ const sp<MediaCodecBuffer> &buffer) override;
virtual status_t renderOutputBuffer(
const sp<MediaCodecBuffer> &buffer, int64_t timestampNs) override;
virtual status_t discardBuffer(const sp<MediaCodecBuffer> &buffer) override;
@@ -108,7 +122,10 @@
* Start queueing buffers to the component. This object should never queue
* buffers before this call has completed.
*/
- status_t start(const sp<AMessage> &inputFormat, const sp<AMessage> &outputFormat);
+ status_t start(
+ const sp<AMessage> &inputFormat,
+ const sp<AMessage> &outputFormat,
+ bool buffersBoundToCodec);
/**
* Request initial input buffers to be filled by client.
@@ -216,11 +233,14 @@
std::unique_ptr<C2Work> work, const sp<AMessage> &outputFormat,
const C2StreamInitDataInfo::output *initData);
void sendOutputBuffers();
+ void ensureDecryptDestination(size_t size);
+ int32_t getHeapSeqNum(const sp<hardware::HidlMemory> &memory);
QueueSync mSync;
sp<MemoryDealer> mDealer;
sp<IMemory> mDecryptDestination;
int32_t mHeapSeqNum;
+ std::map<wp<hardware::HidlMemory>, int32_t> mHeapSeqNumMap;
std::shared_ptr<Codec2Client::Component> mComponent;
std::string mComponentName; ///< component name for debugging
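
The new mHeapSeqNumMap and getHeapSeqNum() above point at a per-heap cache of crypto registrations, so each input HidlMemory heap is registered with the plugin once and unregistered when the plugin is replaced (see the unsetHeap() calls added to setCrypto()). Below is a minimal sketch of such a cache, assuming ICrypto exposes a matching setHeap() that returns the sequence number; the helper name and surrounding wiring are illustrative, not the actual CCodecBufferChannel code.

```cpp
#include <map>
// Framework types (android::sp/wp, hardware::HidlMemory, ICrypto) come from
// libmediadrm/libhidl headers; exact include paths are omitted in this sketch.

// Hypothetical helper mirroring what mHeapSeqNumMap appears to do: register a
// heap with the crypto plugin once and reuse its sequence number afterwards.
static int32_t getOrRegisterHeap(
        std::map<android::wp<android::hardware::HidlMemory>, int32_t> &cache,
        const android::sp<android::ICrypto> &crypto,
        const android::sp<android::hardware::HidlMemory> &memory) {
    android::wp<android::hardware::HidlMemory> key{memory};
    auto it = cache.find(key);
    if (it != cache.end()) {
        return it->second;                     // heap already registered
    }
    int32_t seqNum = crypto->setHeap(memory);  // assumed ICrypto registration call
    if (seqNum >= 0) {
        cache.emplace(key, seqNum);            // remembered for a later unsetHeap()
    }
    return seqNum;
}
```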
diff --git a/media/codec2/sfplugin/CCodecBuffers.cpp b/media/codec2/sfplugin/CCodecBuffers.cpp
index 2a04810..265eeb7 100644
--- a/media/codec2/sfplugin/CCodecBuffers.cpp
+++ b/media/codec2/sfplugin/CCodecBuffers.cpp
@@ -494,6 +494,44 @@
return mAllocate();
}
+// SlotInputBuffers
+
+bool SlotInputBuffers::requestNewBuffer(size_t *index, sp<MediaCodecBuffer> *buffer) {
+ sp<Codec2Buffer> newBuffer = createNewBuffer();
+ *index = mImpl.assignSlot(newBuffer);
+ *buffer = newBuffer;
+ return true;
+}
+
+bool SlotInputBuffers::releaseBuffer(
+ const sp<MediaCodecBuffer> &buffer,
+ std::shared_ptr<C2Buffer> *c2buffer,
+ bool release) {
+ return mImpl.releaseSlot(buffer, c2buffer, release);
+}
+
+bool SlotInputBuffers::expireComponentBuffer(
+ const std::shared_ptr<C2Buffer> &c2buffer) {
+ return mImpl.expireComponentBuffer(c2buffer);
+}
+
+void SlotInputBuffers::flush() {
+ mImpl.flush();
+}
+
+std::unique_ptr<InputBuffers> SlotInputBuffers::toArrayMode(size_t) {
+ TRESPASS("Array mode should not be called in non-legacy mode");
+ return nullptr;
+}
+
+size_t SlotInputBuffers::numClientBuffers() const {
+ return mImpl.numClientBuffers();
+}
+
+sp<Codec2Buffer> SlotInputBuffers::createNewBuffer() {
+ return new DummyContainerBuffer{mFormat, nullptr};
+}
+
// LinearInputBuffers
bool LinearInputBuffers::requestNewBuffer(size_t *index, sp<MediaCodecBuffer> *buffer) {
diff --git a/media/codec2/sfplugin/CCodecBuffers.h b/media/codec2/sfplugin/CCodecBuffers.h
index ad972ce..bae08e0 100644
--- a/media/codec2/sfplugin/CCodecBuffers.h
+++ b/media/codec2/sfplugin/CCodecBuffers.h
@@ -547,6 +547,36 @@
std::function<sp<Codec2Buffer>()> mAllocate;
};
+class SlotInputBuffers : public InputBuffers {
+public:
+ SlotInputBuffers(const char *componentName, const char *name = "Slot-Input")
+ : InputBuffers(componentName, name),
+ mImpl(mName) { }
+ ~SlotInputBuffers() override = default;
+
+ bool requestNewBuffer(size_t *index, sp<MediaCodecBuffer> *buffer) final;
+
+ bool releaseBuffer(
+ const sp<MediaCodecBuffer> &buffer,
+ std::shared_ptr<C2Buffer> *c2buffer,
+ bool release) final;
+
+ bool expireComponentBuffer(
+ const std::shared_ptr<C2Buffer> &c2buffer) final;
+
+ void flush() final;
+
+ std::unique_ptr<InputBuffers> toArrayMode(size_t size) final;
+
+ size_t numClientBuffers() const final;
+
+protected:
+ sp<Codec2Buffer> createNewBuffer() final;
+
+private:
+ FlexBuffersImpl mImpl;
+};
+
class LinearInputBuffers : public InputBuffers {
public:
LinearInputBuffers(const char *componentName, const char *name = "1D-Input")
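
SlotInputBuffers (added above) hands out placeholder DummyContainerBuffers keyed by slot index so that a block-model client can attach its own C2Buffer later through attachBuffer()/attachEncryptedBuffer(). A rough sketch of the request/release cycle the class supports follows; the caller shown here is hypothetical, and the real call sites live in CCodecBufferChannel.

```cpp
// Sketch only: framework headers (CCodecBuffers.h etc.) assumed on the include path.
void cycleOneSlot(SlotInputBuffers *inputBuffers) {
    size_t index = 0;
    android::sp<MediaCodecBuffer> clientBuffer;
    if (!inputBuffers->requestNewBuffer(&index, &clientBuffer)) {
        return;  // no slot handed out
    }
    // ... the client attaches an existing C2Buffer to |clientBuffer| and queues it ...
    std::shared_ptr<C2Buffer> c2buffer;
    inputBuffers->releaseBuffer(clientBuffer, &c2buffer, true /* release */);
}
```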
diff --git a/media/codec2/sfplugin/CCodecConfig.cpp b/media/codec2/sfplugin/CCodecConfig.cpp
index ee3cdf6..d2f5ea7 100644
--- a/media/codec2/sfplugin/CCodecConfig.cpp
+++ b/media/codec2/sfplugin/CCodecConfig.cpp
@@ -595,34 +595,18 @@
.withMappers([](C2Value v) -> C2Value {
int32_t value;
if (v.get(&value)) {
- switch (value) {
- case COLOR_FormatSurface:
- return (uint32_t)HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
- case COLOR_FormatYUV420Flexible:
- return (uint32_t)HAL_PIXEL_FORMAT_YCBCR_420_888;
- case COLOR_FormatYUV420Planar:
- case COLOR_FormatYUV420SemiPlanar:
- case COLOR_FormatYUV420PackedPlanar:
- case COLOR_FormatYUV420PackedSemiPlanar:
- return (uint32_t)HAL_PIXEL_FORMAT_YV12;
- default:
- // TODO: support some sort of passthrough
- break;
+ uint32_t result;
+ if (C2Mapper::mapPixelFormatFrameworkToCodec(value, &result)) {
+ return result;
}
}
return C2Value();
}, [](C2Value v) -> C2Value {
uint32_t value;
if (v.get(&value)) {
- switch (value) {
- case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
- return COLOR_FormatSurface;
- case HAL_PIXEL_FORMAT_YV12:
- case HAL_PIXEL_FORMAT_YCBCR_420_888:
- return COLOR_FormatYUV420Flexible;
- default:
- // TODO: support some sort of passthrough
- break;
+ int32_t result;
+ if (C2Mapper::mapPixelFormatCodecToFramework(value, &result)) {
+ return result;
}
}
return C2Value();
diff --git a/media/codec2/sfplugin/CCodecConfig.h b/media/codec2/sfplugin/CCodecConfig.h
index a61c8b7..093bfdd 100644
--- a/media/codec2/sfplugin/CCodecConfig.h
+++ b/media/codec2/sfplugin/CCodecConfig.h
@@ -118,6 +118,7 @@
sp<AMessage> mOutputFormat;
bool mUsingSurface; ///< using input or output surface
+ bool mBuffersBoundToCodec; ///< whether buffers are bound to the codec or not.
std::shared_ptr<InputSurfaceWrapper> mInputSurface;
std::unique_ptr<InputSurfaceWrapper::Config> mISConfig;
diff --git a/media/codec2/sfplugin/Codec2Buffer.h b/media/codec2/sfplugin/Codec2Buffer.h
index 9291c52..09475ef 100644
--- a/media/codec2/sfplugin/Codec2Buffer.h
+++ b/media/codec2/sfplugin/Codec2Buffer.h
@@ -57,36 +57,6 @@
using MediaCodecBuffer::MediaCodecBuffer;
~Codec2Buffer() override = default;
- /**
- * \return C2Buffer object represents this buffer.
- */
- virtual std::shared_ptr<C2Buffer> asC2Buffer() = 0;
-
- /**
- * Test if we can copy the content of |buffer| into this object.
- *
- * \param buffer C2Buffer object to copy.
- * \return true if the content of buffer can be copied over to this buffer
- * false otherwise.
- */
- virtual bool canCopy(const std::shared_ptr<C2Buffer> &buffer) const {
- (void)buffer;
- return false;
- }
-
- /**
- * Copy the content of |buffer| into this object. This method assumes that
- * canCopy() check already passed.
- *
- * \param buffer C2Buffer object to copy.
- * \return true if successful
- * false otherwise.
- */
- virtual bool copy(const std::shared_ptr<C2Buffer> &buffer) {
- (void)buffer;
- return false;
- }
-
sp<ABuffer> getImageData() const { return mImageData; }
protected:
diff --git a/media/codec2/sfplugin/include/media/stagefright/CCodec.h b/media/codec2/sfplugin/include/media/stagefright/CCodec.h
index 30dc945..6ff2c4a 100644
--- a/media/codec2/sfplugin/include/media/stagefright/CCodec.h
+++ b/media/codec2/sfplugin/include/media/stagefright/CCodec.h
@@ -70,6 +70,22 @@
static PersistentSurface *CreateInputSurface();
+ static status_t CanFetchLinearBlock(
+ const std::vector<std::string> &names, const C2MemoryUsage &usage, bool *isCompatible);
+
+ static std::shared_ptr<C2LinearBlock> FetchLinearBlock(
+ size_t capacity, const C2MemoryUsage &usage, const std::vector<std::string> &names);
+
+ static status_t CanFetchGraphicBlock(
+ const std::vector<std::string> &names, bool *isCompatible);
+
+ static std::shared_ptr<C2GraphicBlock> FetchGraphicBlock(
+ int32_t width,
+ int32_t height,
+ int32_t format,
+ uint64_t usage,
+ const std::vector<std::string> &names);
+
protected:
virtual ~CCodec();
diff --git a/media/codec2/sfplugin/utils/Codec2Mapper.cpp b/media/codec2/sfplugin/utils/Codec2Mapper.cpp
index 2f3d688..903db6c 100644
--- a/media/codec2/sfplugin/utils/Codec2Mapper.cpp
+++ b/media/codec2/sfplugin/utils/Codec2Mapper.cpp
@@ -949,3 +949,41 @@
bool C2Mapper::map(ColorAspects::Transfer from, C2Color::transfer_t *to) {
return sColorTransfersSf.map(from, to);
}
+
+// static
+bool C2Mapper::mapPixelFormatFrameworkToCodec(
+ int32_t frameworkValue, uint32_t *c2Value) {
+ switch (frameworkValue) {
+ case COLOR_FormatSurface:
+ *c2Value = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
+ return true;
+ case COLOR_FormatYUV420Flexible:
+ *c2Value = HAL_PIXEL_FORMAT_YCBCR_420_888;
+ return true;
+ case COLOR_FormatYUV420Planar:
+ case COLOR_FormatYUV420SemiPlanar:
+ case COLOR_FormatYUV420PackedPlanar:
+ case COLOR_FormatYUV420PackedSemiPlanar:
+ *c2Value = HAL_PIXEL_FORMAT_YV12;
+ return true;
+ default:
+ // TODO: support some sort of passthrough
+ return false;
+ }
+}
+
+// static
+bool C2Mapper::mapPixelFormatCodecToFramework(
+ uint32_t c2Value, int32_t *frameworkValue) {
+ switch (c2Value) {
+ case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
+ *frameworkValue = COLOR_FormatSurface;
+ return true;
+ case HAL_PIXEL_FORMAT_YV12:
+ case HAL_PIXEL_FORMAT_YCBCR_420_888:
+ *frameworkValue = COLOR_FormatYUV420Flexible;
+ return true;
+ default:
+ return false;
+ }
+}
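
With the tables factored out, callers only check the boolean result. A quick round-trip through the two new helpers, based solely on the mappings above (Codec2Mapper.h and the framework color-format constants are assumed to be on the include path):

```cpp
// Round-trip a framework color format through the new C2Mapper helpers.
uint32_t halFormat = 0;
if (C2Mapper::mapPixelFormatFrameworkToCodec(COLOR_FormatYUV420Flexible, &halFormat)) {
    // halFormat == HAL_PIXEL_FORMAT_YCBCR_420_888 per the table above
    int32_t frameworkFormat = 0;
    if (C2Mapper::mapPixelFormatCodecToFramework(halFormat, &frameworkFormat)) {
        // frameworkFormat == COLOR_FormatYUV420Flexible again
    }
}
// Unknown values fall through to the default case and return false, which is
// what lets the CCodecConfig.cpp mappers simply return C2Value() for unmapped formats.
```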
diff --git a/media/codec2/sfplugin/utils/Codec2Mapper.h b/media/codec2/sfplugin/utils/Codec2Mapper.h
index cec6f07..797c8a8 100644
--- a/media/codec2/sfplugin/utils/Codec2Mapper.h
+++ b/media/codec2/sfplugin/utils/Codec2Mapper.h
@@ -75,6 +75,11 @@
static bool map(ColorAspects::MatrixCoeffs, C2Color::matrix_t*);
static bool map(C2Color::transfer_t, ColorAspects::Transfer*);
static bool map(ColorAspects::Transfer, C2Color::transfer_t*);
+
+ static bool mapPixelFormatFrameworkToCodec(
+ int32_t frameworkValue, uint32_t *c2Value);
+ static bool mapPixelFormatCodecToFramework(
+ uint32_t c2Value, int32_t *frameworkValue);
};
}
diff --git a/media/codec2/vndk/C2AllocatorIon.cpp b/media/codec2/vndk/C2AllocatorIon.cpp
index 0470a31..dfc90b1 100644
--- a/media/codec2/vndk/C2AllocatorIon.cpp
+++ b/media/codec2/vndk/C2AllocatorIon.cpp
@@ -262,7 +262,7 @@
*fence = C2Fence(); // not using fences
}
(void)mMappings.erase(it);
- ALOGV("successfully unmapped: %d", mHandle.bufferFd());
+ ALOGV("successfully unmapped: addr=%p size=%zu fd=%d", addr, size, mHandle.bufferFd());
return C2_OK;
}
ALOGD("unmap failed to find specified map");
diff --git a/media/extractors/Android.bp b/media/extractors/Android.bp
new file mode 100644
index 0000000..bb42580
--- /dev/null
+++ b/media/extractors/Android.bp
@@ -0,0 +1,46 @@
+// Copyright (C) 2020 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+cc_defaults {
+ name: "extractor-defaults",
+
+ include_dirs: [
+ "frameworks/av/media/libstagefright/include",
+ ],
+
+ shared_libs: [
+ "liblog",
+ "libmediandk#29",
+ ],
+
+ relative_install_path: "extractors",
+
+ compile_multilib: "first",
+
+ cflags: [
+ "-Werror",
+ "-Wall",
+ "-fvisibility=hidden",
+ ],
+
+ version_script: "exports.lds",
+
+ sanitize: {
+ cfi: true,
+ misc_undefined: [
+ "unsigned-integer-overflow",
+ "signed-integer-overflow",
+ ],
+ },
+}
\ No newline at end of file
diff --git a/media/extractors/aac/Android.bp b/media/extractors/aac/Android.bp
index 53b394f..60d3ae1 100644
--- a/media/extractors/aac/Android.bp
+++ b/media/extractors/aac/Android.bp
@@ -1,40 +1,13 @@
cc_library {
+ name: "libaacextractor",
+ defaults: ["extractor-defaults"],
srcs: ["AACExtractor.cpp"],
- include_dirs: [
- "frameworks/av/media/libstagefright/",
- ],
-
- shared_libs: [
- "liblog",
- "libmediandk",
- ],
-
static_libs: [
"libstagefright_foundation",
"libstagefright_metadatautils",
"libutils",
],
- name: "libaacextractor",
- relative_install_path: "extractors",
-
- compile_multilib: "first",
-
- cflags: [
- "-Werror",
- "-Wall",
- "-fvisibility=hidden",
- ],
- version_script: "exports.lds",
-
- sanitize: {
- cfi: true,
- misc_undefined: [
- "unsigned-integer-overflow",
- "signed-integer-overflow",
- ],
- },
-
}
diff --git a/media/extractors/amr/Android.bp b/media/extractors/amr/Android.bp
index cd76062..49c9567 100644
--- a/media/extractors/amr/Android.bp
+++ b/media/extractors/amr/Android.bp
@@ -1,38 +1,11 @@
cc_library {
+ name: "libamrextractor",
+ defaults: ["extractor-defaults"],
srcs: ["AMRExtractor.cpp"],
- include_dirs: [
- "frameworks/av/media/libstagefright/include",
- ],
-
- shared_libs: [
- "liblog",
- "libmediandk",
- ],
-
static_libs: [
"libstagefright_foundation",
],
- name: "libamrextractor",
- relative_install_path: "extractors",
-
- compile_multilib: "first",
-
- cflags: [
- "-Werror",
- "-Wall",
- "-fvisibility=hidden",
- ],
- version_script: "exports.lds",
-
- sanitize: {
- cfi: true,
- misc_undefined: [
- "unsigned-integer-overflow",
- "signed-integer-overflow",
- ],
- },
-
}
diff --git a/media/extractors/flac/Android.bp b/media/extractors/flac/Android.bp
index c669b34..3675611 100644
--- a/media/extractors/flac/Android.bp
+++ b/media/extractors/flac/Android.bp
@@ -1,16 +1,15 @@
cc_library {
+ name: "libflacextractor",
+ defaults: ["extractor-defaults"],
srcs: ["FLACExtractor.cpp"],
include_dirs: [
- "frameworks/av/media/libstagefright/include",
"external/flac/include",
],
shared_libs: [
"libbinder_ndk",
- "liblog",
- "libmediandk",
],
static_libs: [
@@ -21,24 +20,4 @@
"libutils",
],
- name: "libflacextractor",
- relative_install_path: "extractors",
-
- compile_multilib: "first",
-
- cflags: [
- "-Werror",
- "-Wall",
- "-fvisibility=hidden",
- ],
- version_script: "exports.lds",
-
- sanitize: {
- cfi: true,
- misc_undefined: [
- "unsigned-integer-overflow",
- "signed-integer-overflow",
- ],
- },
-
}
diff --git a/media/extractors/midi/Android.bp b/media/extractors/midi/Android.bp
index 40c91e7..592ffa9 100644
--- a/media/extractors/midi/Android.bp
+++ b/media/extractors/midi/Android.bp
@@ -1,43 +1,18 @@
cc_library {
+ name: "libmidiextractor",
+ defaults: ["extractor-defaults"],
srcs: ["MidiExtractor.cpp"],
- include_dirs: [
- "frameworks/av/media/libstagefright/include",
- ],
-
header_libs: [
"libmedia_headers",
],
- shared_libs: [
- "liblog",
- "libmediandk",
- ],
-
static_libs: [
"libmedia_midiiowrapper",
"libsonivox",
"libstagefright_foundation"
],
- name: "libmidiextractor",
- relative_install_path: "extractors",
- compile_multilib: "first",
-
- cflags: [
- "-Werror",
- "-Wall",
- "-fvisibility=hidden",
- ],
- version_script: "exports.lds",
-
- sanitize: {
- cfi: true,
- misc_undefined: [
- "unsigned-integer-overflow",
- "signed-integer-overflow",
- ],
- },
}
diff --git a/media/extractors/mkv/Android.bp b/media/extractors/mkv/Android.bp
index 942c88a..7ad8cc1 100644
--- a/media/extractors/mkv/Android.bp
+++ b/media/extractors/mkv/Android.bp
@@ -1,4 +1,6 @@
cc_library {
+ name: "libmkvextractor",
+ defaults: ["extractor-defaults"],
srcs: ["MatroskaExtractor.cpp"],
@@ -6,12 +8,9 @@
"external/flac/include",
"external/libvpx/libwebm",
"frameworks/av/media/libstagefright/flac/dec",
- "frameworks/av/media/libstagefright/include",
],
shared_libs: [
- "liblog",
- "libmediandk",
"libstagefright_flacdec",
],
@@ -22,24 +21,4 @@
"libutils",
],
- name: "libmkvextractor",
- relative_install_path: "extractors",
-
- compile_multilib: "first",
-
- cflags: [
- "-Werror",
- "-Wall",
- "-fvisibility=hidden",
- ],
- version_script: "exports.lds",
-
- sanitize: {
- cfi: true,
- misc_undefined: [
- "unsigned-integer-overflow",
- "signed-integer-overflow",
- ],
- },
-
}
diff --git a/media/extractors/mp3/Android.bp b/media/extractors/mp3/Android.bp
index 6f02b0f..102ac81 100644
--- a/media/extractors/mp3/Android.bp
+++ b/media/extractors/mp3/Android.bp
@@ -1,44 +1,16 @@
cc_library {
-
+ name: "libmp3extractor",
+ defaults: ["extractor-defaults"],
srcs: [
"MP3Extractor.cpp",
"VBRISeeker.cpp",
"XINGSeeker.cpp",
],
- include_dirs: [
- "frameworks/av/media/libstagefright/include",
- ],
-
- shared_libs: [
- "liblog",
- "libmediandk",
- ],
-
static_libs: [
"libutils",
"libstagefright_id3",
"libstagefright_foundation",
],
- name: "libmp3extractor",
- relative_install_path: "extractors",
-
- compile_multilib: "first",
-
- cflags: [
- "-Werror",
- "-Wall",
- "-fvisibility=hidden",
- ],
- version_script: "exports.lds",
-
- sanitize: {
- cfi: true,
- misc_undefined: [
- "unsigned-integer-overflow",
- "signed-integer-overflow",
- ],
- },
-
}
diff --git a/media/extractors/mp4/Android.bp b/media/extractors/mp4/Android.bp
index d9f11fc..e48e1b7 100644
--- a/media/extractors/mp4/Android.bp
+++ b/media/extractors/mp4/Android.bp
@@ -1,5 +1,6 @@
-cc_defaults {
- name: "libmp4extractor_defaults",
+cc_library {
+ name: "libmp4extractor",
+ defaults: ["extractor-defaults"],
srcs: [
"AC4Parser.cpp",
@@ -9,50 +10,10 @@
"SampleTable.cpp",
],
- include_dirs: [
- "frameworks/av/media/libstagefright/",
- ],
-
- shared_libs: [
- "liblog",
- "libmediandk"
- ],
-
static_libs: [
"libstagefright_esds",
"libstagefright_foundation",
"libstagefright_id3",
"libutils",
],
-
- cflags: [
- "-Werror",
- "-Wall",
- "-fvisibility=hidden",
- ],
- version_script: "exports.lds",
- relative_install_path: "extractors",
- compile_multilib: "first",
-}
-
-cc_library {
-
-
- name: "libmp4extractor",
- defaults: ["libmp4extractor_defaults"],
-
- sanitize: {
- cfi: true,
- misc_undefined: [
- "unsigned-integer-overflow",
- "signed-integer-overflow",
- ],
- },
-
-}
-
-cc_library_static {
- name: "libmp4extractor_fuzzing",
-
- defaults: ["libmp4extractor_defaults"],
}
diff --git a/media/extractors/mp4/MPEG4Extractor.cpp b/media/extractors/mp4/MPEG4Extractor.cpp
index 9e0bc96..c8079eb 100755
--- a/media/extractors/mp4/MPEG4Extractor.cpp
+++ b/media/extractors/mp4/MPEG4Extractor.cpp
@@ -31,8 +31,9 @@
#include "MPEG4Extractor.h"
#include "SampleTable.h"
#include "ItemTable.h"
-#include "include/ESDS.h"
+#include <ESDS.h>
+#include <ID3.h>
#include <media/stagefright/DataSourceBase.h>
#include <media/ExtractorUtils.h>
#include <media/stagefright/foundation/ABitReader.h>
@@ -52,7 +53,6 @@
#include <utils/String8.h>
#include <byteswap.h>
-#include "include/ID3.h"
#ifndef UINT32_MAX
#define UINT32_MAX (4294967295U)
diff --git a/media/extractors/mpeg2/Android.bp b/media/extractors/mpeg2/Android.bp
index 8d9145d..ef8431e 100644
--- a/media/extractors/mpeg2/Android.bp
+++ b/media/extractors/mpeg2/Android.bp
@@ -1,4 +1,7 @@
cc_library {
+ name: "libmpeg2extractor",
+
+ defaults: ["extractor-defaults"],
srcs: [
"ExtractorBundle.cpp",
@@ -6,15 +9,8 @@
"MPEG2TSExtractor.cpp",
],
- include_dirs: [
- "frameworks/av/media/libstagefright",
- "frameworks/av/media/libstagefright/include",
- ],
-
shared_libs: [
"libcgrouprc#29",
- "liblog#10000",
- "libmediandk#29",
"libvndksupport#29",
],
@@ -46,27 +42,6 @@
"libutils",
],
- name: "libmpeg2extractor",
- relative_install_path: "extractors",
-
- compile_multilib: "first",
-
- cflags: [
- "-Werror",
- "-Wall",
- "-fvisibility=hidden",
- ],
- version_script: "exports.lds",
-
- sanitize: {
- // STOPSHIP: turn on cfi once b/139945549 is resolved.
- cfi: false,
- misc_undefined: [
- "unsigned-integer-overflow",
- "signed-integer-overflow",
- ],
- },
-
apex_available: [
"com.android.media",
"test_com.android.media",
diff --git a/media/extractors/mpeg2/MPEG2PSExtractor.cpp b/media/extractors/mpeg2/MPEG2PSExtractor.cpp
index 002a855..d431b05 100644
--- a/media/extractors/mpeg2/MPEG2PSExtractor.cpp
+++ b/media/extractors/mpeg2/MPEG2PSExtractor.cpp
@@ -20,8 +20,8 @@
#include "MPEG2PSExtractor.h"
-#include "mpeg2ts/AnotherPacketSource.h"
-#include "mpeg2ts/ESQueue.h"
+#include <AnotherPacketSource.h>
+#include <ESQueue.h>
#include <media/stagefright/foundation/ABitReader.h>
#include <media/stagefright/foundation/ABuffer.h>
diff --git a/media/extractors/mpeg2/MPEG2TSExtractor.cpp b/media/extractors/mpeg2/MPEG2TSExtractor.cpp
index f4643c5..9e093eb 100644
--- a/media/extractors/mpeg2/MPEG2TSExtractor.cpp
+++ b/media/extractors/mpeg2/MPEG2TSExtractor.cpp
@@ -37,8 +37,7 @@
#include <media/stagefright/Utils.h>
#include <utils/String8.h>
-#include "mpeg2ts/AnotherPacketSource.h"
-#include "mpeg2ts/ATSParser.h"
+#include <AnotherPacketSource.h>
#include <hidl/HybridInterface.h>
#include <android/hardware/cas/1.0/ICas.h>
diff --git a/media/extractors/mpeg2/MPEG2TSExtractor.h b/media/extractors/mpeg2/MPEG2TSExtractor.h
index dcd1e7b..fd77b08 100644
--- a/media/extractors/mpeg2/MPEG2TSExtractor.h
+++ b/media/extractors/mpeg2/MPEG2TSExtractor.h
@@ -27,7 +27,7 @@
#include <utils/KeyedVector.h>
#include <utils/Vector.h>
-#include "mpeg2ts/ATSParser.h"
+#include <ATSParser.h>
namespace android {
diff --git a/media/extractors/ogg/Android.bp b/media/extractors/ogg/Android.bp
index e661b5d..7aed683 100644
--- a/media/extractors/ogg/Android.bp
+++ b/media/extractors/ogg/Android.bp
@@ -1,9 +1,11 @@
cc_library {
+ name: "liboggextractor",
+
+ defaults: ["extractor-defaults"],
srcs: ["OggExtractor.cpp"],
include_dirs: [
- "frameworks/av/media/libstagefright/include",
"external/tremolo",
],
@@ -11,11 +13,6 @@
"libaudio_system_headers",
],
- shared_libs: [
- "liblog",
- "libmediandk",
- ],
-
static_libs: [
"libstagefright_foundation",
"libstagefright_metadatautils",
@@ -23,24 +20,4 @@
"libvorbisidec",
],
- name: "liboggextractor",
- relative_install_path: "extractors",
-
- compile_multilib: "first",
-
- cflags: [
- "-Werror",
- "-Wall",
- "-fvisibility=hidden",
- ],
- version_script: "exports.lds",
-
- sanitize: {
- cfi: true,
- misc_undefined: [
- "unsigned-integer-overflow",
- "signed-integer-overflow",
- ],
- },
-
}
diff --git a/media/extractors/tests/Android.bp b/media/extractors/tests/Android.bp
new file mode 100644
index 0000000..059c308
--- /dev/null
+++ b/media/extractors/tests/Android.bp
@@ -0,0 +1,96 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+cc_test {
+ name: "ExtractorUnitTest",
+ gtest: true,
+
+ srcs: ["ExtractorUnitTest.cpp"],
+
+ static_libs: [
+ "libaacextractor",
+ "libamrextractor",
+ "libmp3extractor",
+ "libwavextractor",
+ "liboggextractor",
+ "libflacextractor",
+ "libmidiextractor",
+ "libmkvextractor",
+ "libmpeg2extractor",
+ "libmp4extractor",
+ "libaudioutils",
+ "libdatasource",
+
+ "libstagefright",
+ "libstagefright_id3",
+ "libstagefright_flacdec",
+ "libstagefright_esds",
+ "libstagefright_mpeg2support",
+ "libstagefright_mpeg2extractor",
+ "libstagefright_foundation",
+ "libstagefright_metadatautils",
+
+ "libmedia_midiiowrapper",
+ "libsonivox",
+ "libvorbisidec",
+ "libwebm",
+ "libFLAC",
+ ],
+
+ shared_libs: [
+ "android.hardware.cas@1.0",
+ "android.hardware.cas.native@1.0",
+ "android.hidl.token@1.0-utils",
+ "android.hidl.allocator@1.0",
+ "libbinder",
+ "libbinder_ndk",
+ "libutils",
+ "liblog",
+ "libcutils",
+ "libmediandk",
+ "libmedia",
+ "libcrypto",
+ "libhidlmemory",
+ "libhidlbase",
+ ],
+
+ include_dirs: [
+ "frameworks/av/media/extractors/",
+ "frameworks/av/media/libstagefright/",
+ ],
+
+ compile_multilib: "first",
+
+ cflags: [
+ "-Werror",
+ "-Wall",
+ ],
+
+ ldflags: [
+ "-Wl",
+ "-Bsymbolic",
+ // to ignore duplicate symbol: GETEXTRACTORDEF
+ "-z muldefs",
+ ],
+
+ sanitize: {
+ cfi: true,
+ misc_undefined: [
+ "unsigned-integer-overflow",
+ "signed-integer-overflow",
+ ],
+ },
+}
diff --git a/media/extractors/tests/ExtractorUnitTest.cpp b/media/extractors/tests/ExtractorUnitTest.cpp
new file mode 100644
index 0000000..518166e
--- /dev/null
+++ b/media/extractors/tests/ExtractorUnitTest.cpp
@@ -0,0 +1,528 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "ExtractorUnitTest"
+#include <utils/Log.h>
+
+#include <datasource/FileSource.h>
+#include <media/stagefright/MediaBufferGroup.h>
+#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/MetaDataUtils.h>
+
+#include "aac/AACExtractor.h"
+#include "amr/AMRExtractor.h"
+#include "flac/FLACExtractor.h"
+#include "midi/MidiExtractor.h"
+#include "mkv/MatroskaExtractor.h"
+#include "mp3/MP3Extractor.h"
+#include "mp4/MPEG4Extractor.h"
+#include "mp4/SampleTable.h"
+#include "mpeg2/MPEG2PSExtractor.h"
+#include "mpeg2/MPEG2TSExtractor.h"
+#include "ogg/OggExtractor.h"
+#include "wav/WAVExtractor.h"
+
+#include "ExtractorUnitTestEnvironment.h"
+
+using namespace android;
+
+#define OUTPUT_DUMP_FILE "/data/local/tmp/extractorOutput"
+
+constexpr int32_t kMaxCount = 10;
+constexpr int32_t kOpusSeekPreRollUs = 80000; // 80 ms
+
+static ExtractorUnitTestEnvironment *gEnv = nullptr;
+
+class ExtractorUnitTest : public ::testing::TestWithParam<pair<string, string>> {
+ public:
+ ExtractorUnitTest() : mInputFp(nullptr), mDataSource(nullptr), mExtractor(nullptr) {}
+
+ ~ExtractorUnitTest() {
+ if (mInputFp) {
+ fclose(mInputFp);
+ mInputFp = nullptr;
+ }
+ if (mDataSource) {
+ mDataSource.clear();
+ mDataSource = nullptr;
+ }
+ if (mExtractor) {
+ delete mExtractor;
+ mExtractor = nullptr;
+ }
+ }
+
+ virtual void SetUp() override {
+ mExtractorName = unknown_comp;
+ mDisableTest = false;
+
+ static const std::map<std::string, standardExtractors> mapExtractor = {
+ {"aac", AAC}, {"amr", AMR}, {"mp3", MP3}, {"ogg", OGG},
+ {"wav", WAV}, {"mkv", MKV}, {"flac", FLAC}, {"midi", MIDI},
+ {"mpeg4", MPEG4}, {"mpeg2ts", MPEG2TS}, {"mpeg2ps", MPEG2PS}};
+ // Find the component type
+ string writerFormat = GetParam().first;
+ if (mapExtractor.find(writerFormat) != mapExtractor.end()) {
+ mExtractorName = mapExtractor.at(writerFormat);
+ }
+ if (mExtractorName == standardExtractors::unknown_comp) {
+ cout << "[ WARN ] Test Skipped. Invalid extractor\n";
+ mDisableTest = true;
+ }
+ }
+
+ int32_t setDataSource(string inputFileName);
+
+ int32_t createExtractor();
+
+ enum standardExtractors {
+ AAC,
+ AMR,
+ FLAC,
+ MIDI,
+ MKV,
+ MP3,
+ MPEG4,
+ MPEG2PS,
+ MPEG2TS,
+ OGG,
+ WAV,
+ unknown_comp,
+ };
+
+ bool mDisableTest;
+ standardExtractors mExtractorName;
+
+ FILE *mInputFp;
+ sp<DataSource> mDataSource;
+ MediaExtractorPluginHelper *mExtractor;
+};
+
+int32_t ExtractorUnitTest::setDataSource(string inputFileName) {
+ mInputFp = fopen(inputFileName.c_str(), "rb");
+ if (!mInputFp) {
+ ALOGE("Unable to open input file for reading");
+ return -1;
+ }
+ struct stat buf;
+ stat(inputFileName.c_str(), &buf);
+ int32_t fd = fileno(mInputFp);
+ mDataSource = new FileSource(dup(fd), 0, buf.st_size);
+ if (!mDataSource) return -1;
+ return 0;
+}
+
+int32_t ExtractorUnitTest::createExtractor() {
+ switch (mExtractorName) {
+ case AAC:
+ mExtractor = new AACExtractor(new DataSourceHelper(mDataSource->wrap()), 0);
+ break;
+ case AMR:
+ mExtractor = new AMRExtractor(new DataSourceHelper(mDataSource->wrap()));
+ break;
+ case MP3:
+ mExtractor = new MP3Extractor(new DataSourceHelper(mDataSource->wrap()), nullptr);
+ break;
+ case OGG:
+ mExtractor = new OggExtractor(new DataSourceHelper(mDataSource->wrap()));
+ break;
+ case WAV:
+ mExtractor = new WAVExtractor(new DataSourceHelper(mDataSource->wrap()));
+ break;
+ case MKV:
+ mExtractor = new MatroskaExtractor(new DataSourceHelper(mDataSource->wrap()));
+ break;
+ case FLAC:
+ mExtractor = new FLACExtractor(new DataSourceHelper(mDataSource->wrap()));
+ break;
+ case MPEG4:
+ mExtractor = new MPEG4Extractor(new DataSourceHelper(mDataSource->wrap()));
+ break;
+ case MPEG2TS:
+ mExtractor = new MPEG2TSExtractor(new DataSourceHelper(mDataSource->wrap()));
+ break;
+ case MPEG2PS:
+ mExtractor = new MPEG2PSExtractor(new DataSourceHelper(mDataSource->wrap()));
+ break;
+ case MIDI:
+ mExtractor = new MidiExtractor(mDataSource->wrap());
+ break;
+ default:
+ return -1;
+ }
+ if (!mExtractor) return -1;
+ return 0;
+}
+
+void getSeekablePoints(vector<int64_t> &seekablePoints, MediaTrackHelper *track) {
+ int32_t status = 0;
+ if (!seekablePoints.empty()) {
+ seekablePoints.clear();
+ }
+ int64_t timeStamp;
+ while (status != AMEDIA_ERROR_END_OF_STREAM) {
+ MediaBufferHelper *buffer = nullptr;
+ status = track->read(&buffer);
+ if (buffer) {
+ AMediaFormat *metaData = buffer->meta_data();
+ int32_t isSync = 0;
+ AMediaFormat_getInt32(metaData, AMEDIAFORMAT_KEY_IS_SYNC_FRAME, &isSync);
+ if (isSync) {
+ AMediaFormat_getInt64(metaData, AMEDIAFORMAT_KEY_TIME_US, &timeStamp);
+ seekablePoints.push_back(timeStamp);
+ }
+ buffer->release();
+ }
+ }
+}
+
+TEST_P(ExtractorUnitTest, CreateExtractorTest) {
+ if (mDisableTest) return;
+
+ ALOGV("Checks if a valid extractor is created for a given input file");
+ string inputFileName = gEnv->getRes() + GetParam().second;
+
+ ASSERT_EQ(setDataSource(inputFileName), 0)
+ << "SetDataSource failed for " << GetParam().first << " extractor";
+
+ ASSERT_EQ(createExtractor(), 0)
+ << "Extractor creation failed for " << GetParam().first << " extractor";
+
+ // A valid extractor instance should return success for the following calls
+ ASSERT_GT(mExtractor->countTracks(), 0);
+
+ AMediaFormat *format = AMediaFormat_new();
+ ASSERT_NE(format, nullptr) << "AMediaFormat_new returned null AMediaFormat";
+
+ ASSERT_EQ(mExtractor->getMetaData(format), AMEDIA_OK);
+ AMediaFormat_delete(format);
+}
+
+TEST_P(ExtractorUnitTest, ExtractorTest) {
+ if (mDisableTest) return;
+
+ ALOGV("Validates %s Extractor for a given input file", GetParam().first.c_str());
+ string inputFileName = gEnv->getRes() + GetParam().second;
+
+ int32_t status = setDataSource(inputFileName);
+ ASSERT_EQ(status, 0) << "SetDataSource failed for " << GetParam().first << " extractor";
+
+ status = createExtractor();
+ ASSERT_EQ(status, 0) << "Extractor creation failed for " << GetParam().first << " extractor";
+
+ int32_t numTracks = mExtractor->countTracks();
+ ASSERT_GT(numTracks, 0) << "Extractor didn't find any track for the given clip";
+
+ for (int32_t idx = 0; idx < numTracks; idx++) {
+ MediaTrackHelper *track = mExtractor->getTrack(idx);
+ ASSERT_NE(track, nullptr) << "Failed to get track for index " << idx;
+
+ CMediaTrack *cTrack = wrap(track);
+ ASSERT_NE(cTrack, nullptr) << "Failed to get track wrapper for index " << idx;
+
+ MediaBufferGroup *bufferGroup = new MediaBufferGroup();
+ status = cTrack->start(track, bufferGroup->wrap());
+ ASSERT_EQ(OK, (media_status_t)status) << "Failed to start the track";
+
+ FILE *outFp = fopen((OUTPUT_DUMP_FILE + to_string(idx)).c_str(), "wb");
+ if (!outFp) {
+ ALOGW("Unable to open output file for dumping extracted stream");
+ }
+
+ while (status != AMEDIA_ERROR_END_OF_STREAM) {
+ MediaBufferHelper *buffer = nullptr;
+ status = track->read(&buffer);
+ ALOGV("track->read Status = %d buffer %p", status, buffer);
+ if (buffer) {
+ ALOGV("buffer->data %p buffer->size() %zu buffer->range_length() %zu",
+ buffer->data(), buffer->size(), buffer->range_length());
+ if (outFp) fwrite(buffer->data(), 1, buffer->range_length(), outFp);
+ buffer->release();
+ }
+ }
+ if (outFp) fclose(outFp);
+ status = cTrack->stop(track);
+ ASSERT_EQ(OK, status) << "Failed to stop the track";
+ delete bufferGroup;
+ delete track;
+ }
+}
+
+TEST_P(ExtractorUnitTest, MetaDataComparisonTest) {
+ if (mDisableTest) return;
+
+ ALOGV("Validates Extractor's meta data for a given input file");
+ string inputFileName = gEnv->getRes() + GetParam().second;
+
+ int32_t status = setDataSource(inputFileName);
+ ASSERT_EQ(status, 0) << "SetDataSource failed for " << GetParam().first << " extractor";
+
+ status = createExtractor();
+ ASSERT_EQ(status, 0) << "Extractor creation failed for " << GetParam().first << " extractor";
+
+ int32_t numTracks = mExtractor->countTracks();
+ ASSERT_GT(numTracks, 0) << "Extractor didn't find any track for the given clip";
+
+ AMediaFormat *extractorFormat = AMediaFormat_new();
+ ASSERT_NE(extractorFormat, nullptr) << "AMediaFormat_new returned null AMediaFormat";
+ AMediaFormat *trackFormat = AMediaFormat_new();
+ ASSERT_NE(trackFormat, nullptr) << "AMediaFormat_new returned null AMediaFormat";
+
+ for (int32_t idx = 0; idx < numTracks; idx++) {
+ MediaTrackHelper *track = mExtractor->getTrack(idx);
+ ASSERT_NE(track, nullptr) << "Failed to get track for index " << idx;
+
+ CMediaTrack *cTrack = wrap(track);
+ ASSERT_NE(cTrack, nullptr) << "Failed to get track wrapper for index " << idx;
+
+ MediaBufferGroup *bufferGroup = new MediaBufferGroup();
+ status = cTrack->start(track, bufferGroup->wrap());
+ ASSERT_EQ(OK, (media_status_t)status) << "Failed to start the track";
+
+ status = mExtractor->getTrackMetaData(extractorFormat, idx, 1);
+ ASSERT_EQ(OK, (media_status_t)status) << "Failed to get trackMetaData";
+
+ status = track->getFormat(trackFormat);
+ ASSERT_EQ(OK, (media_status_t)status) << "Failed to get track meta data";
+
+ const char *extractorMime, *trackMime;
+ AMediaFormat_getString(extractorFormat, AMEDIAFORMAT_KEY_MIME, &extractorMime);
+ AMediaFormat_getString(trackFormat, AMEDIAFORMAT_KEY_MIME, &trackMime);
+ ASSERT_TRUE(!strcmp(extractorMime, trackMime))
+ << "Extractor's format doesn't match track format";
+
+ if (!strncmp(extractorMime, "audio/", 6)) {
+ int32_t exSampleRate, exChannelCount;
+ int32_t trackSampleRate, trackChannelCount;
+ ASSERT_TRUE(AMediaFormat_getInt32(extractorFormat, AMEDIAFORMAT_KEY_CHANNEL_COUNT,
+ &exChannelCount));
+ ASSERT_TRUE(AMediaFormat_getInt32(extractorFormat, AMEDIAFORMAT_KEY_SAMPLE_RATE,
+ &exSampleRate));
+ ASSERT_TRUE(AMediaFormat_getInt32(trackFormat, AMEDIAFORMAT_KEY_CHANNEL_COUNT,
+ &trackChannelCount));
+ ASSERT_TRUE(AMediaFormat_getInt32(trackFormat, AMEDIAFORMAT_KEY_SAMPLE_RATE,
+ &trackSampleRate));
+ ASSERT_EQ(exChannelCount, trackChannelCount) << "ChannelCount not as expected";
+ ASSERT_EQ(exSampleRate, trackSampleRate) << "SampleRate not as expected";
+ } else {
+ int32_t exWidth, exHeight;
+ int32_t trackWidth, trackHeight;
+ ASSERT_TRUE(AMediaFormat_getInt32(extractorFormat, AMEDIAFORMAT_KEY_WIDTH, &exWidth));
+ ASSERT_TRUE(AMediaFormat_getInt32(extractorFormat, AMEDIAFORMAT_KEY_HEIGHT, &exHeight));
+ ASSERT_TRUE(AMediaFormat_getInt32(trackFormat, AMEDIAFORMAT_KEY_WIDTH, &trackWidth));
+ ASSERT_TRUE(AMediaFormat_getInt32(trackFormat, AMEDIAFORMAT_KEY_HEIGHT, &trackHeight));
+ ASSERT_EQ(exWidth, trackWidth) << "Width not as expected";
+ ASSERT_EQ(exHeight, trackHeight) << "Height not as expected";
+ }
+ status = cTrack->stop(track);
+ ASSERT_EQ(OK, status) << "Failed to stop the track";
+ delete bufferGroup;
+ delete track;
+ }
+ AMediaFormat_delete(trackFormat);
+ AMediaFormat_delete(extractorFormat);
+}
+
+TEST_P(ExtractorUnitTest, MultipleStartStopTest) {
+ if (mDisableTest) return;
+
+ ALOGV("Test %s extractor for multiple start and stop calls", GetParam().first.c_str());
+ string inputFileName = gEnv->getRes() + GetParam().second;
+
+ int32_t status = setDataSource(inputFileName);
+ ASSERT_EQ(status, 0) << "SetDataSource failed for " << GetParam().first << " extractor";
+
+ status = createExtractor();
+ ASSERT_EQ(status, 0) << "Extractor creation failed for " << GetParam().first << " extractor";
+
+ int32_t numTracks = mExtractor->countTracks();
+ ASSERT_GT(numTracks, 0) << "Extractor didn't find any track for the given clip";
+
+ // start/stop the tracks multiple times
+ for (int32_t count = 0; count < kMaxCount; count++) {
+ for (int32_t idx = 0; idx < numTracks; idx++) {
+ MediaTrackHelper *track = mExtractor->getTrack(idx);
+ ASSERT_NE(track, nullptr) << "Failed to get track for index " << idx;
+
+ CMediaTrack *cTrack = wrap(track);
+ ASSERT_NE(cTrack, nullptr) << "Failed to get track wrapper for index " << idx;
+
+ MediaBufferGroup *bufferGroup = new MediaBufferGroup();
+ status = cTrack->start(track, bufferGroup->wrap());
+ ASSERT_EQ(OK, (media_status_t)status) << "Failed to start the track";
+ MediaBufferHelper *buffer = nullptr;
+ status = track->read(&buffer);
+ if (buffer) {
+ ALOGV("buffer->data %p buffer->size() %zu buffer->range_length() %zu",
+ buffer->data(), buffer->size(), buffer->range_length());
+ buffer->release();
+ }
+ status = cTrack->stop(track);
+ ASSERT_EQ(OK, status) << "Failed to stop the track";
+ delete bufferGroup;
+ delete track;
+ }
+ }
+}
+
+TEST_P(ExtractorUnitTest, SeekTest) {
+ // Both the FLAC and WAV extractors can return samples from any pts and mark the given
+ // sample as a sync frame, so this seek test is not applicable to the FLAC and WAV extractors.
+ if (mDisableTest || mExtractorName == FLAC || mExtractorName == WAV) return;
+
+ ALOGV("Validates %s Extractor behaviour for different seek modes", GetParam().first.c_str());
+ string inputFileName = gEnv->getRes() + GetParam().second;
+
+ int32_t status = setDataSource(inputFileName);
+ ASSERT_EQ(status, 0) << "SetDataSource failed for " << GetParam().first << " extractor";
+
+ status = createExtractor();
+ ASSERT_EQ(status, 0) << "Extractor creation failed for " << GetParam().first << " extractor";
+
+ int32_t numTracks = mExtractor->countTracks();
+ ASSERT_GT(numTracks, 0) << "Extractor didn't find any track for the given clip";
+
+ uint32_t seekFlag = mExtractor->flags();
+ if (!(seekFlag & MediaExtractorPluginHelper::CAN_SEEK)) {
+ cout << "[ WARN ] Test Skipped. " << GetParam().first
+ << " Extractor doesn't support seek\n";
+ return;
+ }
+
+ vector<int64_t> seekablePoints;
+ for (int32_t idx = 0; idx < numTracks; idx++) {
+ MediaTrackHelper *track = mExtractor->getTrack(idx);
+ ASSERT_NE(track, nullptr) << "Failed to get track for index " << idx;
+
+ CMediaTrack *cTrack = wrap(track);
+ ASSERT_NE(cTrack, nullptr) << "Failed to get track wrapper for index " << idx;
+
+ // Get all the seekable points of a given input
+ MediaBufferGroup *bufferGroup = new MediaBufferGroup();
+ status = cTrack->start(track, bufferGroup->wrap());
+ ASSERT_EQ(OK, (media_status_t)status) << "Failed to start the track";
+ getSeekablePoints(seekablePoints, track);
+ ASSERT_GT(seekablePoints.size(), 0)
+ << "Failed to get seekable points for " << GetParam().first << " extractor";
+
+ AMediaFormat *trackFormat = AMediaFormat_new();
+ ASSERT_NE(trackFormat, nullptr) << "AMediaFormat_new returned null format";
+ status = track->getFormat(trackFormat);
+ ASSERT_EQ(OK, (media_status_t)status) << "Failed to get track meta data";
+
+ bool isOpus = false;
+ const char *mime;
+ AMediaFormat_getString(trackFormat, AMEDIAFORMAT_KEY_MIME, &mime);
+ if (!strcmp(mime, "audio/opus")) isOpus = true;
+ AMediaFormat_delete(trackFormat);
+
+ int32_t seekIdx = 0;
+ size_t seekablePointsSize = seekablePoints.size();
+ for (int32_t mode = CMediaTrackReadOptions::SEEK_PREVIOUS_SYNC;
+ mode <= CMediaTrackReadOptions::SEEK_CLOSEST; mode++) {
+ for (int32_t seekCount = 0; seekCount < kMaxCount; seekCount++) {
+ seekIdx = rand() % seekablePointsSize + 1;
+ if (seekIdx >= seekablePointsSize) seekIdx = seekablePointsSize - 1;
+
+ int64_t seekToTimeStamp = seekablePoints[seekIdx];
+ if (seekablePointsSize > 1) {
+ int64_t prevTimeStamp = seekablePoints[seekIdx - 1];
+ seekToTimeStamp = seekToTimeStamp - ((seekToTimeStamp - prevTimeStamp) >> 3);
+ }
+
+ // Opus has a seekPreRollUs. TimeStamp returned by the
+ // extractor is calculated based on (seekPts - seekPreRollUs).
+ // So we add the preRoll value to the timeStamp we want to seek to.
+ if (isOpus) {
+ seekToTimeStamp += kOpusSeekPreRollUs;
+ }
+
+ MediaTrackHelper::ReadOptions *options = new MediaTrackHelper::ReadOptions(
+ mode | CMediaTrackReadOptions::SEEK, seekToTimeStamp);
+ ASSERT_NE(options, nullptr) << "Cannot create read option";
+
+ MediaBufferHelper *buffer = nullptr;
+ status = track->read(&buffer, options);
+ if (status == AMEDIA_ERROR_END_OF_STREAM) {
+ delete options;
+ continue;
+ }
+ if (buffer) {
+ AMediaFormat *metaData = buffer->meta_data();
+ int64_t timeStamp;
+ AMediaFormat_getInt64(metaData, AMEDIAFORMAT_KEY_TIME_US, &timeStamp);
+ buffer->release();
+
+ // CMediaTrackReadOptions::SEEK is 8. Mask with 0b0111 to recover the actual seek mode.
+ switch (mode & 0x7) {
+ case CMediaTrackReadOptions::SEEK_PREVIOUS_SYNC:
+ if (seekablePointsSize == 1) {
+ EXPECT_EQ(timeStamp, seekablePoints[seekIdx]);
+ } else {
+ EXPECT_EQ(timeStamp, seekablePoints[seekIdx - 1]);
+ }
+ break;
+ case CMediaTrackReadOptions::SEEK_NEXT_SYNC:
+ case CMediaTrackReadOptions::SEEK_CLOSEST_SYNC:
+ case CMediaTrackReadOptions::SEEK_CLOSEST:
+ EXPECT_EQ(timeStamp, seekablePoints[seekIdx]);
+ break;
+ default:
+ break;
+ }
+ }
+ delete options;
+ }
+ }
+ status = cTrack->stop(track);
+ ASSERT_EQ(OK, status) << "Failed to stop the track";
+ delete bufferGroup;
+ delete track;
+ }
+ seekablePoints.clear();
+}
+
+// TODO: (b/145332185)
+// Add MIDI inputs
+INSTANTIATE_TEST_SUITE_P(ExtractorUnitTestAll, ExtractorUnitTest,
+ ::testing::Values(make_pair("aac", "loudsoftaac.aac"),
+ make_pair("amr", "testamr.amr"),
+ make_pair("amr", "amrwb.wav"),
+ make_pair("ogg", "john_cage.ogg"),
+ make_pair("wav", "monotestgsm.wav"),
+ make_pair("mpeg2ts", "segment000001.ts"),
+ make_pair("flac", "sinesweepflac.flac"),
+ make_pair("ogg", "testopus.opus"),
+ make_pair("mkv", "sinesweepvorbis.mkv"),
+ make_pair("mpeg4", "sinesweepoggmp4.mp4"),
+ make_pair("mp3", "sinesweepmp3lame.mp3"),
+ make_pair("mkv", "swirl_144x136_vp9.webm"),
+ make_pair("mkv", "swirl_144x136_vp8.webm"),
+ make_pair("mpeg2ps", "swirl_144x136_mpeg2.mpg"),
+ make_pair("mpeg4", "swirl_132x130_mpeg4.mp4")));
+
+int main(int argc, char **argv) {
+ gEnv = new ExtractorUnitTestEnvironment();
+ ::testing::AddGlobalTestEnvironment(gEnv);
+ ::testing::InitGoogleTest(&argc, argv);
+ int status = gEnv->initFromOptions(argc, argv);
+ if (status == 0) {
+ status = RUN_ALL_TESTS();
+ ALOGV("Test result = %d\n", status);
+ }
+ return status;
+}
diff --git a/media/extractors/tests/ExtractorUnitTestEnvironment.h b/media/extractors/tests/ExtractorUnitTestEnvironment.h
new file mode 100644
index 0000000..fce8fc2
--- /dev/null
+++ b/media/extractors/tests/ExtractorUnitTestEnvironment.h
@@ -0,0 +1,72 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __EXTRACTOR_UNIT_TEST_ENVIRONMENT_H__
+#define __EXTRACTOR_UNIT_TEST_ENVIRONMENT_H__
+
+#include <gtest/gtest.h>
+
+#include <getopt.h>
+
+using namespace std;
+
+class ExtractorUnitTestEnvironment : public ::testing::Environment {
+ public:
+ ExtractorUnitTestEnvironment() : res("/data/local/tmp/") {}
+
+ // Parses the command line arguments
+ int initFromOptions(int argc, char **argv);
+
+ void setRes(const char *_res) { res = _res; }
+
+ const string getRes() const { return res; }
+
+ private:
+ string res;
+};
+
+int ExtractorUnitTestEnvironment::initFromOptions(int argc, char **argv) {
+ static struct option options[] = {{"res", required_argument, 0, 'P'}, {0, 0, 0, 0}};
+
+ while (true) {
+ int index = 0;
+ int c = getopt_long(argc, argv, "P:", options, &index);
+ if (c == -1) {
+ break;
+ }
+
+ switch (c) {
+ case 'P':
+ setRes(optarg);
+ break;
+ default:
+ break;
+ }
+ }
+
+ if (optind < argc) {
+ fprintf(stderr,
+ "unrecognized option: %s\n\n"
+ "-P, --res: Resource files directory location\n",
+ "test options are:\n\n"
+ "-P, --path: Resource files directory location\n",
+ argv[optind ?: 1], argv[0]);
+ return 2;
+ }
+ return 0;
+}
+
+#endif // __EXTRACTOR_UNIT_TEST_ENVIRONMENT_H__
diff --git a/media/extractors/tests/README.md b/media/extractors/tests/README.md
new file mode 100644
index 0000000..6e02d3e
--- /dev/null
+++ b/media/extractors/tests/README.md
@@ -0,0 +1,34 @@
+## Media Testing ##
+---
+#### Extractor :
+The Extractor Test Suite validates the extractors available in the device.
+
+Run the following steps to build the test suite:
+```
+m ExtractorUnitTest
+```
+
+The 32-bit binaries will be created in the following path: ${OUT}/data/nativetest/
+
+The 64-bit binaries will be created in the following path: ${OUT}/data/nativetest64/
+
+To test the 64-bit binary, push it from nativetest64.
+```
+adb push ${OUT}/data/nativetest64/ExtractorUnitTest/ExtractorUnitTest /data/local/tmp/
+```
+
+To test the 32-bit binary, push it from nativetest.
+```
+adb push ${OUT}/data/nativetest/ExtractorUnitTest/ExtractorUnitTest /data/local/tmp/
+```
+
+The resource files for the tests are taken from [here](https://drive.google.com/drive/folders/1Z9nCIRB6pGLvb5mPkF8BURa5Nc6cY9pY). Push these files onto the device for testing.
+Download the extractor folder and push all the files in it to /data/local/tmp/ on the device.
+```
+adb push extractor /data/local/tmp/
+```
+
+usage: ExtractorUnitTest -P \<path_to_folder\>
+```
+adb shell /data/local/tmp/ExtractorUnitTest -P /data/local/tmp/extractor/
+```
diff --git a/media/extractors/wav/Android.bp b/media/extractors/wav/Android.bp
index 51e3c31..8ce5c3f 100644
--- a/media/extractors/wav/Android.bp
+++ b/media/extractors/wav/Android.bp
@@ -1,4 +1,7 @@
cc_library {
+ name: "libwavextractor",
+
+ defaults: ["extractor-defaults"],
srcs: ["WAVExtractor.cpp"],
@@ -8,8 +11,6 @@
shared_libs: [
"libbinder_ndk",
- "liblog",
- "libmediandk",
],
static_libs: [
@@ -17,25 +18,4 @@
"libfifo",
"libstagefright_foundation",
],
-
- name: "libwavextractor",
- relative_install_path: "extractors",
-
- compile_multilib: "first",
-
- cflags: [
- "-Werror",
- "-Wall",
- "-fvisibility=hidden",
- ],
- version_script: "exports.lds",
-
- sanitize: {
- cfi: true,
- misc_undefined: [
- "unsigned-integer-overflow",
- "signed-integer-overflow",
- ],
- },
-
}
diff --git a/media/libaaudio/include/aaudio/AAudio.h b/media/libaaudio/include/aaudio/AAudio.h
index 5bebd61..2f43b22 100644
--- a/media/libaaudio/include/aaudio/AAudio.h
+++ b/media/libaaudio/include/aaudio/AAudio.h
@@ -227,6 +227,8 @@
};
typedef int32_t aaudio_performance_mode_t;
+#define AAUDIO_SYSTEM_USAGE_OFFSET 1000
+
/**
* The USAGE attribute expresses "why" you are playing a sound, what is this sound used for.
* This information is used by certain platforms or routing policies
@@ -297,7 +299,31 @@
/**
* Use this for audio responses to user queries, audio instructions or help utterances.
*/
- AAUDIO_USAGE_ASSISTANT = 16
+ AAUDIO_USAGE_ASSISTANT = 16,
+
+ /**
+ * Use this in case of playing sounds in an emergency.
+ * Privileged MODIFY_AUDIO_ROUTING permission required.
+ */
+ AAUDIO_SYSTEM_USAGE_EMERGENCY = AAUDIO_SYSTEM_USAGE_OFFSET,
+
+ /**
+ * Use this for safety sounds and alerts, for example backup camera obstacle detection.
+ * Privileged MODIFY_AUDIO_ROUTING permission required.
+ */
+ AAUDIO_SYSTEM_USAGE_SAFETY = AAUDIO_SYSTEM_USAGE_OFFSET + 1,
+
+ /**
+ * Use this for vehicle status alerts and information, for example the check engine light.
+ * Privileged MODIFY_AUDIO_ROUTING permission required.
+ */
+ AAUDIO_SYSTEM_USAGE_VEHICLE_STATUS = AAUDIO_SYSTEM_USAGE_OFFSET + 2,
+
+ /**
+ * Use this for traffic announcements, etc.
+ * Privileged MODIFY_AUDIO_ROUTING permission required.
+ */
+ AAUDIO_SYSTEM_USAGE_ANNOUNCEMENT = AAUDIO_SYSTEM_USAGE_OFFSET + 3,
};
typedef int32_t aaudio_usage_t;
@@ -995,10 +1021,26 @@
// Stream Control
// ============================================================
+#if __ANDROID_API__ >= 30
/**
- * Free the resources associated with a stream created by AAudioStreamBuilder_openStream()
+ * Free the audio resources associated with a stream created by
+ * AAudioStreamBuilder_openStream().
+ * AAudioStream_close() should be called at some point after calling
+ * this function.
*
- * Available since API level 26.
+ * After this call, the stream will be in {@link #AAUDIO_STREAM_STATE_CLOSING}.
+ *
+ * @param stream reference provided by AAudioStreamBuilder_openStream()
+ * @return {@link #AAUDIO_OK} or a negative error.
+ */
+AAUDIO_API aaudio_result_t AAudioStream_release(AAudioStream* stream) __INTRODUCED_IN(30);
+#endif // __ANDROID_API__
+
+/**
+ * Delete the internal data structures associated with the stream created
+ * by AAudioStreamBuilder_openStream().
+ *
+ * If AAudioStream_release() has not been called then it will be called automatically.
*
* @param stream reference provided by AAudioStreamBuilder_openStream()
* @return {@link #AAUDIO_OK} or a negative error.
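
A minimal sketch of the two-step teardown the new documentation describes on API 30+: release frees the audio resources early, and a later close deletes the wrapper (calling release itself if the app skipped it). All calls are from the public AAudio NDK API; the function below is illustrative.

```cpp
#include <aaudio/AAudio.h>

// Sketch: early release followed by a later close.
void tearDownStream(AAudioStream *stream) {
    // Free the hardware/system resources as soon as the stream is done;
    // per the implementation, an active stream is stopped before release.
    aaudio_result_t result = AAudioStream_release(stream);
    if (result != AAUDIO_OK) {
        // e.g. called from the data callback; retry from another thread.
    }
    // Later: delete the internal data structures. If release was skipped,
    // close performs it automatically.
    AAudioStream_close(stream);
}
```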
diff --git a/media/libaaudio/src/Android.bp b/media/libaaudio/src/Android.bp
index baada22..8753aa9 100644
--- a/media/libaaudio/src/Android.bp
+++ b/media/libaaudio/src/Android.bp
@@ -44,14 +44,6 @@
sanitize: {
integer_overflow: true,
misc_undefined: ["bounds"],
- diag: {
- integer_overflow: true,
- misc_undefined: ["bounds"],
- no_recover: [
- "bounds",
- "integer",
- ],
- },
},
stubs: {
@@ -137,14 +129,5 @@
sanitize: {
integer_overflow: true,
misc_undefined: ["bounds"],
- diag: {
- integer_overflow: true,
- misc_undefined: ["bounds"],
- no_recover: [
- "bounds",
- "integer",
- ],
- },
},
-
}
diff --git a/media/libaaudio/src/client/AudioStreamInternal.cpp b/media/libaaudio/src/client/AudioStreamInternal.cpp
index bfad254..b6548e6 100644
--- a/media/libaaudio/src/client/AudioStreamInternal.cpp
+++ b/media/libaaudio/src/client/AudioStreamInternal.cpp
@@ -258,18 +258,17 @@
return result;
error:
- close();
+ releaseCloseFinal();
return result;
}
// This must be called under mStreamLock.
-aaudio_result_t AudioStreamInternal::close() {
+aaudio_result_t AudioStreamInternal::release_l() {
aaudio_result_t result = AAUDIO_OK;
ALOGV("%s(): mServiceStreamHandle = 0x%08X", __func__, mServiceStreamHandle);
if (mServiceStreamHandle != AAUDIO_HANDLE_INVALID) {
- // Don't close a stream while it is running.
aaudio_stream_state_t currentState = getState();
- // Don't close a stream while it is running. Stop it first.
+ // Don't release a stream while it is running. Stop it first.
// If DISCONNECTED then we should still try to stop in case the
// error callback is still running.
if (isActive() || currentState == AAUDIO_STREAM_STATE_DISCONNECTED) {
@@ -282,10 +281,8 @@
mServiceInterface.closeStream(serviceStreamHandle);
delete[] mCallbackBuffer;
mCallbackBuffer = nullptr;
-
- setState(AAUDIO_STREAM_STATE_CLOSED);
result = mEndPointParcelable.close();
- aaudio_result_t result2 = AudioStream::close();
+ aaudio_result_t result2 = AudioStream::release_l();
return (result != AAUDIO_OK) ? result : result2;
} else {
return AAUDIO_ERROR_INVALID_HANDLE;
diff --git a/media/libaaudio/src/client/AudioStreamInternal.h b/media/libaaudio/src/client/AudioStreamInternal.h
index 596d37f..8843a8a 100644
--- a/media/libaaudio/src/client/AudioStreamInternal.h
+++ b/media/libaaudio/src/client/AudioStreamInternal.h
@@ -58,7 +58,7 @@
aaudio_result_t open(const AudioStreamBuilder &builder) override;
- aaudio_result_t close() override;
+ aaudio_result_t release_l() override;
aaudio_result_t setBufferSize(int32_t requestedFrames) override;
diff --git a/media/libaaudio/src/client/AudioStreamInternalPlay.cpp b/media/libaaudio/src/client/AudioStreamInternalPlay.cpp
index dc9f48c..536009a 100644
--- a/media/libaaudio/src/client/AudioStreamInternalPlay.cpp
+++ b/media/libaaudio/src/client/AudioStreamInternalPlay.cpp
@@ -49,7 +49,7 @@
getDeviceChannelCount());
if (result != AAUDIO_OK) {
- close();
+ releaseCloseFinal();
}
// Sample rate is constrained to common values by now and should not overflow.
int32_t numFrames = kRampMSec * getSampleRate() / AAUDIO_MILLIS_PER_SECOND;
diff --git a/media/libaaudio/src/core/AAudioAudio.cpp b/media/libaaudio/src/core/AAudioAudio.cpp
index 184e9cb..8965875 100644
--- a/media/libaaudio/src/core/AAudioAudio.cpp
+++ b/media/libaaudio/src/core/AAudioAudio.cpp
@@ -25,7 +25,6 @@
#include <aaudio/AAudio.h>
#include <aaudio/AAudioTesting.h>
-
#include "AudioClock.h"
#include "AudioGlobal.h"
#include "AudioStreamBuilder.h"
@@ -231,21 +230,42 @@
return AAUDIO_ERROR_NULL;
}
-AAUDIO_API aaudio_result_t AAudioStream_close(AAudioStream* stream)
-{
+AAUDIO_API aaudio_result_t AAudioStream_release(AAudioStream* stream) {
aaudio_result_t result = AAUDIO_ERROR_NULL;
- AudioStream *audioStream = convertAAudioStreamToAudioStream(stream);
+ AudioStream* audioStream = convertAAudioStreamToAudioStream(stream);
if (audioStream != nullptr) {
aaudio_stream_id_t id = audioStream->getId();
ALOGD("%s(s#%u) called ---------------", __func__, id);
- result = audioStream->safeClose();
- // Close will only fail if called illegally, for example, from a callback.
+ result = audioStream->safeRelease();
+ // safeRelease() will only fail if called illegally, for example, from a callback.
+ // That would result in the release of an active stream, which would cause a crash.
+ if (result != AAUDIO_OK) {
+ ALOGW("%s(s#%u) failed. Release it from another thread.",
+ __func__, id);
+ }
+ ALOGD("%s(s#%u) returned %d %s ---------", __func__,
+ id, result, AAudio_convertResultToText(result));
+ }
+ return result;
+}
+
+AAUDIO_API aaudio_result_t AAudioStream_close(AAudioStream* stream) {
+ aaudio_result_t result = AAUDIO_ERROR_NULL;
+ AudioStream* audioStream = convertAAudioStreamToAudioStream(stream);
+ if (audioStream != nullptr) {
+ aaudio_stream_id_t id = audioStream->getId();
+ ALOGD("%s(s#%u) called ---------------", __func__, id);
+ result = audioStream->safeRelease();
+ // safeRelease will only fail if called illegally, for example, from a callback.
// That would result in deleting an active stream, which would cause a crash.
- if (result == AAUDIO_OK) {
- audioStream->unregisterPlayerBase();
- delete audioStream;
+ if (result != AAUDIO_OK) {
+ ALOGW("%s(s#%u) failed. Close it from another thread.",
+ __func__, id);
} else {
- ALOGW("%s attempt to close failed. Close it from another thread.", __func__);
+ audioStream->unregisterPlayerBase();
+ // Mark CLOSED to keep destructors from asserting.
+ audioStream->closeFinal();
+ delete audioStream;
}
ALOGD("%s(s#%u) returned %d ---------", __func__, id, result);
}
diff --git a/media/libaaudio/src/core/AAudioStreamParameters.cpp b/media/libaaudio/src/core/AAudioStreamParameters.cpp
index 58058f5..5f45261 100644
--- a/media/libaaudio/src/core/AAudioStreamParameters.cpp
+++ b/media/libaaudio/src/core/AAudioStreamParameters.cpp
@@ -133,6 +133,10 @@
case AAUDIO_USAGE_ASSISTANCE_SONIFICATION:
case AAUDIO_USAGE_GAME:
case AAUDIO_USAGE_ASSISTANT:
+ case AAUDIO_SYSTEM_USAGE_EMERGENCY:
+ case AAUDIO_SYSTEM_USAGE_SAFETY:
+ case AAUDIO_SYSTEM_USAGE_VEHICLE_STATUS:
+ case AAUDIO_SYSTEM_USAGE_ANNOUNCEMENT:
break; // valid
default:
ALOGD("usage not valid = %d", mUsage);
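
The check above only whitelists the new system usages; opening such a stream is still gated by the privileged MODIFY_AUDIO_ROUTING permission. Here is a hedged sketch of requesting one through the existing builder API; for unprivileged callers the rejection is expected at openStream() from the policy layer, not from this parameter validation.

```cpp
#include <aaudio/AAudio.h>

// Sketch: open a stream with a privileged system usage.
aaudio_result_t openSafetyStream(AAudioStream **streamOut) {
    AAudioStreamBuilder *builder = nullptr;
    aaudio_result_t result = AAudio_createStreamBuilder(&builder);
    if (result != AAUDIO_OK) return result;

    AAudioStreamBuilder_setUsage(builder, AAUDIO_SYSTEM_USAGE_SAFETY);
    result = AAudioStreamBuilder_openStream(builder, streamOut);

    AAudioStreamBuilder_delete(builder);
    return result;
}
```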
diff --git a/media/libaaudio/src/core/AudioGlobal.cpp b/media/libaaudio/src/core/AudioGlobal.cpp
index e6d9a0d..d299761 100644
--- a/media/libaaudio/src/core/AudioGlobal.cpp
+++ b/media/libaaudio/src/core/AudioGlobal.cpp
@@ -87,9 +87,9 @@
AAUDIO_CASE_ENUM(AAUDIO_STREAM_STATE_FLUSHED);
AAUDIO_CASE_ENUM(AAUDIO_STREAM_STATE_STOPPING);
AAUDIO_CASE_ENUM(AAUDIO_STREAM_STATE_STOPPED);
- AAUDIO_CASE_ENUM(AAUDIO_STREAM_STATE_DISCONNECTED);
AAUDIO_CASE_ENUM(AAUDIO_STREAM_STATE_CLOSING);
AAUDIO_CASE_ENUM(AAUDIO_STREAM_STATE_CLOSED);
+ AAUDIO_CASE_ENUM(AAUDIO_STREAM_STATE_DISCONNECTED);
}
return "Unrecognized AAudio state.";
}
diff --git a/media/libaaudio/src/core/AudioStream.cpp b/media/libaaudio/src/core/AudioStream.cpp
index 1560e0c..f51db70 100644
--- a/media/libaaudio/src/core/AudioStream.cpp
+++ b/media/libaaudio/src/core/AudioStream.cpp
@@ -249,25 +249,29 @@
return requestStop();
}
-aaudio_result_t AudioStream::safeClose() {
- // This get temporarily unlocked in the close when joining callback threads.
+aaudio_result_t AudioStream::safeRelease() {
+ // This gets temporarily unlocked in release() when joining callback threads.

std::lock_guard<std::mutex> lock(mStreamLock);
if (collidesWithCallback()) {
ALOGE("%s cannot be called from a callback!", __func__);
return AAUDIO_ERROR_INVALID_STATE;
}
- return close();
+ if (getState() == AAUDIO_STREAM_STATE_CLOSING) {
+ return AAUDIO_OK;
+ }
+ return release_l();
}
void AudioStream::setState(aaudio_stream_state_t state) {
- ALOGV("%s(%d) from %d to %d", __func__, getId(), mState, state);
+ ALOGD("%s(s#%d) from %d to %d", __func__, getId(), mState, state);
// CLOSED is a final state
if (mState == AAUDIO_STREAM_STATE_CLOSED) {
ALOGE("%s(%d) tried to set to %d but already CLOSED", __func__, getId(), state);
- // Once DISCONNECTED, we can only move to CLOSED state.
+ // Once DISCONNECTED, we can only move to CLOSING or CLOSED state.
} else if (mState == AAUDIO_STREAM_STATE_DISCONNECTED
- && state != AAUDIO_STREAM_STATE_CLOSED) {
+ && !(state == AAUDIO_STREAM_STATE_CLOSING
+ || state == AAUDIO_STREAM_STATE_CLOSED)) {
ALOGE("%s(%d) tried to set to %d but already DISCONNECTED", __func__, getId(), state);
} else {
diff --git a/media/libaaudio/src/core/AudioStream.h b/media/libaaudio/src/core/AudioStream.h
index b4ffcf2..9bda41b 100644
--- a/media/libaaudio/src/core/AudioStream.h
+++ b/media/libaaudio/src/core/AudioStream.h
@@ -115,13 +115,32 @@
virtual aaudio_result_t open(const AudioStreamBuilder& builder);
/**
- * Close the stream and deallocate any resources from the open() call.
- * It is safe to call close() multiple times.
+ * Free any hardware or system resources from the open() call.
+ * It is safe to call release_l() multiple times.
*/
- virtual aaudio_result_t close() {
+ virtual aaudio_result_t release_l() {
+ setState(AAUDIO_STREAM_STATE_CLOSING);
return AAUDIO_OK;
}
+ aaudio_result_t closeFinal() {
+ // State is checked by destructor.
+ setState(AAUDIO_STREAM_STATE_CLOSED);
+ return AAUDIO_OK;
+ }
+
+ /**
+ * Release then close the stream.
+ * @return AAUDIO_OK or negative error.
+ */
+ aaudio_result_t releaseCloseFinal() {
+ aaudio_result_t result = release_l(); // TODO review locking
+ if (result == AAUDIO_OK) {
+ result = closeFinal();
+ }
+ return result;
+ }
+
// This is only used to identify a stream in the logs without
// revealing any pointers.
aaudio_stream_id_t getId() {
@@ -373,7 +392,7 @@
*/
aaudio_result_t systemStopFromCallback();
- aaudio_result_t safeClose();
+ aaudio_result_t safeRelease();
protected:
diff --git a/media/libaaudio/src/legacy/AudioStreamRecord.cpp b/media/libaaudio/src/legacy/AudioStreamRecord.cpp
index 54af580..2ed82d2 100644
--- a/media/libaaudio/src/legacy/AudioStreamRecord.cpp
+++ b/media/libaaudio/src/legacy/AudioStreamRecord.cpp
@@ -182,7 +182,7 @@
// Did we get a valid track?
status_t status = mAudioRecord->initCheck();
if (status != OK) {
- close();
+ releaseCloseFinal();
ALOGE("open(), initCheck() returned %d", status);
return AAudioConvert_androidToAAudioResult(status);
}
@@ -278,16 +278,17 @@
return AAUDIO_OK;
}
-aaudio_result_t AudioStreamRecord::close()
-{
- // TODO add close() or release() to AudioRecord API then call it from here
- if (getState() != AAUDIO_STREAM_STATE_CLOSED) {
+aaudio_result_t AudioStreamRecord::release_l() {
+ // TODO add close() or release() to AudioFlinger's AudioRecord API.
+ // Then call it from here
+ if (getState() != AAUDIO_STREAM_STATE_CLOSING) {
mAudioRecord->removeAudioDeviceCallback(mDeviceCallback);
mAudioRecord.clear();
- setState(AAUDIO_STREAM_STATE_CLOSED);
+ mFixedBlockWriter.close();
+ return AudioStream::release_l();
+ } else {
+ return AAUDIO_OK; // already released
}
- mFixedBlockWriter.close();
- return AudioStream::close();
}
const void * AudioStreamRecord::maybeConvertDeviceData(const void *audioData, int32_t numFrames) {
diff --git a/media/libaaudio/src/legacy/AudioStreamRecord.h b/media/libaaudio/src/legacy/AudioStreamRecord.h
index 2f41d34..c5944c7 100644
--- a/media/libaaudio/src/legacy/AudioStreamRecord.h
+++ b/media/libaaudio/src/legacy/AudioStreamRecord.h
@@ -38,7 +38,7 @@
virtual ~AudioStreamRecord();
aaudio_result_t open(const AudioStreamBuilder & builder) override;
- aaudio_result_t close() override;
+ aaudio_result_t release_l() override;
aaudio_result_t requestStart() override;
aaudio_result_t requestStop() override;
diff --git a/media/libaaudio/src/legacy/AudioStreamTrack.cpp b/media/libaaudio/src/legacy/AudioStreamTrack.cpp
index 094cdd1..00963d6 100644
--- a/media/libaaudio/src/legacy/AudioStreamTrack.cpp
+++ b/media/libaaudio/src/legacy/AudioStreamTrack.cpp
@@ -176,7 +176,7 @@
// Did we get a valid track?
status_t status = mAudioTrack->initCheck();
if (status != NO_ERROR) {
- close();
+ releaseCloseFinal();
ALOGE("open(), initCheck() returned %d", status);
return AAudioConvert_androidToAAudioResult(status);
}
@@ -239,14 +239,18 @@
return AAUDIO_OK;
}
-aaudio_result_t AudioStreamTrack::close()
-{
- if (getState() != AAUDIO_STREAM_STATE_CLOSED) {
+aaudio_result_t AudioStreamTrack::release_l() {
+ if (getState() != AAUDIO_STREAM_STATE_CLOSING) {
mAudioTrack->removeAudioDeviceCallback(mDeviceCallback);
- setState(AAUDIO_STREAM_STATE_CLOSED);
+ // TODO Investigate why clear() causes a hang in test_various.cpp
+ // if I call close() from a data callback.
+ // But the same thing in AudioRecord is OK!
+ // mAudioTrack.clear();
+ mFixedBlockReader.close();
+ return AudioStream::release_l();
+ } else {
+ return AAUDIO_OK; // already released
}
- mFixedBlockReader.close();
- return AAUDIO_OK;
}
void AudioStreamTrack::processCallback(int event, void *info) {
diff --git a/media/libaaudio/src/legacy/AudioStreamTrack.h b/media/libaaudio/src/legacy/AudioStreamTrack.h
index 68608de..550f693 100644
--- a/media/libaaudio/src/legacy/AudioStreamTrack.h
+++ b/media/libaaudio/src/legacy/AudioStreamTrack.h
@@ -41,7 +41,7 @@
aaudio_result_t open(const AudioStreamBuilder & builder) override;
- aaudio_result_t close() override;
+ aaudio_result_t release_l() override;
aaudio_result_t requestStart() override;
aaudio_result_t requestPause() override;
diff --git a/media/libaaudio/src/libaaudio.map.txt b/media/libaaudio/src/libaaudio.map.txt
index a87ede3..2e00aa5 100644
--- a/media/libaaudio/src/libaaudio.map.txt
+++ b/media/libaaudio/src/libaaudio.map.txt
@@ -22,6 +22,7 @@
AAudioStreamBuilder_setInputPreset; # introduced=28
AAudioStreamBuilder_setAllowedCapturePolicy; # introduced=29
AAudioStreamBuilder_setSessionId; # introduced=28
+ AAudioStreamBuilder_setPrivacySensitive; # introduced=30
AAudioStreamBuilder_openStream;
AAudioStreamBuilder_delete;
AAudioStream_close;
@@ -56,6 +57,8 @@
AAudioStream_getSessionId; # introduced=28
AAudioStream_getTimestamp;
AAudioStream_isMMapUsed;
+ AAudioStream_isPrivacySensitive; # introduced=30
+ AAudioStream_release; # introduced=30
local:
*;
};
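The newly exported AAudioStreamBuilder_setPrivacySensitive / AAudioStream_isPrivacySensitive pair is exercised by test_attributes.cpp below; a capture-side sketch of the expected use (illustrative only):

    AAudioStreamBuilder *builder = nullptr;
    AAudio_createStreamBuilder(&builder);
    AAudioStreamBuilder_setDirection(builder, AAUDIO_DIRECTION_INPUT);
    AAudioStreamBuilder_setInputPreset(builder, AAUDIO_INPUT_PRESET_VOICE_COMMUNICATION);
    AAudioStreamBuilder_setPrivacySensitive(builder, true);   // explicit opt-in

    AAudioStream *stream = nullptr;
    if (AAudioStreamBuilder_openStream(builder, &stream) == AAUDIO_OK) {
        bool sensitive = AAudioStream_isPrivacySensitive(stream);  // expected: true
        (void) sensitive;
        AAudioStream_close(stream);
    }
    AAudioStreamBuilder_delete(builder);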
diff --git a/media/libaaudio/src/utility/AAudioUtilities.cpp b/media/libaaudio/src/utility/AAudioUtilities.cpp
index ef89697..9007b10 100644
--- a/media/libaaudio/src/utility/AAudioUtilities.cpp
+++ b/media/libaaudio/src/utility/AAudioUtilities.cpp
@@ -183,6 +183,10 @@
STATIC_ASSERT(AAUDIO_USAGE_ASSISTANCE_SONIFICATION == AUDIO_USAGE_ASSISTANCE_SONIFICATION);
STATIC_ASSERT(AAUDIO_USAGE_GAME == AUDIO_USAGE_GAME);
STATIC_ASSERT(AAUDIO_USAGE_ASSISTANT == AUDIO_USAGE_ASSISTANT);
+ STATIC_ASSERT(AAUDIO_SYSTEM_USAGE_EMERGENCY == AUDIO_USAGE_EMERGENCY);
+ STATIC_ASSERT(AAUDIO_SYSTEM_USAGE_SAFETY == AUDIO_USAGE_SAFETY);
+ STATIC_ASSERT(AAUDIO_SYSTEM_USAGE_VEHICLE_STATUS == AUDIO_USAGE_VEHICLE_STATUS);
+ STATIC_ASSERT(AAUDIO_SYSTEM_USAGE_ANNOUNCEMENT == AUDIO_USAGE_ANNOUNCEMENT);
if (usage == AAUDIO_UNSPECIFIED) {
usage = AAUDIO_USAGE_MEDIA;
}
diff --git a/media/libaaudio/tests/test_attributes.cpp b/media/libaaudio/tests/test_attributes.cpp
index 32ee2a3..d540866 100644
--- a/media/libaaudio/tests/test_attributes.cpp
+++ b/media/libaaudio/tests/test_attributes.cpp
@@ -33,6 +33,7 @@
aaudio_content_type_t contentType,
aaudio_input_preset_t preset = DONT_SET,
aaudio_allowed_capture_policy_t capturePolicy = DONT_SET,
+ int privacyMode = DONT_SET,
aaudio_direction_t direction = AAUDIO_DIRECTION_OUTPUT) {
float *buffer = new float[kNumFrames * kChannelCount];
@@ -60,6 +61,9 @@
if (capturePolicy != DONT_SET) {
AAudioStreamBuilder_setAllowedCapturePolicy(aaudioBuilder, capturePolicy);
}
+ if (privacyMode != DONT_SET) {
+ AAudioStreamBuilder_setPrivacySensitive(aaudioBuilder, (bool)privacyMode);
+ }
// Create an AAudioStream using the Builder.
ASSERT_EQ(AAUDIO_OK, AAudioStreamBuilder_openStream(aaudioBuilder, &aaudioStream));
@@ -90,6 +94,13 @@
: preset;
EXPECT_EQ(expectedCapturePolicy, AAudioStream_getAllowedCapturePolicy(aaudioStream));
+ bool expectedPrivacyMode =
+ (privacyMode == DONT_SET) ?
+ ((preset == AAUDIO_INPUT_PRESET_VOICE_COMMUNICATION
+ || preset == AAUDIO_INPUT_PRESET_CAMCORDER) ? true : false) :
+ privacyMode;
+ EXPECT_EQ(expectedPrivacyMode, AAudioStream_isPrivacySensitive(aaudioStream));
+
EXPECT_EQ(AAUDIO_OK, AAudioStream_requestStart(aaudioStream));
if (direction == AAUDIO_DIRECTION_INPUT) {
@@ -120,7 +131,11 @@
AAUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE,
AAUDIO_USAGE_ASSISTANCE_SONIFICATION,
AAUDIO_USAGE_GAME,
- AAUDIO_USAGE_ASSISTANT
+ AAUDIO_USAGE_ASSISTANT,
+ AAUDIO_SYSTEM_USAGE_EMERGENCY,
+ AAUDIO_SYSTEM_USAGE_SAFETY,
+ AAUDIO_SYSTEM_USAGE_VEHICLE_STATUS,
+ AAUDIO_SYSTEM_USAGE_ANNOUNCEMENT
};
static const aaudio_content_type_t sContentypes[] = {
@@ -151,6 +166,12 @@
AAUDIO_ALLOW_CAPTURE_BY_NONE,
};
+static const int sPrivacyModes[] = {
+ DONT_SET,
+ false,
+ true,
+};
+
static void checkAttributesUsage(aaudio_performance_mode_t perfMode) {
for (aaudio_usage_t usage : sUsages) {
checkAttributes(perfMode, usage, DONT_SET);
@@ -170,6 +191,7 @@
DONT_SET,
inputPreset,
DONT_SET,
+ DONT_SET,
AAUDIO_DIRECTION_INPUT);
}
}
@@ -185,6 +207,18 @@
}
}
+static void checkAttributesPrivacySensitive(aaudio_performance_mode_t perfMode) {
+ for (int privacyMode : sPrivacyModes) {
+ checkAttributes(perfMode,
+ DONT_SET,
+ DONT_SET,
+ DONT_SET,
+ DONT_SET,
+ privacyMode,
+ AAUDIO_DIRECTION_INPUT);
+ }
+}
+
TEST(test_attributes, aaudio_usage_perfnone) {
checkAttributesUsage(AAUDIO_PERFORMANCE_MODE_NONE);
}
@@ -216,3 +250,7 @@
TEST(test_attributes, aaudio_allowed_capture_policy_lowlat) {
checkAttributesAllowedCapturePolicy(AAUDIO_PERFORMANCE_MODE_LOW_LATENCY);
}
+
+TEST(test_attributes, aaudio_allowed_privacy_sensitive_lowlat) {
+ checkAttributesPrivacySensitive(AAUDIO_PERFORMANCE_MODE_LOW_LATENCY);
+}
diff --git a/media/libaaudio/tests/test_various.cpp b/media/libaaudio/tests/test_various.cpp
index 935d88b..5bb1046 100644
--- a/media/libaaudio/tests/test_various.cpp
+++ b/media/libaaudio/tests/test_various.cpp
@@ -40,10 +40,62 @@
return AAUDIO_CALLBACK_RESULT_CONTINUE;
}
-// Test AAudioStream_setBufferSizeInFrames()
-
constexpr int64_t NANOS_PER_MILLISECOND = 1000 * 1000;
+void checkReleaseThenClose(aaudio_performance_mode_t perfMode,
+ aaudio_sharing_mode_t sharingMode) {
+ AAudioStreamBuilder* aaudioBuilder = nullptr;
+ AAudioStream* aaudioStream = nullptr;
+
+ // Use an AAudioStreamBuilder to contain requested parameters.
+ ASSERT_EQ(AAUDIO_OK, AAudio_createStreamBuilder(&aaudioBuilder));
+
+ // Request stream properties.
+ AAudioStreamBuilder_setDataCallback(aaudioBuilder,
+ NoopDataCallbackProc,
+ nullptr);
+ AAudioStreamBuilder_setPerformanceMode(aaudioBuilder, perfMode);
+ AAudioStreamBuilder_setSharingMode(aaudioBuilder, sharingMode);
+
+ // Create an AAudioStream using the Builder.
+ ASSERT_EQ(AAUDIO_OK,
+ AAudioStreamBuilder_openStream(aaudioBuilder, &aaudioStream));
+ AAudioStreamBuilder_delete(aaudioBuilder);
+
+ ASSERT_EQ(AAUDIO_OK, AAudioStream_requestStart(aaudioStream));
+
+ sleep(1);
+
+ EXPECT_EQ(AAUDIO_OK, AAudioStream_requestStop(aaudioStream));
+
+ EXPECT_EQ(AAUDIO_OK, AAudioStream_release(aaudioStream));
+ aaudio_stream_state_t state = AAudioStream_getState(aaudioStream);
+ EXPECT_EQ(AAUDIO_STREAM_STATE_CLOSING, state);
+
+ // We should be able to call this again without crashing.
+ EXPECT_EQ(AAUDIO_OK, AAudioStream_release(aaudioStream));
+ state = AAudioStream_getState(aaudioStream);
+ EXPECT_EQ(AAUDIO_STREAM_STATE_CLOSING, state);
+
+ EXPECT_EQ(AAUDIO_OK, AAudioStream_close(aaudioStream));
+}
+
+TEST(test_various, aaudio_release_close_none) {
+ checkReleaseThenClose(AAUDIO_PERFORMANCE_MODE_NONE,
+ AAUDIO_SHARING_MODE_SHARED);
+ // No EXCLUSIVE streams with MODE_NONE.
+}
+
+TEST(test_various, aaudio_release_close_low_shared) {
+ checkReleaseThenClose(AAUDIO_PERFORMANCE_MODE_LOW_LATENCY,
+ AAUDIO_SHARING_MODE_SHARED);
+}
+
+TEST(test_various, aaudio_release_close_low_exclusive) {
+ checkReleaseThenClose(AAUDIO_PERFORMANCE_MODE_LOW_LATENCY,
+ AAUDIO_SHARING_MODE_EXCLUSIVE);
+}
+
enum FunctionToCall {
CALL_START, CALL_STOP, CALL_PAUSE, CALL_FLUSH
};
diff --git a/media/libaudioclient/AudioEffect.cpp b/media/libaudioclient/AudioEffect.cpp
index 7d5b690..a1b141b 100644
--- a/media/libaudioclient/AudioEffect.cpp
+++ b/media/libaudioclient/AudioEffect.cpp
@@ -138,6 +138,7 @@
mIEffectClient = new EffectClient(this);
mClientPid = IPCThreadState::self()->getCallingPid();
+ mClientUid = IPCThreadState::self()->getCallingUid();
iEffect = audioFlinger->createEffect((effect_descriptor_t *)&mDescriptor,
mIEffectClient, priority, io, mSessionId, device, mOpPackageName, mClientPid,
@@ -179,7 +180,7 @@
mStatus, mEnabled, mClientPid);
if (!audio_is_global_session(mSessionId)) {
- AudioSystem::acquireAudioSessionId(mSessionId, mClientPid);
+ AudioSystem::acquireAudioSessionId(mSessionId, mClientPid, mClientUid);
}
return mStatus;
diff --git a/media/libaudioclient/AudioPolicy.cpp b/media/libaudioclient/AudioPolicy.cpp
index 06fc23c..d468239 100644
--- a/media/libaudioclient/AudioPolicy.cpp
+++ b/media/libaudioclient/AudioPolicy.cpp
@@ -53,6 +53,10 @@
case RULE_EXCLUDE_UID:
mValue.mUid = (uid_t) parcel->readInt32();
break;
+ case RULE_MATCH_USERID:
+ case RULE_EXCLUDE_USERID:
+ mValue.mUserId = (int) parcel->readInt32();
+ break;
default:
ALOGE("Trying to build AudioMixMatchCriterion from unknown rule %d", mRule);
return BAD_VALUE;
@@ -163,9 +167,50 @@
return false;
}
+void AudioMix::setExcludeUserId(int userId) const {
+ AudioMixMatchCriterion crit;
+ crit.mRule = RULE_EXCLUDE_USERID;
+ crit.mValue.mUserId = userId;
+ mCriteria.add(crit);
+}
+
+void AudioMix::setMatchUserId(int userId) const {
+ AudioMixMatchCriterion crit;
+ crit.mRule = RULE_MATCH_USERID;
+ crit.mValue.mUserId = userId;
+ mCriteria.add(crit);
+}
+
+bool AudioMix::hasUserIdRule(bool match, int userId) const {
+ const uint32_t rule = match ? RULE_MATCH_USERID : RULE_EXCLUDE_USERID;
+ for (size_t i = 0; i < mCriteria.size(); i++) {
+ if (mCriteria[i].mRule == rule
+ && mCriteria[i].mValue.mUserId == userId) {
+ return true;
+ }
+ }
+ return false;
+}
+
+bool AudioMix::hasMatchUserIdRule() const {
+ for (size_t i = 0; i < mCriteria.size(); i++) {
+ if (mCriteria[i].mRule == RULE_MATCH_USERID) {
+ return true;
+ }
+ }
+ return false;
+}
+
bool AudioMix::isDeviceAffinityCompatible() const {
return ((mMixType == MIX_TYPE_PLAYERS)
&& (mRouteFlags == MIX_ROUTE_FLAG_RENDER));
}
+bool AudioMix::hasMatchingRuleForUsage(std::function<bool (audio_usage_t)>const& func) const {
+ return std::any_of(mCriteria.begin(), mCriteria.end(), [func](auto& criterion) {
+ return criterion.mRule == RULE_MATCH_ATTRIBUTE_USAGE
+ && func(criterion.mValue.mUsage);
+ });
+}
+
} // namespace android
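The userId helpers above mirror the existing uid helpers; a sketch of the intended use (illustrative only, `mix` assumed to be an already-configured AudioMix):

    mix.setMatchUserId(10);                                       // adds a RULE_MATCH_USERID criterion
    bool matchesUser10 = mix.hasUserIdRule(true /*match*/, 10);   // true
    bool hasAnyUserIdMatch = mix.hasMatchUserIdRule();            // true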
diff --git a/media/libaudioclient/AudioRecord.cpp b/media/libaudioclient/AudioRecord.cpp
index a438c77..981cfee 100644
--- a/media/libaudioclient/AudioRecord.cpp
+++ b/media/libaudioclient/AudioRecord.cpp
@@ -186,7 +186,7 @@
IPCThreadState::self()->flushCommands();
ALOGV("%s(%d): releasing session id %d",
__func__, mPortId, mSessionId);
- AudioSystem::releaseAudioSessionId(mSessionId, -1 /*pid*/);
+ AudioSystem::releaseAudioSessionId(mSessionId, mClientPid);
}
}
@@ -361,7 +361,7 @@
mMarkerReached = false;
mNewPosition = 0;
mUpdatePeriod = 0;
- AudioSystem::acquireAudioSessionId(mSessionId, -1);
+ AudioSystem::acquireAudioSessionId(mSessionId, mClientPid, mClientUid);
mSequence = 1;
mObservedSequence = mSequence;
mInOverrun = false;
diff --git a/media/libaudioclient/AudioSystem.cpp b/media/libaudioclient/AudioSystem.cpp
index 0742091..1769062 100644
--- a/media/libaudioclient/AudioSystem.cpp
+++ b/media/libaudioclient/AudioSystem.cpp
@@ -36,10 +36,11 @@
// client singleton for AudioFlinger binder interface
Mutex AudioSystem::gLock;
+Mutex AudioSystem::gLockErrorCallbacks;
Mutex AudioSystem::gLockAPS;
sp<IAudioFlinger> AudioSystem::gAudioFlinger;
sp<AudioSystem::AudioFlingerClient> AudioSystem::gAudioFlingerClient;
-audio_error_callback AudioSystem::gAudioErrorCallback = NULL;
+std::set<audio_error_callback> AudioSystem::gAudioErrorCallbacks;
dynamic_policy_callback AudioSystem::gDynPolicyCallback = NULL;
record_config_callback AudioSystem::gRecordConfigCallback = NULL;
@@ -63,9 +64,7 @@
if (gAudioFlingerClient == NULL) {
gAudioFlingerClient = new AudioFlingerClient();
} else {
- if (gAudioErrorCallback) {
- gAudioErrorCallback(NO_ERROR);
- }
+ reportError(NO_ERROR);
}
binder->linkToDeath(gAudioFlingerClient);
gAudioFlinger = interface_cast<IAudioFlinger>(binder);
@@ -434,11 +433,11 @@
return af->newAudioUniqueId(use);
}
-void AudioSystem::acquireAudioSessionId(audio_session_t audioSession, pid_t pid)
+void AudioSystem::acquireAudioSessionId(audio_session_t audioSession, pid_t pid, uid_t uid)
{
const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
if (af != 0) {
- af->acquireAudioSessionId(audioSession, pid);
+ af->acquireAudioSessionId(audioSession, pid, uid);
}
}
@@ -500,19 +499,16 @@
void AudioSystem::AudioFlingerClient::binderDied(const wp<IBinder>& who __unused)
{
- audio_error_callback cb = NULL;
{
Mutex::Autolock _l(AudioSystem::gLock);
AudioSystem::gAudioFlinger.clear();
- cb = gAudioErrorCallback;
}
// clear output handles and stream to output map caches
clearIoCache();
- if (cb) {
- cb(DEAD_OBJECT);
- }
+ reportError(DEAD_OBJECT);
+
ALOGW("AudioFlinger server died!");
}
@@ -717,10 +713,23 @@
return NO_ERROR;
}
-/* static */ void AudioSystem::setErrorCallback(audio_error_callback cb)
+/* static */ uintptr_t AudioSystem::addErrorCallback(audio_error_callback cb)
{
- Mutex::Autolock _l(gLock);
- gAudioErrorCallback = cb;
+ Mutex::Autolock _l(gLockErrorCallbacks);
+ gAudioErrorCallbacks.insert(cb);
+ return reinterpret_cast<uintptr_t>(cb);
+}
+
+/* static */ void AudioSystem::removeErrorCallback(uintptr_t cb) {
+ Mutex::Autolock _l(gLockErrorCallbacks);
+ gAudioErrorCallbacks.erase(reinterpret_cast<audio_error_callback>(cb));
+}
+
+/* static */ void AudioSystem::reportError(status_t err) {
+ Mutex::Autolock _l(gLockErrorCallbacks);
+ for (auto callback : gAudioErrorCallbacks) {
+ callback(err);
+ }
}
/*static*/ void AudioSystem::setDynPolicyCallback(dynamic_policy_callback cb)
@@ -1133,6 +1142,12 @@
}
}
+status_t AudioSystem::setSupportedSystemUsages(const std::vector<audio_usage_t>& systemUsages) {
+ const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ if (aps == nullptr) return PERMISSION_DENIED;
+ return aps->setSupportedSystemUsages(systemUsages);
+}
+
status_t AudioSystem::setAllowedCapturePolicy(uid_t uid, audio_flags_mask_t flags) {
const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
if (aps == nullptr) return PERMISSION_DENIED;
@@ -1344,6 +1359,21 @@
return aps->removeUidDeviceAffinities(uid);
}
+status_t AudioSystem::setUserIdDeviceAffinities(int userId,
+ const Vector<AudioDeviceTypeAddr>& devices)
+{
+ const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ if (aps == 0) return PERMISSION_DENIED;
+ return aps->setUserIdDeviceAffinities(userId, devices);
+}
+
+status_t AudioSystem::removeUserIdDeviceAffinities(int userId)
+{
+ const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ if (aps == 0) return PERMISSION_DENIED;
+ return aps->removeUserIdDeviceAffinities(userId);
+}
+
status_t AudioSystem::startAudioSource(const struct audio_port_config *source,
const audio_attributes_t *attributes,
audio_port_handle_t *portId)
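Together, setSupportedSystemUsages() and the userId device-affinity entry points added above target privileged clients such as a car audio service; a sketch of the expected call sequence (illustrative only; the AudioDeviceTypeAddr constructor form and the bus address are assumptions):

    std::vector<audio_usage_t> systemUsages = {
        AUDIO_USAGE_EMERGENCY, AUDIO_USAGE_SAFETY,
        AUDIO_USAGE_VEHICLE_STATUS, AUDIO_USAGE_ANNOUNCEMENT,
    };
    status_t res = AudioSystem::setSupportedSystemUsages(systemUsages);

    Vector<AudioDeviceTypeAddr> devices;
    devices.add(AudioDeviceTypeAddr(AUDIO_DEVICE_OUT_BUS, "bus0_media_out"));
    res = AudioSystem::setUserIdDeviceAffinities(10 /*userId*/, devices);
    // ... later, when the user is stopped:
    res = AudioSystem::removeUserIdDeviceAffinities(10 /*userId*/);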
diff --git a/media/libaudioclient/AudioTrack.cpp b/media/libaudioclient/AudioTrack.cpp
index 3151feb..8cfee50 100644
--- a/media/libaudioclient/AudioTrack.cpp
+++ b/media/libaudioclient/AudioTrack.cpp
@@ -599,7 +599,7 @@
mReleased = 0;
mStartNs = 0;
mStartFromZeroUs = 0;
- AudioSystem::acquireAudioSessionId(mSessionId, mClientPid);
+ AudioSystem::acquireAudioSessionId(mSessionId, mClientPid, mClientUid);
mSequence = 1;
mObservedSequence = mSequence;
mInUnderrun = false;
diff --git a/media/libaudioclient/IAudioFlinger.cpp b/media/libaudioclient/IAudioFlinger.cpp
index cbc98bd..513da2b 100644
--- a/media/libaudioclient/IAudioFlinger.cpp
+++ b/media/libaudioclient/IAudioFlinger.cpp
@@ -571,12 +571,13 @@
return id;
}
- virtual void acquireAudioSessionId(audio_session_t audioSession, int pid)
+ void acquireAudioSessionId(audio_session_t audioSession, pid_t pid, uid_t uid) override
{
Parcel data, reply;
data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
data.writeInt32(audioSession);
- data.writeInt32(pid);
+ data.writeInt32((int32_t)pid);
+ data.writeInt32((int32_t)uid);
remote()->transact(ACQUIRE_AUDIO_SESSION_ID, data, &reply);
}
@@ -1326,8 +1327,9 @@
case ACQUIRE_AUDIO_SESSION_ID: {
CHECK_INTERFACE(IAudioFlinger, data, reply);
audio_session_t audioSession = (audio_session_t) data.readInt32();
- int pid = data.readInt32();
- acquireAudioSessionId(audioSession, pid);
+ const pid_t pid = (pid_t)data.readInt32();
+ const uid_t uid = (uid_t)data.readInt32();
+ acquireAudioSessionId(audioSession, pid, uid);
return NO_ERROR;
} break;
case RELEASE_AUDIO_SESSION_ID: {
diff --git a/media/libaudioclient/IAudioPolicyService.cpp b/media/libaudioclient/IAudioPolicyService.cpp
index 87802a1..ce38414 100644
--- a/media/libaudioclient/IAudioPolicyService.cpp
+++ b/media/libaudioclient/IAudioPolicyService.cpp
@@ -97,11 +97,14 @@
IS_HAPTIC_PLAYBACK_SUPPORTED,
SET_UID_DEVICE_AFFINITY,
REMOVE_UID_DEVICE_AFFINITY,
+ SET_USERID_DEVICE_AFFINITY,
+ REMOVE_USERID_DEVICE_AFFINITY,
GET_OFFLOAD_FORMATS_A2DP,
LIST_AUDIO_PRODUCT_STRATEGIES,
GET_STRATEGY_FOR_ATTRIBUTES,
LIST_AUDIO_VOLUME_GROUPS,
GET_VOLUME_GROUP_FOR_ATTRIBUTES,
+ SET_SUPPORTED_SYSTEM_USAGES,
SET_ALLOWED_CAPTURE_POLICY,
MOVE_EFFECTS_TO_IO,
SET_RTT_ENABLED,
@@ -332,6 +335,7 @@
ALOGE("getInputForAttr NULL portId - shouldn't happen");
return BAD_VALUE;
}
+
data.write(attr, sizeof(audio_attributes_t));
data.writeInt32(*input);
data.writeInt32(riid);
@@ -622,6 +626,20 @@
return status;
}
+ status_t setSupportedSystemUsages(const std::vector<audio_usage_t>& systemUsages) {
+ Parcel data, reply;
+ data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
+ data.writeInt32(systemUsages.size());
+ for (auto systemUsage : systemUsages) {
+ data.writeInt32(systemUsage);
+ }
+ status_t status = remote()->transact(SET_SUPPORTED_SYSTEM_USAGES, data, &reply);
+ if (status != NO_ERROR) {
+ return status;
+ }
+ return static_cast <status_t> (reply.readInt32());
+ }
+
status_t setAllowedCapturePolicy(uid_t uid, audio_flags_mask_t flags) override {
Parcel data, reply;
data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
@@ -1181,6 +1199,52 @@
return status;
}
+ virtual status_t setUserIdDeviceAffinities(int userId,
+ const Vector<AudioDeviceTypeAddr>& devices)
+ {
+ Parcel data, reply;
+ data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
+
+ data.writeInt32((int32_t) userId);
+ size_t size = devices.size();
+ size_t sizePosition = data.dataPosition();
+ data.writeInt32((int32_t) size);
+ size_t finalSize = size;
+ for (size_t i = 0; i < size; i++) {
+ size_t position = data.dataPosition();
+ if (devices[i].writeToParcel(&data) != NO_ERROR) {
+ data.setDataPosition(position);
+ finalSize--;
+ }
+ }
+ if (size != finalSize) {
+ size_t position = data.dataPosition();
+ data.setDataPosition(sizePosition);
+ data.writeInt32(finalSize);
+ data.setDataPosition(position);
+ }
+
+ status_t status = remote()->transact(SET_USERID_DEVICE_AFFINITY, data, &reply);
+ if (status == NO_ERROR) {
+ status = (status_t)reply.readInt32();
+ }
+ return status;
+ }
+
+ virtual status_t removeUserIdDeviceAffinities(int userId) {
+ Parcel data, reply;
+ data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
+
+ data.writeInt32((int32_t) userId);
+
+ status_t status =
+ remote()->transact(REMOVE_USERID_DEVICE_AFFINITY, data, &reply);
+ if (status == NO_ERROR) {
+ status = (status_t) reply.readInt32();
+ }
+ return status;
+ }
+
virtual status_t listAudioProductStrategies(AudioProductStrategyVector &strategies)
{
Parcel data, reply;
@@ -1441,6 +1505,8 @@
case SET_A11Y_SERVICES_UIDS:
case SET_UID_DEVICE_AFFINITY:
case REMOVE_UID_DEVICE_AFFINITY:
+ case SET_USERID_DEVICE_AFFINITY:
+ case REMOVE_USERID_DEVICE_AFFINITY:
case GET_OFFLOAD_FORMATS_A2DP:
case LIST_AUDIO_VOLUME_GROUPS:
case GET_VOLUME_GROUP_FOR_ATTRIBUTES:
@@ -1449,6 +1515,7 @@
case SET_RTT_ENABLED:
case IS_CALL_SCREEN_MODE_SUPPORTED:
case SET_PREFERRED_DEVICE_FOR_PRODUCT_STRATEGY:
+ case SET_SUPPORTED_SYSTEM_USAGES:
case REMOVE_PREFERRED_DEVICE_FOR_PRODUCT_STRATEGY:
case GET_PREFERRED_DEVICE_FOR_PRODUCT_STRATEGY:
case GET_DEVICES_FOR_ATTRIBUTES: {
@@ -2362,6 +2429,30 @@
return NO_ERROR;
}
+ case SET_USERID_DEVICE_AFFINITY: {
+ CHECK_INTERFACE(IAudioPolicyService, data, reply);
+ const int userId = (int) data.readInt32();
+ Vector<AudioDeviceTypeAddr> devices;
+ size_t size = (size_t)data.readInt32();
+ for (size_t i = 0; i < size; i++) {
+ AudioDeviceTypeAddr device;
+ if (device.readFromParcel((Parcel*)&data) == NO_ERROR) {
+ devices.add(device);
+ }
+ }
+ status_t status = setUserIdDeviceAffinities(userId, devices);
+ reply->writeInt32(status);
+ return NO_ERROR;
+ }
+
+ case REMOVE_USERID_DEVICE_AFFINITY: {
+ CHECK_INTERFACE(IAudioPolicyService, data, reply);
+ const int userId = (int) data.readInt32();
+ status_t status = removeUserIdDeviceAffinities(userId);
+ reply->writeInt32(status);
+ return NO_ERROR;
+ }
+
case LIST_AUDIO_PRODUCT_STRATEGIES: {
CHECK_INTERFACE(IAudioPolicyService, data, reply);
AudioProductStrategyVector strategies;
@@ -2442,16 +2533,46 @@
if (status != NO_ERROR) {
return status;
}
+
volume_group_t group;
status = getVolumeGroupFromAudioAttributes(attributes, group);
- reply->writeInt32(status);
if (status != NO_ERROR) {
return NO_ERROR;
}
+
+ reply->writeInt32(status);
reply->writeUint32(static_cast<int>(group));
return NO_ERROR;
}
+ case SET_SUPPORTED_SYSTEM_USAGES: {
+ CHECK_INTERFACE(IAudioPolicyService, data, reply);
+ std::vector<audio_usage_t> systemUsages;
+
+ int32_t size;
+ status_t status = data.readInt32(&size);
+ if (status != NO_ERROR) {
+ return status;
+ }
+ if (size > MAX_ITEMS_PER_LIST) {
+ size = MAX_ITEMS_PER_LIST;
+ }
+
+ for (int32_t i = 0; i < size; i++) {
+ int32_t systemUsageInt;
+ status = data.readInt32(&systemUsageInt);
+ if (status != NO_ERROR) {
+ return status;
+ }
+
+ audio_usage_t systemUsage = static_cast<audio_usage_t>(systemUsageInt);
+ systemUsages.push_back(systemUsage);
+ }
+ status = setSupportedSystemUsages(systemUsages);
+ reply->writeInt32(static_cast <int32_t>(status));
+ return NO_ERROR;
+ }
+
case SET_ALLOWED_CAPTURE_POLICY: {
CHECK_INTERFACE(IAudioPolicyService, data, reply);
uid_t uid = data.readInt32();
diff --git a/media/libaudioclient/include/media/AudioEffect.h b/media/libaudioclient/include/media/AudioEffect.h
index f17d737..eec9dfc 100644
--- a/media/libaudioclient/include/media/AudioEffect.h
+++ b/media/libaudioclient/include/media/AudioEffect.h
@@ -639,7 +639,8 @@
sp<EffectClient> mIEffectClient; // IEffectClient implementation
sp<IMemory> mCblkMemory; // shared memory for deferred parameter setting
effect_param_cblk_t* mCblk; // control block for deferred parameter setting
- pid_t mClientPid;
+ pid_t mClientPid = (pid_t)-1;
+ uid_t mClientUid = (uid_t)-1;
};
diff --git a/media/libaudioclient/include/media/AudioPolicy.h b/media/libaudioclient/include/media/AudioPolicy.h
index 0ab1c9d..20d2c0b 100644
--- a/media/libaudioclient/include/media/AudioPolicy.h
+++ b/media/libaudioclient/include/media/AudioPolicy.h
@@ -18,12 +18,14 @@
#ifndef ANDROID_AUDIO_POLICY_H
#define ANDROID_AUDIO_POLICY_H
+#include <functional>
#include <binder/Parcel.h>
#include <media/AudioDeviceTypeAddr.h>
#include <system/audio.h>
#include <system/audio_policy.h>
#include <utils/String8.h>
#include <utils/Vector.h>
+#include <cutils/multiuser.h>
namespace android {
@@ -32,10 +34,12 @@
#define RULE_MATCH_ATTRIBUTE_USAGE 0x1
#define RULE_MATCH_ATTRIBUTE_CAPTURE_PRESET (0x1 << 1)
#define RULE_MATCH_UID (0x1 << 2)
+#define RULE_MATCH_USERID (0x1 << 3)
#define RULE_EXCLUDE_ATTRIBUTE_USAGE (RULE_EXCLUSION_MASK|RULE_MATCH_ATTRIBUTE_USAGE)
#define RULE_EXCLUDE_ATTRIBUTE_CAPTURE_PRESET \
(RULE_EXCLUSION_MASK|RULE_MATCH_ATTRIBUTE_CAPTURE_PRESET)
#define RULE_EXCLUDE_UID (RULE_EXCLUSION_MASK|RULE_MATCH_UID)
+#define RULE_EXCLUDE_USERID (RULE_EXCLUSION_MASK|RULE_MATCH_USERID)
#define MIX_TYPE_INVALID (-1)
#define MIX_TYPE_PLAYERS 0
@@ -73,6 +77,7 @@
audio_usage_t mUsage;
audio_source_t mSource;
uid_t mUid;
+ int mUserId;
} mValue;
uint32_t mRule;
};
@@ -98,9 +103,23 @@
bool hasUidRule(bool match, uid_t uid) const;
/** returns true if this mix has a rule for uid match (any uid) */
bool hasMatchUidRule() const;
+
+ void setExcludeUserId(int userId) const;
+ void setMatchUserId(int userId) const;
+ /** returns true if this mix has a rule to match or exclude the given userId */
+ bool hasUserIdRule(bool match, int userId) const;
+ /** returns true if this mix has a rule for userId match (any userId) */
+ bool hasMatchUserIdRule() const;
/** returns true if this mix can be used for uid-device affinity routing */
bool isDeviceAffinityCompatible() const;
+ /**
+ * returns true if the mix has a capture rule for a usage that
+ * matches the given predicate
+ */
+ bool hasMatchingRuleForUsage(
+ std::function<bool (audio_usage_t)>const& func) const;
+
mutable Vector<AudioMixMatchCriterion> mCriteria;
uint32_t mMixType;
audio_config_t mFormat;
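hasMatchingRuleForUsage() lets callers test a mix against an arbitrary usage predicate; for example (illustrative only, `mix` assumed to exist):

    bool capturesSystemUsage = mix.hasMatchingRuleForUsage([](audio_usage_t usage) {
        return usage == AUDIO_USAGE_EMERGENCY || usage == AUDIO_USAGE_SAFETY;
    });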
diff --git a/media/libaudioclient/include/media/AudioSystem.h b/media/libaudioclient/include/media/AudioSystem.h
index ae8b59c..9d3f8b6 100644
--- a/media/libaudioclient/include/media/AudioSystem.h
+++ b/media/libaudioclient/include/media/AudioSystem.h
@@ -27,6 +27,7 @@
#include <media/IAudioFlingerClient.h>
#include <media/IAudioPolicyServiceClient.h>
#include <media/MicrophoneInfo.h>
+#include <set>
#include <system/audio.h>
#include <system/audio_effect.h>
#include <system/audio_policy.h>
@@ -107,7 +108,16 @@
static status_t setParameters(const String8& keyValuePairs);
static String8 getParameters(const String8& keys);
- static void setErrorCallback(audio_error_callback cb);
+ // Registers an error callback. When this callback is invoked, it means all
+ // state implied by this interface has been reset.
+ // Returns a token that can be used for un-registering.
+ // Might block while callbacks are being invoked.
+ static uintptr_t addErrorCallback(audio_error_callback cb);
+
+ // Un-registers a callback previously added with addErrorCallback.
+ // Might block while callbacks are being invoked.
+ static void removeErrorCallback(uintptr_t cb);
+
static void setDynPolicyCallback(dynamic_policy_callback cb);
static void setRecordConfigCallback(record_config_callback);
@@ -172,7 +182,7 @@
// or an unspecified existing unique ID.
static audio_unique_id_t newAudioUniqueId(audio_unique_id_use_t use);
- static void acquireAudioSessionId(audio_session_t audioSession, pid_t pid);
+ static void acquireAudioSessionId(audio_session_t audioSession, pid_t pid, uid_t uid);
static void releaseAudioSessionId(audio_session_t audioSession, pid_t pid);
// Get the HW synchronization source used for an audio session.
@@ -304,6 +314,8 @@
static status_t setLowRamDevice(bool isLowRamDevice, int64_t totalMemory);
+ static status_t setSupportedSystemUsages(const std::vector<audio_usage_t>& systemUsages);
+
static status_t setAllowedCapturePolicy(uid_t uid, audio_flags_mask_t flags);
// Check if hw offload is possible for given format, stream type, sample rate,
@@ -352,6 +364,10 @@
static status_t removeUidDeviceAffinities(uid_t uid);
+ static status_t setUserIdDeviceAffinities(int userId, const Vector<AudioDeviceTypeAddr>& devices);
+
+ static status_t removeUserIdDeviceAffinities(int userId);
+
static status_t startAudioSource(const struct audio_port_config *source,
const audio_attributes_t *attributes,
audio_port_handle_t *portId);
@@ -562,15 +578,19 @@
static const sp<AudioFlingerClient> getAudioFlingerClient();
static sp<AudioIoDescriptor> getIoDescriptor(audio_io_handle_t ioHandle);
+ // Invokes all registered error callbacks with the given error code.
+ static void reportError(status_t err);
+
static sp<AudioFlingerClient> gAudioFlingerClient;
static sp<AudioPolicyServiceClient> gAudioPolicyServiceClient;
friend class AudioFlingerClient;
friend class AudioPolicyServiceClient;
- static Mutex gLock; // protects gAudioFlinger and gAudioErrorCallback,
+ static Mutex gLock; // protects gAudioFlinger
+ static Mutex gLockErrorCallbacks; // protects gAudioErrorCallbacks
static Mutex gLockAPS; // protects gAudioPolicyService and gAudioPolicyServiceClient
static sp<IAudioFlinger> gAudioFlinger;
- static audio_error_callback gAudioErrorCallback;
+ static std::set<audio_error_callback> gAudioErrorCallbacks;
static dynamic_policy_callback gDynPolicyCallback;
static record_config_callback gRecordConfigCallback;
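The token-based registration above replaces the single setErrorCallback() slot so several clients can listen for audioserver death independently; a usage sketch (illustrative only):

    static void onAudioServerError(status_t err) {
        ALOGW("audioserver reported error %d", err);
    }

    const uintptr_t token = AudioSystem::addErrorCallback(onAudioServerError);
    // ... when the listener is torn down:
    AudioSystem::removeErrorCallback(token);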
diff --git a/media/libaudioclient/include/media/IAudioFlinger.h b/media/libaudioclient/include/media/IAudioFlinger.h
index 8703f55..8bc1d83 100644
--- a/media/libaudioclient/include/media/IAudioFlinger.h
+++ b/media/libaudioclient/include/media/IAudioFlinger.h
@@ -446,7 +446,7 @@
virtual audio_unique_id_t newAudioUniqueId(audio_unique_id_use_t use) = 0;
- virtual void acquireAudioSessionId(audio_session_t audioSession, pid_t pid) = 0;
+ virtual void acquireAudioSessionId(audio_session_t audioSession, pid_t pid, uid_t uid) = 0;
virtual void releaseAudioSessionId(audio_session_t audioSession, pid_t pid) = 0;
virtual status_t queryNumberEffects(uint32_t *numEffects) const = 0;
diff --git a/media/libaudioclient/include/media/IAudioPolicyService.h b/media/libaudioclient/include/media/IAudioPolicyService.h
index 742762d..caa1d13 100644
--- a/media/libaudioclient/include/media/IAudioPolicyService.h
+++ b/media/libaudioclient/include/media/IAudioPolicyService.h
@@ -140,6 +140,7 @@
audio_unique_id_t* id) = 0;
virtual status_t removeSourceDefaultEffect(audio_unique_id_t id) = 0;
virtual status_t removeStreamDefaultEffect(audio_unique_id_t id) = 0;
+ virtual status_t setSupportedSystemUsages(const std::vector<audio_usage_t>& systemUsages) = 0;
virtual status_t setAllowedCapturePolicy(uid_t uid, audio_flags_mask_t flags) = 0;
// Check if offload is possible for given format, stream type, sample rate,
// bit rate, duration, video and streaming or offload property is enabled
@@ -194,6 +195,11 @@
virtual status_t removeUidDeviceAffinities(uid_t uid) = 0;
+ virtual status_t setUserIdDeviceAffinities(int userId,
+ const Vector<AudioDeviceTypeAddr>& devices) = 0;
+
+ virtual status_t removeUserIdDeviceAffinities(int userId) = 0;
+
virtual status_t startAudioSource(const struct audio_port_config *source,
const audio_attributes_t *attributes,
audio_port_handle_t *portId) = 0;
diff --git a/media/libmedia/include/media/MediaCodecBuffer.h b/media/libmedia/include/media/MediaCodecBuffer.h
index 2c16fba..101377c 100644
--- a/media/libmedia/include/media/MediaCodecBuffer.h
+++ b/media/libmedia/include/media/MediaCodecBuffer.h
@@ -22,6 +22,8 @@
#include <utils/RefBase.h>
#include <utils/StrongPointer.h>
+class C2Buffer;
+
namespace android {
struct ABuffer;
@@ -57,6 +59,36 @@
void setFormat(const sp<AMessage> &format);
+ /**
+ * \return C2Buffer object represents this buffer.
+ */
+ virtual std::shared_ptr<C2Buffer> asC2Buffer() { return nullptr; }
+
+ /**
+ * Test if we can copy the content of |buffer| into this object.
+ *
+ * \param buffer C2Buffer object to copy.
+ * \return true if the content of buffer can be copied over to this buffer
+ * false otherwise.
+ */
+ virtual bool canCopy(const std::shared_ptr<C2Buffer> &buffer) const {
+ (void)buffer;
+ return false;
+ }
+
+ /**
+ * Copy the content of |buffer| into this object. This method assumes that
+ * canCopy() check already passed.
+ *
+ * \param buffer C2Buffer object to copy.
+ * \return true if successful
+ * false otherwise.
+ */
+ virtual bool copy(const std::shared_ptr<C2Buffer> &buffer) {
+ (void)buffer;
+ return false;
+ }
+
private:
MediaCodecBuffer() = delete;
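The three hooks above give MediaCodec a copy-or-wrap path for Codec2 buffers; the intended calling pattern from a buffer channel looks roughly like this (illustrative only; `codecBuffer` is an sp<MediaCodecBuffer> and `c2Buffer` a std::shared_ptr<C2Buffer> assumed to exist):

    if (codecBuffer->canCopy(c2Buffer)) {
        CHECK(codecBuffer->copy(c2Buffer));        // contents copied into codecBuffer
    } else {
        // May be nullptr for implementations that do not wrap a C2Buffer.
        std::shared_ptr<C2Buffer> wrapped = codecBuffer->asC2Buffer();
    }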
diff --git a/media/libmedia/mediaplayer.cpp b/media/libmedia/mediaplayer.cpp
index 60d2f85..1fadc94 100644
--- a/media/libmedia/mediaplayer.cpp
+++ b/media/libmedia/mediaplayer.cpp
@@ -60,7 +60,7 @@
mVideoWidth = mVideoHeight = 0;
mLockThreadId = 0;
mAudioSessionId = (audio_session_t) AudioSystem::newAudioUniqueId(AUDIO_UNIQUE_ID_USE_SESSION);
- AudioSystem::acquireAudioSessionId(mAudioSessionId, -1);
+ AudioSystem::acquireAudioSessionId(mAudioSessionId, (pid_t)-1, (uid_t)-1); // always in client.
mSendLevel = 0;
mRetransmitEndpointValid = false;
}
@@ -72,7 +72,7 @@
delete mAudioAttributesParcel;
mAudioAttributesParcel = NULL;
}
- AudioSystem::releaseAudioSessionId(mAudioSessionId, -1);
+ AudioSystem::releaseAudioSessionId(mAudioSessionId, (pid_t)-1);
disconnect();
IPCThreadState::self()->flushCommands();
}
@@ -709,8 +709,8 @@
return BAD_VALUE;
}
if (sessionId != mAudioSessionId) {
- AudioSystem::acquireAudioSessionId(sessionId, -1);
- AudioSystem::releaseAudioSessionId(mAudioSessionId, -1);
+ AudioSystem::acquireAudioSessionId(sessionId, (pid_t)-1, (uid_t)-1);
+ AudioSystem::releaseAudioSessionId(mAudioSessionId, (pid_t)-1);
mAudioSessionId = sessionId;
}
return NO_ERROR;
diff --git a/media/libmediahelper/TypeConverter.cpp b/media/libmediahelper/TypeConverter.cpp
index fc037a6..6f14081 100644
--- a/media/libmediahelper/TypeConverter.cpp
+++ b/media/libmediahelper/TypeConverter.cpp
@@ -360,6 +360,10 @@
MAKE_STRING_FROM_ENUM(AUDIO_USAGE_VIRTUAL_SOURCE),
MAKE_STRING_FROM_ENUM(AUDIO_USAGE_ASSISTANT),
MAKE_STRING_FROM_ENUM(AUDIO_USAGE_CALL_ASSISTANT),
+ MAKE_STRING_FROM_ENUM(AUDIO_USAGE_EMERGENCY),
+ MAKE_STRING_FROM_ENUM(AUDIO_USAGE_SAFETY),
+ MAKE_STRING_FROM_ENUM(AUDIO_USAGE_VEHICLE_STATUS),
+ MAKE_STRING_FROM_ENUM(AUDIO_USAGE_ANNOUNCEMENT),
TERMINATOR
};
diff --git a/media/libmediaplayerservice/Android.bp b/media/libmediaplayerservice/Android.bp
index 762ed19..5301f5c 100644
--- a/media/libmediaplayerservice/Android.bp
+++ b/media/libmediaplayerservice/Android.bp
@@ -39,7 +39,6 @@
"libpowermanager",
"libstagefright",
"libstagefright_foundation",
- "libstagefright_framecapture_utils",
"libstagefright_httplive",
"libutils",
],
diff --git a/media/libmediaplayerservice/MediaPlayerService.cpp b/media/libmediaplayerservice/MediaPlayerService.cpp
index f2a38dd..81ffcbc 100644
--- a/media/libmediaplayerservice/MediaPlayerService.cpp
+++ b/media/libmediaplayerservice/MediaPlayerService.cpp
@@ -58,7 +58,6 @@
#include <media/Metadata.h>
#include <media/AudioTrack.h>
#include <media/MemoryLeakTrackUtil.h>
-#include <media/stagefright/FrameCaptureProcessor.h>
#include <media/stagefright/InterfaceUtils.h>
#include <media/stagefright/MediaCodecConstants.h>
#include <media/stagefright/MediaCodecList.h>
@@ -448,8 +447,6 @@
mNextConnId = 1;
MediaPlayerFactory::registerBuiltinFactories();
- // initialize the frame capture utilities
- (void)FrameCaptureProcessor::getInstance();
}
MediaPlayerService::~MediaPlayerService()
diff --git a/media/libmediatranscoding/Android.bp b/media/libmediatranscoding/Android.bp
index 8f0e278..9d0c534 100644
--- a/media/libmediatranscoding/Android.bp
+++ b/media/libmediatranscoding/Android.bp
@@ -6,10 +6,11 @@
"aidl/android/media/IMediaTranscodingService.aidl",
"aidl/android/media/ITranscodingServiceClient.aidl",
"aidl/android/media/TranscodingErrorCode.aidl",
+ "aidl/android/media/TranscodingJobPriority.aidl",
"aidl/android/media/TranscodingType.aidl",
"aidl/android/media/TranscodingVideoCodecType.aidl",
- "aidl/android/media/TranscodingJob.aidl",
- "aidl/android/media/TranscodingRequest.aidl",
- "aidl/android/media/TranscodingResult.aidl",
+ "aidl/android/media/TranscodingJobParcel.aidl",
+ "aidl/android/media/TranscodingRequestParcel.aidl",
+ "aidl/android/media/TranscodingResultParcel.aidl",
],
}
diff --git a/media/libmediatranscoding/aidl/android/media/IMediaTranscodingService.aidl b/media/libmediatranscoding/aidl/android/media/IMediaTranscodingService.aidl
index cae8ff0..798300a 100644
--- a/media/libmediatranscoding/aidl/android/media/IMediaTranscodingService.aidl
+++ b/media/libmediatranscoding/aidl/android/media/IMediaTranscodingService.aidl
@@ -16,8 +16,8 @@
package android.media;
-import android.media.TranscodingJob;
-import android.media.TranscodingRequest;
+import android.media.TranscodingJobParcel;
+import android.media.TranscodingRequestParcel;
import android.media.ITranscodingServiceClient;
/**
@@ -27,6 +27,25 @@
*/
interface IMediaTranscodingService {
/**
+ * All MediaTranscoding service and device Binder calls may return a
+ * ServiceSpecificException with the following error codes
+ */
+ const int ERROR_PERMISSION_DENIED = 1;
+ const int ERROR_ALREADY_EXISTS = 2;
+ const int ERROR_ILLEGAL_ARGUMENT = 3;
+ const int ERROR_DISCONNECTED = 4;
+ const int ERROR_TIMED_OUT = 5;
+ const int ERROR_DISABLED = 6;
+ const int ERROR_INVALID_OPERATION = 7;
+
+ /**
+ * Default UID/PID values for non-privileged callers of
+ * registerClient().
+ */
+ const int USE_CALLING_UID = -1;
+ const int USE_CALLING_PID = -1;
+
+ /**
* Register the client with the MediaTranscodingService.
*
* Client must call this function to register itself with the service in order to perform
@@ -34,10 +53,16 @@
* Client should save this Id and use it for all transactions with the service.
*
* @param client interface for the MediaTranscodingService to call the client.
+ * @param opPackageName op package name of the client.
+ * @param clientUid user id of the client.
+ * @param clientPid process id of the client.
* @return a unique positive Id assigned to the client by the service, -1 means failed to
* register.
*/
- int registerClient(in ITranscodingServiceClient client);
+ int registerClient(in ITranscodingServiceClient client,
+ in String opPackageName,
+ in int clientUid,
+ in int clientPid);
/**
* Unregister the client with the MediaTranscodingService.
@@ -58,8 +83,8 @@
* @return a unique positive jobId generated by the MediaTranscodingService, -1 means failure.
*/
int submitRequest(in int clientId,
- in TranscodingRequest request,
- out TranscodingJob job);
+ in TranscodingRequestParcel request,
+ out TranscodingJobParcel job);
/**
* Cancels a transcoding job.
@@ -77,5 +102,5 @@
* @param job(output variable) the TranscodingJob associated with the jobId.
* @return true if succeeds, false otherwise.
*/
- boolean getJobWithId(in int jobId, out TranscodingJob job);
+ boolean getJobWithId(in int jobId, out TranscodingJobParcel job);
}
diff --git a/media/libmediatranscoding/aidl/android/media/ITranscodingServiceClient.aidl b/media/libmediatranscoding/aidl/android/media/ITranscodingServiceClient.aidl
index 29b6f35..e23c833 100644
--- a/media/libmediatranscoding/aidl/android/media/ITranscodingServiceClient.aidl
+++ b/media/libmediatranscoding/aidl/android/media/ITranscodingServiceClient.aidl
@@ -17,8 +17,8 @@
package android.media;
import android.media.TranscodingErrorCode;
-import android.media.TranscodingJob;
-import android.media.TranscodingResult;
+import android.media.TranscodingJobParcel;
+import android.media.TranscodingResultParcel;
/**
* ITranscodingServiceClient interface for the MediaTranscodingService to communicate with the
@@ -39,7 +39,7 @@
* @param jobId jobId assigned by the MediaTranscodingService upon receiving request.
* @param result contains the transcoded file stats and other transcoding metrics if requested.
*/
- oneway void onTranscodingFinished(in int jobId, in TranscodingResult result);
+ oneway void onTranscodingFinished(in int jobId, in TranscodingResultParcel result);
/**
* Called when the transcoding associated with the jobId failed.
@@ -60,7 +60,9 @@
* @param oldAwaitNumber previous number of jobs ahead of current job.
* @param newAwaitNumber updated number of jobs ahead of current job.
*/
- oneway void onAwaitNumberOfJobsChanged(in int jobId, in int oldAwaitNumber, in int newAwaitNumber);
+ oneway void onAwaitNumberOfJobsChanged(in int jobId,
+ in int oldAwaitNumber,
+ in int newAwaitNumber);
/**
* Called when there is an update on the progress of the TranscodingJob.
diff --git a/media/libmediatranscoding/aidl/android/media/TranscodingJob.aidl b/media/libmediatranscoding/aidl/android/media/TranscodingJobParcel.aidl
similarity index 91%
rename from media/libmediatranscoding/aidl/android/media/TranscodingJob.aidl
rename to media/libmediatranscoding/aidl/android/media/TranscodingJobParcel.aidl
index 42ad8ad..d912c38 100644
--- a/media/libmediatranscoding/aidl/android/media/TranscodingJob.aidl
+++ b/media/libmediatranscoding/aidl/android/media/TranscodingJobParcel.aidl
@@ -16,7 +16,7 @@
package android.media;
-import android.media.TranscodingRequest;
+import android.media.TranscodingRequestParcel;
/**
* TranscodingJob is generated by the MediaTranscodingService upon receiving a TranscodingRequest.
@@ -26,7 +26,7 @@
* {@hide}
*/
//TODO(hkuang): Implement the parcelable.
-parcelable TranscodingJob {
+parcelable TranscodingJobParcel {
/**
* A unique positive Id generated by the MediaTranscodingService.
*/
@@ -35,7 +35,7 @@
/**
* The request associated with the TranscodingJob.
*/
- TranscodingRequest request;
+ TranscodingRequestParcel request;
/**
* Current number of jobs ahead of this job. The service schedules the job based on the priority
diff --git a/media/libmediatranscoding/aidl/android/media/TranscodingJobPriority.aidl b/media/libmediatranscoding/aidl/android/media/TranscodingJobPriority.aidl
new file mode 100644
index 0000000..1a5d81a
--- /dev/null
+++ b/media/libmediatranscoding/aidl/android/media/TranscodingJobPriority.aidl
@@ -0,0 +1,37 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+/**
+ * Priority of a transcoding job.
+ *
+ * {@hide}
+ */
+@Backing(type="int")
+enum TranscodingJobPriority {
+ // TODO(hkuang): define what each priority level actually mean.
+ kUnspecified = 0,
+ kLow = 1,
+ /**
+ * 2 ~ 20 is reserved for future use.
+ */
+ kNormal = 21,
+ /**
+ * 22 ~ 30 is reserved for future use.
+ */
+ kHigh = 31,
+}
\ No newline at end of file
diff --git a/media/libmediatranscoding/aidl/android/media/TranscodingRequest.aidl b/media/libmediatranscoding/aidl/android/media/TranscodingRequestParcel.aidl
similarity index 91%
rename from media/libmediatranscoding/aidl/android/media/TranscodingRequest.aidl
rename to media/libmediatranscoding/aidl/android/media/TranscodingRequestParcel.aidl
index 6b51ee3..7b7986d 100644
--- a/media/libmediatranscoding/aidl/android/media/TranscodingRequest.aidl
+++ b/media/libmediatranscoding/aidl/android/media/TranscodingRequestParcel.aidl
@@ -16,6 +16,7 @@
package android.media;
+import android.media.TranscodingJobPriority;
import android.media.TranscodingType;
/**
@@ -24,7 +25,7 @@
* {@hide}
*/
//TODO(hkuang): Implement the parcelable.
-parcelable TranscodingRequest {
+parcelable TranscodingRequestParcel {
/**
* Name of file to be transcoded.
*/
@@ -48,8 +49,7 @@
/**
* Priority of this transcoding. Service will schedule the transcoding based on the priority.
*/
- // TODO(hkuang): Define the priority level.
- int priority;
+ TranscodingJobPriority priority;
/**
* Whether to receive update on progress and change of awaitNumJobs.
diff --git a/media/libmediatranscoding/aidl/android/media/TranscodingResult.aidl b/media/libmediatranscoding/aidl/android/media/TranscodingResultParcel.aidl
similarity index 93%
rename from media/libmediatranscoding/aidl/android/media/TranscodingResult.aidl
rename to media/libmediatranscoding/aidl/android/media/TranscodingResultParcel.aidl
index a41e025..65c49e7 100644
--- a/media/libmediatranscoding/aidl/android/media/TranscodingResult.aidl
+++ b/media/libmediatranscoding/aidl/android/media/TranscodingResultParcel.aidl
@@ -16,15 +16,13 @@
package android.media;
-import android.media.TranscodingJob;
-
/**
* Result of the transcoding.
*
* {@hide}
*/
//TODO(hkuang): Implement the parcelable.
-parcelable TranscodingResult {
+parcelable TranscodingResultParcel {
/**
* The jobId associated with the TranscodingResult.
*/
diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp
index ef9d253..960120f 100644
--- a/media/libstagefright/ACodec.cpp
+++ b/media/libstagefright/ACodec.cpp
@@ -1310,7 +1310,7 @@
OMX_U32 bufferCount, bufferSize, minUndequeuedBuffers;
status_t err = configureOutputBuffersFromNativeWindow(
&bufferCount, &bufferSize, &minUndequeuedBuffers,
- false /* preregister */);
+ mFlags & kFlagPreregisterMetadataBuffers /* preregister */);
if (err != OK)
return err;
mNumUndequeuedBuffers = minUndequeuedBuffers;
@@ -1896,6 +1896,19 @@
setPortMode(kPortIndexInput, IOMX::kPortModePresetByteBuffer);
err = OK; // ignore error for now
}
+
+ OMX_INDEXTYPE index;
+ if (mOMXNode->getExtensionIndex(
+ "OMX.google.android.index.preregisterMetadataBuffers", &index) == OK) {
+ OMX_CONFIG_BOOLEANTYPE param;
+ InitOMXParams(&param);
+ param.bEnabled = OMX_FALSE;
+ if (mOMXNode->getParameter(index, &param, sizeof(param)) == OK) {
+ if (param.bEnabled == OMX_TRUE) {
+ mFlags |= kFlagPreregisterMetadataBuffers;
+ }
+ }
+ }
}
if (haveNativeWindow) {
sp<ANativeWindow> nativeWindow =
diff --git a/media/libstagefright/ACodecBufferChannel.cpp b/media/libstagefright/ACodecBufferChannel.cpp
index bf8f09c..e5115d9 100644
--- a/media/libstagefright/ACodecBufferChannel.cpp
+++ b/media/libstagefright/ACodecBufferChannel.cpp
@@ -20,6 +20,8 @@
#include <numeric>
+#include <C2Buffer.h>
+
#include <android/hardware/cas/native/1.0/IDescrambler.h>
#include <android/hardware/drm/1.0/types.h>
#include <binder/MemoryDealer.h>
@@ -250,6 +252,178 @@
return OK;
}
+status_t ACodecBufferChannel::attachBuffer(
+ const std::shared_ptr<C2Buffer> &c2Buffer,
+ const sp<MediaCodecBuffer> &buffer) {
+ switch (c2Buffer->data().type()) {
+ case C2BufferData::LINEAR: {
+ if (c2Buffer->data().linearBlocks().size() != 1u) {
+ return -ENOSYS;
+ }
+ C2ConstLinearBlock block{c2Buffer->data().linearBlocks().front()};
+ C2ReadView view{block.map().get()};
+ if (view.capacity() > buffer->capacity()) {
+ return -ENOSYS;
+ }
+ memcpy(buffer->base(), view.data(), view.capacity());
+ buffer->setRange(0, view.capacity());
+ break;
+ }
+ case C2BufferData::GRAPHIC: {
+ // TODO
+ return -ENOSYS;
+ }
+ case C2BufferData::LINEAR_CHUNKS: [[fallthrough]];
+ case C2BufferData::GRAPHIC_CHUNKS: [[fallthrough]];
+ default:
+ return -ENOSYS;
+ }
+
+ return OK;
+}
+
+int32_t ACodecBufferChannel::getHeapSeqNum(const sp<HidlMemory> &memory) {
+ CHECK(mCrypto);
+ auto it = mHeapSeqNumMap.find(memory);
+ int32_t heapSeqNum = -1;
+ if (it == mHeapSeqNumMap.end()) {
+ heapSeqNum = mCrypto->setHeap(memory);
+ mHeapSeqNumMap.emplace(memory, heapSeqNum);
+ } else {
+ heapSeqNum = it->second;
+ }
+ return heapSeqNum;
+}
+
+status_t ACodecBufferChannel::attachEncryptedBuffer(
+ const sp<hardware::HidlMemory> &memory,
+ bool secure,
+ const uint8_t *key,
+ const uint8_t *iv,
+ CryptoPlugin::Mode mode,
+ CryptoPlugin::Pattern pattern,
+ size_t offset,
+ const CryptoPlugin::SubSample *subSamples,
+ size_t numSubSamples,
+ const sp<MediaCodecBuffer> &buffer) {
+ std::shared_ptr<const std::vector<const BufferInfo>> array(
+ std::atomic_load(&mInputBuffers));
+ BufferInfoIterator it = findClientBuffer(array, buffer);
+ if (it == array->end()) {
+ return -ENOENT;
+ }
+
+ native_handle_t *secureHandle = NULL;
+ if (secure) {
+ sp<SecureBuffer> secureData =
+ static_cast<SecureBuffer *>(it->mCodecBuffer.get());
+ if (secureData->getDestinationType() != ICrypto::kDestinationTypeNativeHandle) {
+ return BAD_VALUE;
+ }
+ secureHandle = static_cast<native_handle_t *>(secureData->getDestinationPointer());
+ }
+ size_t size = 0;
+ for (size_t i = 0; i < numSubSamples; ++i) {
+ size += subSamples[i].mNumBytesOfClearData + subSamples[i].mNumBytesOfEncryptedData;
+ }
+ ssize_t result = -1;
+ ssize_t codecDataOffset = 0;
+ if (mCrypto != NULL) {
+ AString errorDetailMsg;
+ hardware::drm::V1_0::DestinationBuffer destination;
+ if (secure) {
+ destination.type = DrmBufferType::NATIVE_HANDLE;
+ destination.secureMemory = hidl_handle(secureHandle);
+ } else {
+ destination.type = DrmBufferType::SHARED_MEMORY;
+ IMemoryToSharedBuffer(
+ mDecryptDestination, mHeapSeqNum, &destination.nonsecureMemory);
+ }
+
+ int32_t heapSeqNum = getHeapSeqNum(memory);
+ hardware::drm::V1_0::SharedBuffer source{(uint32_t)heapSeqNum, offset, size};
+
+ result = mCrypto->decrypt(key, iv, mode, pattern,
+ source, it->mClientBuffer->offset(),
+ subSamples, numSubSamples, destination, &errorDetailMsg);
+
+ if (result < 0) {
+ return result;
+ }
+
+ if (destination.type == DrmBufferType::SHARED_MEMORY) {
+ memcpy(it->mCodecBuffer->base(), mDecryptDestination->unsecurePointer(), result);
+ }
+ } else {
+ // Here we cast CryptoPlugin::SubSample to hardware::cas::native::V1_0::SubSample
+ // directly; the structure definitions should match, as checked in DescramblerImpl.cpp.
+ hidl_vec<SubSample> hidlSubSamples;
+ hidlSubSamples.setToExternal((SubSample *)subSamples, numSubSamples, false /*own*/);
+
+ hardware::cas::native::V1_0::SharedBuffer srcBuffer = {
+ .heapBase = *memory,
+ .offset = (uint64_t) offset,
+ .size = size
+ };
+
+ DestinationBuffer dstBuffer;
+ if (secure) {
+ dstBuffer.type = BufferType::NATIVE_HANDLE;
+ dstBuffer.secureMemory = hidl_handle(secureHandle);
+ } else {
+ dstBuffer.type = BufferType::SHARED_MEMORY;
+ dstBuffer.nonsecureMemory = srcBuffer;
+ }
+
+ Status status = Status::OK;
+ hidl_string detailedError;
+ ScramblingControl sctrl = ScramblingControl::UNSCRAMBLED;
+
+ if (key != NULL) {
+ sctrl = (ScramblingControl)key[0];
+ // Adjust for the PES offset
+ codecDataOffset = key[2] | (key[3] << 8);
+ }
+
+ auto returnVoid = mDescrambler->descramble(
+ sctrl,
+ hidlSubSamples,
+ srcBuffer,
+ 0,
+ dstBuffer,
+ 0,
+ [&status, &result, &detailedError] (
+ Status _status, uint32_t _bytesWritten,
+ const hidl_string& _detailedError) {
+ status = _status;
+ result = (ssize_t)_bytesWritten;
+ detailedError = _detailedError;
+ });
+
+ if (!returnVoid.isOk() || status != Status::OK || result < 0) {
+ ALOGE("descramble failed, trans=%s, status=%d, result=%zd",
+ returnVoid.description().c_str(), status, result);
+ return UNKNOWN_ERROR;
+ }
+
+ if (result < codecDataOffset) {
+ ALOGD("invalid codec data offset: %zd, result %zd", codecDataOffset, result);
+ return BAD_VALUE;
+ }
+
+ ALOGV("descramble succeeded, %zd bytes", result);
+
+ if (dstBuffer.type == BufferType::SHARED_MEMORY) {
+ memcpy(it->mCodecBuffer->base(),
+ (uint8_t*)it->mSharedEncryptedBuffer->unsecurePointer(),
+ result);
+ }
+ }
+
+ it->mCodecBuffer->setRange(codecDataOffset, result - codecDataOffset);
+ return OK;
+}
+
status_t ACodecBufferChannel::renderOutputBuffer(
const sp<MediaCodecBuffer> &buffer, int64_t timestampNs) {
std::shared_ptr<const std::vector<const BufferInfo>> array(
@@ -434,6 +608,16 @@
}
void ACodecBufferChannel::setCrypto(const sp<ICrypto> &crypto) {
+ if (mCrypto != nullptr) {
+ for (std::pair<wp<HidlMemory>, int32_t> entry : mHeapSeqNumMap) {
+ mCrypto->unsetHeap(entry.second);
+ }
+ mHeapSeqNumMap.clear();
+ if (mHeapSeqNum >= 0) {
+ mCrypto->unsetHeap(mHeapSeqNum);
+ mHeapSeqNum = -1;
+ }
+ }
mCrypto = crypto;
}
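
The attachEncryptedBuffer() path above hinges on two small pieces of arithmetic that are easy to skim past: the source size is the sum of every subsample's clear and encrypted byte counts, and on the descrambler (CAS) branch a little-endian PES offset is decoded from bytes 2 and 3 of the opaque "key" blob and later applied via setRange(). A minimal standalone sketch of just that arithmetic, using a stand-in struct in place of CryptoPlugin::SubSample (an assumption for illustration only):

```cpp
#include <cstddef>
#include <cstdint>

// Stand-in mirroring the two byte counts of CryptoPlugin::SubSample (assumed
// field names for illustration; the real struct lives in the crypto headers).
struct SubSampleInfo {
    uint32_t numBytesOfClearData;
    uint32_t numBytesOfEncryptedData;
};

// Total number of source bytes covered by a subsample array, as computed
// before choosing the decrypt/descramble destination above.
static size_t totalSubSampleSize(const SubSampleInfo *subSamples, size_t count) {
    size_t size = 0;
    for (size_t i = 0; i < count; ++i) {
        size += subSamples[i].numBytesOfClearData + subSamples[i].numBytesOfEncryptedData;
    }
    return size;
}

// For CAS content the "key" blob carries the scrambling control in byte 0 and
// a little-endian PES header offset in bytes 2..3; the codec buffer's range is
// shifted by this offset after descrambling.
static ptrdiff_t pesCodecDataOffset(const uint8_t key[16]) {
    return key[2] | (key[3] << 8);
}
```
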
diff --git a/media/libstagefright/Android.bp b/media/libstagefright/Android.bp
index 1f5ec55..a8fea90 100644
--- a/media/libstagefright/Android.bp
+++ b/media/libstagefright/Android.bp
@@ -41,7 +41,11 @@
cfi: true,
},
- shared_libs: ["libmedia", "libmediandk"],
+ header_libs: [
+ "libstagefright_foundation_headers",
+ ],
+ shared_libs: ["libmediandk"],
+ export_include_dirs: ["include"],
}
cc_library_shared {
@@ -99,8 +103,6 @@
shared_libs: [
"liblog",
- "libmedia",
- "libmedia_omx",
],
export_include_dirs: [
@@ -108,7 +110,9 @@
],
header_libs: [
- "libmedia_helper_headers",
+ "libaudioclient_headers",
+ "libmedia_headers",
+ "media_ndk_headers",
],
cflags: [
@@ -230,6 +234,8 @@
"libbinder",
"libbinder_ndk",
"libcamera_client",
+ "libcodec2",
+ "libcodec2_vndk",
"libcutils",
"libdatasource",
"libdl",
diff --git a/media/libstagefright/FrameCaptureProcessor.cpp b/media/libstagefright/FrameCaptureProcessor.cpp
index c517e33..96c1195 100644
--- a/media/libstagefright/FrameCaptureProcessor.cpp
+++ b/media/libstagefright/FrameCaptureProcessor.cpp
@@ -136,7 +136,7 @@
status_t FrameCaptureProcessor::onCapture(const sp<Layer> &layer,
const Rect &sourceCrop, const sp<GraphicBuffer> &buffer) {
renderengine::DisplaySettings clientCompositionDisplay;
- std::vector<renderengine::LayerSettings> clientCompositionLayers;
+ std::vector<const renderengine::LayerSettings*> clientCompositionLayers;
clientCompositionDisplay.physicalDisplay = sourceCrop;
clientCompositionDisplay.clip = sourceCrop;
@@ -150,7 +150,7 @@
layer->getLayerSettings(sourceCrop, mTextureName, &layerSettings);
- clientCompositionLayers.push_back(layerSettings);
+ clientCompositionLayers.push_back(&layerSettings);
// Use an empty fence for the buffer fence, since we just created the buffer so
// there is no need for synchronization with the GPU.
diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp
index 712be41..6757474 100644
--- a/media/libstagefright/MediaCodec.cpp
+++ b/media/libstagefright/MediaCodec.cpp
@@ -21,6 +21,8 @@
#include <inttypes.h>
#include <stdlib.h>
+#include <C2Buffer.h>
+
#include "include/SoftwareRenderer.h"
#include <android/hardware/cas/native/1.0/IDescrambler.h>
@@ -38,6 +40,7 @@
#include <mediadrm/ICrypto.h>
#include <media/IOMX.h>
#include <media/MediaCodecBuffer.h>
+#include <media/MediaCodecInfo.h>
#include <media/MediaMetricsItem.h>
#include <media/MediaResource.h>
#include <media/stagefright/foundation/ABuffer.h>
@@ -128,6 +131,9 @@
static const int kMaxReclaimWaitTimeInUs = 500000; // 0.5s
static const int kNumBuffersAlign = 16;
+static const C2MemoryUsage kDefaultReadWriteUsage{
+ C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE};
+
////////////////////////////////////////////////////////////////////////////////
struct ResourceManagerClient : public BnResourceManagerClient {
@@ -992,6 +998,28 @@
}
}
+struct CodecListCache {
+ CodecListCache()
+ : mCodecInfoMap{[] {
+ const sp<IMediaCodecList> mcl = MediaCodecList::getInstance();
+ size_t count = mcl->countCodecs();
+ std::map<std::string, sp<MediaCodecInfo>> codecInfoMap;
+ for (size_t i = 0; i < count; ++i) {
+ sp<MediaCodecInfo> info = mcl->getCodecInfo(i);
+ codecInfoMap.emplace(info->getCodecName(), info);
+ }
+ return codecInfoMap;
+ }()} {
+ }
+
+ const std::map<std::string, sp<MediaCodecInfo>> mCodecInfoMap;
+};
+
+static const CodecListCache &GetCodecListCache() {
+ static CodecListCache sCache{};
+ return sCache;
+}
+
status_t MediaCodec::init(const AString &name) {
mResourceManagerProxy->init();
@@ -1501,6 +1529,75 @@
return err;
}
+status_t MediaCodec::queueBuffer(
+ size_t index,
+ const std::shared_ptr<C2Buffer> &buffer,
+ int64_t presentationTimeUs,
+ uint32_t flags,
+ const sp<AMessage> &tunings,
+ AString *errorDetailMsg) {
+ if (errorDetailMsg != NULL) {
+ errorDetailMsg->clear();
+ }
+
+ sp<AMessage> msg = new AMessage(kWhatQueueInputBuffer, this);
+ msg->setSize("index", index);
+ sp<WrapperObject<std::shared_ptr<C2Buffer>>> obj{
+ new WrapperObject<std::shared_ptr<C2Buffer>>{buffer}};
+ msg->setObject("c2buffer", obj);
+ msg->setInt64("timeUs", presentationTimeUs);
+ msg->setInt32("flags", flags);
+ msg->setMessage("tunings", tunings);
+ msg->setPointer("errorDetailMsg", errorDetailMsg);
+
+ sp<AMessage> response;
+ status_t err = PostAndAwaitResponse(msg, &response);
+
+ return err;
+}
+
+status_t MediaCodec::queueEncryptedBuffer(
+ size_t index,
+ const sp<hardware::HidlMemory> &buffer,
+ size_t offset,
+ const CryptoPlugin::SubSample *subSamples,
+ size_t numSubSamples,
+ const uint8_t key[16],
+ const uint8_t iv[16],
+ CryptoPlugin::Mode mode,
+ const CryptoPlugin::Pattern &pattern,
+ int64_t presentationTimeUs,
+ uint32_t flags,
+ const sp<AMessage> &tunings,
+ AString *errorDetailMsg) {
+ if (errorDetailMsg != NULL) {
+ errorDetailMsg->clear();
+ }
+
+ sp<AMessage> msg = new AMessage(kWhatQueueInputBuffer, this);
+ msg->setSize("index", index);
+ sp<WrapperObject<sp<hardware::HidlMemory>>> memory{
+ new WrapperObject<sp<hardware::HidlMemory>>{buffer}};
+ msg->setObject("memory", memory);
+ msg->setSize("offset", offset);
+ msg->setPointer("subSamples", (void *)subSamples);
+ msg->setSize("numSubSamples", numSubSamples);
+ msg->setPointer("key", (void *)key);
+ msg->setPointer("iv", (void *)iv);
+ msg->setInt32("mode", mode);
+ msg->setInt32("encryptBlocks", pattern.mEncryptBlocks);
+ msg->setInt32("skipBlocks", pattern.mSkipBlocks);
+ msg->setInt64("timeUs", presentationTimeUs);
+ msg->setInt32("flags", flags);
+ msg->setMessage("tunings", tunings);
+ msg->setPointer("errorDetailMsg", errorDetailMsg);
+
+ sp<AMessage> response;
+ status_t err = PostAndAwaitResponse(msg, &response);
+
+ return err;
+}
+
status_t MediaCodec::dequeueInputBuffer(size_t *index, int64_t timeoutUs) {
sp<AMessage> msg = new AMessage(kWhatDequeueInputBuffer, this);
msg->setInt64("timeoutUs", timeoutUs);
@@ -2355,6 +2452,23 @@
sp<MediaCodecBuffer> buffer = static_cast<MediaCodecBuffer *>(obj.get());
if (mOutputFormat != buffer->format()) {
+ if (mFlags & kFlagUseBlockModel) {
+ sp<AMessage> diff1 = mOutputFormat->changesFrom(buffer->format());
+ sp<AMessage> diff2 = buffer->format()->changesFrom(mOutputFormat);
+ std::set<std::string> keys;
+ size_t numEntries = diff1->countEntries();
+ AMessage::Type type;
+ for (size_t i = 0; i < numEntries; ++i) {
+ keys.emplace(diff1->getEntryNameAt(i, &type));
+ }
+ numEntries = diff2->countEntries();
+ for (size_t i = 0; i < numEntries; ++i) {
+ keys.emplace(diff2->getEntryNameAt(i, &type));
+ }
+ sp<WrapperObject<std::set<std::string>>> changedKeys{
+ new WrapperObject<std::set<std::string>>{std::move(keys)}};
+ buffer->meta()->setObject("changedKeys", changedKeys);
+ }
mOutputFormat = buffer->format();
ALOGV("[%s] output format changed to: %s",
mComponentName.c_str(), mOutputFormat->debugString(4).c_str());
@@ -2622,6 +2736,15 @@
handleSetSurface(NULL);
}
+ uint32_t flags;
+ CHECK(msg->findInt32("flags", (int32_t *)&flags));
+ if (flags & CONFIGURE_FLAG_USE_BLOCK_MODEL) {
+ if (!(mFlags & kFlagIsAsync)) {
+ PostReplyWithError(replyID, INVALID_OPERATION);
+ break;
+ }
+ mFlags |= kFlagUseBlockModel;
+ }
mReplyID = replyID;
setState(CONFIGURING);
@@ -2647,9 +2770,7 @@
mDescrambler = static_cast<IDescrambler *>(descrambler);
mBufferChannel->setDescrambler(mDescrambler);
- uint32_t flags;
- CHECK(msg->findInt32("flags", (int32_t *)&flags));
-
+ format->setInt32("flags", flags);
if (flags & CONFIGURE_FLAG_ENCODE) {
format->setInt32("encoder", true);
mFlags |= kFlagIsEncoder;
@@ -3234,22 +3355,36 @@
status_t MediaCodec::queueCSDInputBuffer(size_t bufferIndex) {
CHECK(!mCSD.empty());
- const BufferInfo &info = mPortBuffers[kPortIndexInput][bufferIndex];
-
sp<ABuffer> csd = *mCSD.begin();
mCSD.erase(mCSD.begin());
+ std::shared_ptr<C2Buffer> c2Buffer;
- const sp<MediaCodecBuffer> &codecInputData = info.mData;
+ if ((mFlags & kFlagUseBlockModel) && mOwnerName.startsWith("codec2::")) {
+ std::shared_ptr<C2LinearBlock> block =
+ FetchLinearBlock(csd->size(), {std::string{mComponentName.c_str()}});
+ C2WriteView view{block->map().get()};
+ if (view.error() != C2_OK) {
+ return -EINVAL;
+ }
+ if (csd->size() > view.capacity()) {
+ return -EINVAL;
+ }
+ memcpy(view.base(), csd->data(), csd->size());
+ c2Buffer = C2Buffer::CreateLinearBuffer(block->share(0, csd->size(), C2Fence{}));
+ } else {
+ const BufferInfo &info = mPortBuffers[kPortIndexInput][bufferIndex];
+ const sp<MediaCodecBuffer> &codecInputData = info.mData;
- if (csd->size() > codecInputData->capacity()) {
- return -EINVAL;
+ if (csd->size() > codecInputData->capacity()) {
+ return -EINVAL;
+ }
+ if (codecInputData->data() == NULL) {
+ ALOGV("Input buffer %zu is not properly allocated", bufferIndex);
+ return -EINVAL;
+ }
+
+ memcpy(codecInputData->data(), csd->data(), csd->size());
}
- if (codecInputData->data() == NULL) {
- ALOGV("Input buffer %zu is not properly allocated", bufferIndex);
- return -EINVAL;
- }
-
- memcpy(codecInputData->data(), csd->data(), csd->size());
AString errorDetailMsg;
@@ -3260,6 +3395,12 @@
msg->setInt64("timeUs", 0LL);
msg->setInt32("flags", BUFFER_FLAG_CODECCONFIG);
msg->setPointer("errorDetailMsg", &errorDetailMsg);
+ if (c2Buffer) {
+ sp<WrapperObject<std::shared_ptr<C2Buffer>>> obj{
+ new WrapperObject<std::shared_ptr<C2Buffer>>{c2Buffer}};
+ msg->setObject("c2buffer", obj);
+ msg->setMessage("tunings", new AMessage);
+ }
return onQueueInputBuffer(msg);
}
@@ -3365,10 +3506,20 @@
int64_t timeUs;
uint32_t flags;
CHECK(msg->findSize("index", &index));
- CHECK(msg->findSize("offset", &offset));
CHECK(msg->findInt64("timeUs", &timeUs));
CHECK(msg->findInt32("flags", (int32_t *)&flags));
-
+ std::shared_ptr<C2Buffer> c2Buffer;
+ sp<hardware::HidlMemory> memory;
+ sp<RefBase> obj;
+ if (msg->findObject("c2buffer", &obj)) {
+ CHECK(obj);
+ c2Buffer = static_cast<WrapperObject<std::shared_ptr<C2Buffer>> *>(obj.get())->value;
+ } else if (msg->findObject("memory", &obj)) {
+ CHECK(obj);
+ memory = static_cast<WrapperObject<sp<hardware::HidlMemory>> *>(obj.get())->value;
+ } else {
+ CHECK(msg->findSize("offset", &offset));
+ }
const CryptoPlugin::SubSample *subSamples;
size_t numSubSamples;
const uint8_t *key;
@@ -3392,7 +3543,7 @@
pattern.mEncryptBlocks = 0;
pattern.mSkipBlocks = 0;
}
- } else {
+ } else if (!c2Buffer) {
if (!hasCryptoOrDescrambler()) {
ALOGE("[%s] queuing secure buffer without mCrypto or mDescrambler!",
mComponentName.c_str());
@@ -3423,31 +3574,52 @@
}
BufferInfo *info = &mPortBuffers[kPortIndexInput][index];
+ sp<MediaCodecBuffer> buffer = info->mData;
- if (info->mData == nullptr || !info->mOwnedByClient) {
+ if (c2Buffer || memory) {
+ sp<AMessage> tunings;
+ CHECK(msg->findMessage("tunings", &tunings));
+ onSetParameters(tunings);
+
+ status_t err = OK;
+ if (c2Buffer) {
+ err = mBufferChannel->attachBuffer(c2Buffer, buffer);
+ } else if (memory) {
+ err = mBufferChannel->attachEncryptedBuffer(
+ memory, (mFlags & kFlagIsSecure), key, iv, mode, pattern,
+ offset, subSamples, numSubSamples, buffer);
+ }
+ offset = buffer->offset();
+ size = buffer->size();
+ if (err != OK) {
+ return err;
+ }
+ }
+
+ if (buffer == nullptr || !info->mOwnedByClient) {
return -EACCES;
}
- if (offset + size > info->mData->capacity()) {
+ if (offset + size > buffer->capacity()) {
return -EINVAL;
}
- info->mData->setRange(offset, size);
- info->mData->meta()->setInt64("timeUs", timeUs);
+ buffer->setRange(offset, size);
+ buffer->meta()->setInt64("timeUs", timeUs);
if (flags & BUFFER_FLAG_EOS) {
- info->mData->meta()->setInt32("eos", true);
+ buffer->meta()->setInt32("eos", true);
}
if (flags & BUFFER_FLAG_CODECCONFIG) {
- info->mData->meta()->setInt32("csd", true);
+ buffer->meta()->setInt32("csd", true);
}
- sp<MediaCodecBuffer> buffer = info->mData;
status_t err = OK;
if (hasCryptoOrDescrambler()) {
AString *errorDetailMsg;
CHECK(msg->findPointer("errorDetailMsg", (void **)&errorDetailMsg));
+ ALOGI("calling queueSec");
err = mBufferChannel->queueSecureInputBuffer(
buffer,
(mFlags & kFlagIsSecure),
@@ -3835,4 +4007,70 @@
return rval;
}
+// static
+status_t MediaCodec::CanFetchLinearBlock(
+ const std::vector<std::string> &names, bool *isCompatible) {
+ *isCompatible = false;
+ if (names.size() == 0) {
+ *isCompatible = true;
+ return OK;
+ }
+ const CodecListCache &cache = GetCodecListCache();
+ for (const std::string &name : names) {
+ auto it = cache.mCodecInfoMap.find(name);
+ if (it == cache.mCodecInfoMap.end()) {
+ return NAME_NOT_FOUND;
+ }
+ const char *owner = it->second->getOwnerName();
+ if (owner == nullptr || strncmp(owner, "default", 8) == 0) {
+ *isCompatible = false;
+ return OK;
+ } else if (strncmp(owner, "codec2::", 8) != 0) {
+ return NAME_NOT_FOUND;
+ }
+ }
+ return CCodec::CanFetchLinearBlock(names, kDefaultReadWriteUsage, isCompatible);
+}
+
+// static
+std::shared_ptr<C2LinearBlock> MediaCodec::FetchLinearBlock(
+ size_t capacity, const std::vector<std::string> &names) {
+ return CCodec::FetchLinearBlock(capacity, kDefaultReadWriteUsage, names);
+}
+
+// static
+status_t MediaCodec::CanFetchGraphicBlock(
+ const std::vector<std::string> &names, bool *isCompatible) {
+ *isCompatible = false;
+ if (names.size() == 0) {
+ *isCompatible = true;
+ return OK;
+ }
+ const CodecListCache &cache = GetCodecListCache();
+ for (const std::string &name : names) {
+ auto it = cache.mCodecInfoMap.find(name);
+ if (it == cache.mCodecInfoMap.end()) {
+ return NAME_NOT_FOUND;
+ }
+ const char *owner = it->second->getOwnerName();
+ if (owner == nullptr || strncmp(owner, "default", 8) == 0) {
+ *isCompatible = false;
+ return OK;
+ } else if (strncmp(owner, "codec2.", 7) != 0) {
+ return NAME_NOT_FOUND;
+ }
+ }
+ return CCodec::CanFetchGraphicBlock(names, isCompatible);
+}
+
+// static
+std::shared_ptr<C2GraphicBlock> MediaCodec::FetchGraphicBlock(
+ int32_t width,
+ int32_t height,
+ int32_t format,
+ uint64_t usage,
+ const std::vector<std::string> &names) {
+ return CCodec::FetchGraphicBlock(width, height, format, usage, names);
+}
+
} // namespace android
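
The block-model additions above (CONFIGURE_FLAG_USE_BLOCK_MODEL, queueBuffer(), FetchLinearBlock()) come together in the new queueCSDInputBuffer() branch: fetch a linear block, map it, copy the payload, wrap it as a C2Buffer, and queue it. A hedged caller-side sketch assembled only from the declarations in this patch; the empty component-name list and the omitted error reporting are illustrative simplifications, not requirements:

```cpp
#include <string.h>

#include <C2Buffer.h>
#include <media/stagefright/MediaCodec.h>
#include <media/stagefright/foundation/AMessage.h>
#include <utils/Errors.h>

using namespace android;

// Queue one input payload to a codec configured with
// CONFIGURE_FLAG_USE_BLOCK_MODEL in async mode (sketch only).
static status_t queueLinearInput(const sp<MediaCodec> &codec, size_t index,
                                 const void *data, size_t size, int64_t timeUs) {
    // An empty name list means "compatible with any codec" per CanFetchLinearBlock().
    std::shared_ptr<C2LinearBlock> block = MediaCodec::FetchLinearBlock(size, {});
    if (!block) {
        return NO_MEMORY;
    }
    C2WriteView view{block->map().get()};
    if (view.error() != C2_OK || size > view.capacity()) {
        return UNKNOWN_ERROR;
    }
    memcpy(view.base(), data, size);
    // Share only the written range and hand it over through the new queueBuffer().
    std::shared_ptr<C2Buffer> c2buf =
            C2Buffer::CreateLinearBuffer(block->share(0, size, C2Fence{}));
    return codec->queueBuffer(index, c2buf, timeUs, 0 /* flags */,
                              new AMessage /* tunings */, nullptr /* errorDetailMsg */);
}
```
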
diff --git a/media/libstagefright/MetaDataUtils.cpp b/media/libstagefright/MetaDataUtils.cpp
index 3f0bc7d..db60f04 100644
--- a/media/libstagefright/MetaDataUtils.cpp
+++ b/media/libstagefright/MetaDataUtils.cpp
@@ -25,7 +25,6 @@
#include <media/stagefright/foundation/ByteUtils.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MetaDataUtils.h>
-#include <media/stagefright/Utils.h>
#include <media/NdkMediaFormat.h>
namespace android {
diff --git a/media/libstagefright/flac/dec/Android.bp b/media/libstagefright/flac/dec/Android.bp
index 7ebe71f..d65a663 100644
--- a/media/libstagefright/flac/dec/Android.bp
+++ b/media/libstagefright/flac/dec/Android.bp
@@ -1,4 +1,4 @@
-cc_library_shared {
+cc_library {
name: "libstagefright_flacdec",
vendor_available: true,
diff --git a/media/libstagefright/include/ACodecBufferChannel.h b/media/libstagefright/include/ACodecBufferChannel.h
index 5f65575..da962d1 100644
--- a/media/libstagefright/include/ACodecBufferChannel.h
+++ b/media/libstagefright/include/ACodecBufferChannel.h
@@ -81,6 +81,20 @@
const CryptoPlugin::SubSample *subSamples,
size_t numSubSamples,
AString *errorDetailMsg) override;
+ virtual status_t attachBuffer(
+ const std::shared_ptr<C2Buffer> &c2Buffer,
+ const sp<MediaCodecBuffer> &buffer) override;
+ virtual status_t attachEncryptedBuffer(
+ const sp<hardware::HidlMemory> &memory,
+ bool secure,
+ const uint8_t *key,
+ const uint8_t *iv,
+ CryptoPlugin::Mode mode,
+ CryptoPlugin::Pattern pattern,
+ size_t offset,
+ const CryptoPlugin::SubSample *subSamples,
+ size_t numSubSamples,
+ const sp<MediaCodecBuffer> &buffer) override;
virtual status_t renderOutputBuffer(
const sp<MediaCodecBuffer> &buffer, int64_t timestampNs) override;
virtual status_t discardBuffer(const sp<MediaCodecBuffer> &buffer) override;
@@ -118,12 +132,15 @@
void drainThisBuffer(IOMX::buffer_id bufferID, OMX_U32 omxFlags);
private:
+ int32_t getHeapSeqNum(const sp<HidlMemory> &memory);
+
const sp<AMessage> mInputBufferFilled;
const sp<AMessage> mOutputBufferDrained;
sp<MemoryDealer> mDealer;
sp<IMemory> mDecryptDestination;
int32_t mHeapSeqNum;
+ std::map<wp<HidlMemory>, int32_t> mHeapSeqNumMap;
sp<HidlMemory> mHidlMemory;
// These should only be accessed via std::atomic_* functions.
diff --git a/media/libstagefright/include/media/stagefright/ACodec.h b/media/libstagefright/include/media/stagefright/ACodec.h
index 7754de4..d56ec4f 100644
--- a/media/libstagefright/include/media/stagefright/ACodec.h
+++ b/media/libstagefright/include/media/stagefright/ACodec.h
@@ -158,6 +158,7 @@
kFlagIsSecure = 1,
kFlagPushBlankBuffersToNativeWindowOnShutdown = 2,
kFlagIsGrallocUsageProtected = 4,
+ kFlagPreregisterMetadataBuffers = 8,
};
enum {
diff --git a/media/libstagefright/include/media/stagefright/CodecBase.h b/media/libstagefright/include/media/stagefright/CodecBase.h
index bc7881c..dd6df90 100644
--- a/media/libstagefright/include/media/stagefright/CodecBase.h
+++ b/media/libstagefright/include/media/stagefright/CodecBase.h
@@ -34,6 +34,8 @@
#include <system/graphics.h>
#include <utils/NativeHandle.h>
+class C2Buffer;
+
namespace android {
class BufferChannelBase;
struct BufferProducerWrapper;
@@ -45,6 +47,7 @@
class IMemory;
namespace hardware {
+class HidlMemory;
namespace cas {
namespace native {
namespace V1_0 {
@@ -295,6 +298,50 @@
size_t numSubSamples,
AString *errorDetailMsg) = 0;
/**
+ * Attach a Codec 2.0 buffer to MediaCodecBuffer.
+ *
+ * @return OK if successful;
+ * -ENOENT if index is not recognized
+ * -ENOSYS if attaching buffer is not possible or not supported
+ */
+ virtual status_t attachBuffer(
+ const std::shared_ptr<C2Buffer> &c2Buffer,
+ const sp<MediaCodecBuffer> &buffer) {
+ (void)c2Buffer;
+ (void)buffer;
+ return -ENOSYS;
+ }
+ /**
+ * Attach an encrypted HidlMemory buffer to an index
+ *
+ * @return OK if successful;
+ * -ENOENT if index is not recognized
+ * -ENOSYS if attaching buffer is not possible or not supported
+ */
+ virtual status_t attachEncryptedBuffer(
+ const sp<hardware::HidlMemory> &memory,
+ bool secure,
+ const uint8_t *key,
+ const uint8_t *iv,
+ CryptoPlugin::Mode mode,
+ CryptoPlugin::Pattern pattern,
+ size_t offset,
+ const CryptoPlugin::SubSample *subSamples,
+ size_t numSubSamples,
+ const sp<MediaCodecBuffer> &buffer) {
+ (void)memory;
+ (void)secure;
+ (void)key;
+ (void)iv;
+ (void)mode;
+ (void)pattern;
+ (void)offset;
+ (void)subSamples;
+ (void)numSubSamples;
+ (void)buffer;
+ return -ENOSYS;
+ }
+ /**
* Request buffer rendering at specified time.
*
* @param timestampNs nanosecond timestamp for rendering time.
diff --git a/media/libstagefright/include/media/stagefright/MediaCodec.h b/media/libstagefright/include/media/stagefright/MediaCodec.h
index 5c5c23a..022c48e 100644
--- a/media/libstagefright/include/media/stagefright/MediaCodec.h
+++ b/media/libstagefright/include/media/stagefright/MediaCodec.h
@@ -29,6 +29,10 @@
#include <media/stagefright/FrameRenderTracker.h>
#include <utils/Vector.h>
+class C2Buffer;
+class C2GraphicBlock;
+class C2LinearBlock;
+
namespace aidl {
namespace android {
namespace media {
@@ -65,7 +69,8 @@
struct MediaCodec : public AHandler {
enum ConfigureFlags {
- CONFIGURE_FLAG_ENCODE = 1,
+ CONFIGURE_FLAG_ENCODE = 1,
+ CONFIGURE_FLAG_USE_BLOCK_MODEL = 2,
};
enum BufferFlags {
@@ -157,6 +162,38 @@
uint32_t flags,
AString *errorDetailMsg = NULL);
+ status_t queueBuffer(
+ size_t index,
+ const std::shared_ptr<C2Buffer> &buffer,
+ int64_t presentationTimeUs,
+ uint32_t flags,
+ const sp<AMessage> &tunings,
+ AString *errorDetailMsg = NULL);
+
+ status_t queueEncryptedBuffer(
+ size_t index,
+ const sp<hardware::HidlMemory> &memory,
+ size_t offset,
+ const CryptoPlugin::SubSample *subSamples,
+ size_t numSubSamples,
+ const uint8_t key[16],
+ const uint8_t iv[16],
+ CryptoPlugin::Mode mode,
+ const CryptoPlugin::Pattern &pattern,
+ int64_t presentationTimeUs,
+ uint32_t flags,
+ const sp<AMessage> &tunings,
+ AString *errorDetailMsg = NULL);
+
+ std::shared_ptr<C2Buffer> decrypt(
+ const std::shared_ptr<C2Buffer> &buffer,
+ const CryptoPlugin::SubSample *subSamples,
+ size_t numSubSamples,
+ const uint8_t key[16],
+ const uint8_t iv[16],
+ CryptoPlugin::Mode mode,
+ const CryptoPlugin::Pattern &pattern);
+
status_t dequeueInputBuffer(size_t *index, int64_t timeoutUs = 0ll);
status_t dequeueOutputBuffer(
@@ -206,6 +243,29 @@
static size_t CreateFramesRenderedMessage(
const std::list<FrameRenderTracker::Info> &done, sp<AMessage> &msg);
+ static status_t CanFetchLinearBlock(
+ const std::vector<std::string> &names, bool *isCompatible);
+
+ static std::shared_ptr<C2LinearBlock> FetchLinearBlock(
+ size_t capacity, const std::vector<std::string> &names);
+
+ static status_t CanFetchGraphicBlock(
+ const std::vector<std::string> &names, bool *isCompatible);
+
+ static std::shared_ptr<C2GraphicBlock> FetchGraphicBlock(
+ int32_t width,
+ int32_t height,
+ int32_t format,
+ uint64_t usage,
+ const std::vector<std::string> &names);
+
+ template <typename T>
+ struct WrapperObject : public RefBase {
+ WrapperObject(const T& v) : value(v) {}
+ WrapperObject(T&& v) : value(std::move(v)) {}
+ T value;
+ };
+
protected:
virtual ~MediaCodec();
virtual void onMessageReceived(const sp<AMessage> &msg);
@@ -282,6 +342,7 @@
kFlagIsAsync = 1024,
kFlagIsComponentAllocated = 2048,
kFlagPushBlankBuffersOnShutdown = 4096,
+ kFlagUseBlockModel = 8192,
};
struct BufferInfo {
diff --git a/media/libstagefright/include/media/stagefright/MediaCodecConstants.h b/media/libstagefright/include/media/stagefright/MediaCodecConstants.h
index 16e207d..eb7e5f7 100644
--- a/media/libstagefright/include/media/stagefright/MediaCodecConstants.h
+++ b/media/libstagefright/include/media/stagefright/MediaCodecConstants.h
@@ -830,6 +830,7 @@
constexpr int32_t BUFFER_FLAG_PARTIAL_FRAME = 8;
constexpr int32_t BUFFER_FLAG_SYNC_FRAME = 1;
constexpr int32_t CONFIGURE_FLAG_ENCODE = 1;
+constexpr int32_t CONFIGURE_FLAG_USE_BLOCK_MODEL = 2;
constexpr int32_t CRYPTO_MODE_AES_CBC = 2;
constexpr int32_t CRYPTO_MODE_AES_CTR = 1;
constexpr int32_t CRYPTO_MODE_UNENCRYPTED = 0;
diff --git a/media/libstagefright/include/media/stagefright/MediaErrors.h b/media/libstagefright/include/media/stagefright/MediaErrors.h
index 6f48c5d..5418f10 100644
--- a/media/libstagefright/include/media/stagefright/MediaErrors.h
+++ b/media/libstagefright/include/media/stagefright/MediaErrors.h
@@ -105,7 +105,8 @@
ERROR_CAS_CARD_MUTE = CAS_ERROR_BASE - 15,
ERROR_CAS_CARD_INVALID = CAS_ERROR_BASE - 16,
ERROR_CAS_BLACKOUT = CAS_ERROR_BASE - 17,
- ERROR_CAS_LAST_USED_ERRORCODE = CAS_ERROR_BASE - 17,
+ ERROR_CAS_REBOOTING = CAS_ERROR_BASE - 18,
+ ERROR_CAS_LAST_USED_ERRORCODE = CAS_ERROR_BASE - 18,
ERROR_CAS_VENDOR_MAX = CAS_ERROR_BASE - 500,
ERROR_CAS_VENDOR_MIN = CAS_ERROR_BASE - 999,
diff --git a/media/libstagefright/mpeg2ts/Android.bp b/media/libstagefright/mpeg2ts/Android.bp
index a507b91..42afea3 100644
--- a/media/libstagefright/mpeg2ts/Android.bp
+++ b/media/libstagefright/mpeg2ts/Android.bp
@@ -29,7 +29,6 @@
shared_libs: [
"libcrypto",
- "libmedia",
"libhidlmemory",
"android.hardware.cas.native@1.0",
"android.hidl.memory@1.0",
@@ -37,9 +36,13 @@
],
header_libs: [
+ "libmedia_headers",
+ "libaudioclient_headers",
"media_ndk_headers",
],
+ export_include_dirs: ["."],
+
whole_static_libs: [
"libstagefright_metadatautils",
],
diff --git a/media/libstagefright/omx/OMXNodeInstance.cpp b/media/libstagefright/omx/OMXNodeInstance.cpp
index e1c3916..2f69f45 100644
--- a/media/libstagefright/omx/OMXNodeInstance.cpp
+++ b/media/libstagefright/omx/OMXNodeInstance.cpp
@@ -492,6 +492,12 @@
}
case OMX_StateLoaded:
+ {
+ if (mActiveBuffers.size() > 0) {
+ freeActiveBuffers();
+ }
+ FALLTHROUGH_INTENDED;
+ }
case OMX_StateInvalid:
break;
diff --git a/media/libstagefright/tests/writer/README.md b/media/libstagefright/tests/writer/README.md
index 52db6f0..ae07917 100644
--- a/media/libstagefright/tests/writer/README.md
+++ b/media/libstagefright/tests/writer/README.md
@@ -19,12 +19,13 @@
adb push ${OUT}/data/nativetest/writerTest/writerTest /data/local/tmp/
-The resource file for the tests is taken from Codec2 VTS resource folder. Push these files into device for testing.
+The resource files for the tests are taken from [here](https://storage.googleapis.com/android_media/frameworks/av/media/libstagefright/tests/writer/writerTestRes.zip).
+Download and extract the archive, then push all the files in the extracted folder to /data/local/tmp/ on the device.
```
-adb push $ANDROID_BUILD_TOP/frameworks/av/media/codec2/hidl/1.0/vts/functional/res /sdcard/
+adb push writerTestRes /data/local/tmp/
```
usage: writerTest -P \<path_to_res_folder\>
```
-adb shell /data/local/tmp/writerTest -P /sdcard/res/
+adb shell /data/local/tmp/writerTest -P /data/local/tmp/
```
diff --git a/media/libstagefright/tests/writer/WriterTest.cpp b/media/libstagefright/tests/writer/WriterTest.cpp
index d68438c..409f141 100644
--- a/media/libstagefright/tests/writer/WriterTest.cpp
+++ b/media/libstagefright/tests/writer/WriterTest.cpp
@@ -29,9 +29,9 @@
#include <media/stagefright/AACWriter.h>
#include <media/stagefright/AMRWriter.h>
-#include <media/stagefright/OggWriter.h>
-#include <media/stagefright/MPEG4Writer.h>
#include <media/stagefright/MPEG2TSWriter.h>
+#include <media/stagefright/MPEG4Writer.h>
+#include <media/stagefright/OggWriter.h>
#include <webm/WebmWriter.h>
#include "WriterTestEnvironment.h"
@@ -89,19 +89,36 @@
class WriterTest : public ::testing::TestWithParam<pair<string, int32_t>> {
public:
+ WriterTest() : mWriter(nullptr), mFileMeta(nullptr), mCurrentTrack(nullptr) {}
+
+ ~WriterTest() {
+ if (mWriter) {
+ mWriter.clear();
+ mWriter = nullptr;
+ }
+ if (mFileMeta) {
+ mFileMeta.clear();
+ mFileMeta = nullptr;
+ }
+ if (mCurrentTrack) {
+ mCurrentTrack.clear();
+ mCurrentTrack = nullptr;
+ }
+ }
+
virtual void SetUp() override {
mNumCsds = 0;
mInputFrameId = 0;
mWriterName = unknown_comp;
mDisableTest = false;
- std::map<std::string, standardWriters> mapWriter = {
+ static const std::map<std::string, standardWriters> mapWriter = {
{"ogg", OGG}, {"aac", AAC}, {"aac_adts", AAC_ADTS}, {"webm", WEBM},
{"mpeg4", MPEG4}, {"amrnb", AMR_NB}, {"amrwb", AMR_WB}, {"mpeg2Ts", MPEG2TS}};
// Find the component type
string writerFormat = GetParam().first;
if (mapWriter.find(writerFormat) != mapWriter.end()) {
- mWriterName = mapWriter[writerFormat];
+ mWriterName = mapWriter.at(writerFormat);
}
if (mWriterName == standardWriters::unknown_comp) {
cout << "[ WARN ] Test Skipped. No specific writer mentioned\n";
@@ -110,10 +127,8 @@
}
virtual void TearDown() override {
- mWriter.clear();
- mFileMeta.clear();
mBufferInfo.clear();
- if (mInputStream) mInputStream.close();
+ if (mInputStream.is_open()) mInputStream.close();
}
void getInputBufferInfo(string inputFileName, string inputInfo);
@@ -149,7 +164,7 @@
void WriterTest::getInputBufferInfo(string inputFileName, string inputInfo) {
std::ifstream eleInfo;
eleInfo.open(inputInfo.c_str());
- CHECK_EQ(eleInfo.is_open(), true);
+ ASSERT_EQ(eleInfo.is_open(), true);
int32_t bytesCount = 0;
uint32_t flags = 0;
int64_t timestamp = 0;
@@ -162,7 +177,7 @@
}
eleInfo.close();
mInputStream.open(inputFileName.c_str(), std::ifstream::binary);
- CHECK_EQ(mInputStream.is_open(), true);
+ ASSERT_EQ(mInputStream.is_open(), true);
}
int32_t WriterTest::createWriter(int32_t fd) {
@@ -258,14 +273,11 @@
string outputFile = OUTPUT_FILE_NAME;
int32_t fd =
open(outputFile.c_str(), O_CREAT | O_LARGEFILE | O_TRUNC | O_RDWR, S_IRUSR | S_IWUSR);
- if (fd < 0) return;
+ ASSERT_GE(fd, 0) << "Failed to open output file to dump writer's data";
// Creating writer within a test scope. Destructor should be called when the test ends
- int32_t status = createWriter(fd);
- if (status) {
- cout << "Failed to create writer for output format:" << GetParam().first << "\n";
- ASSERT_TRUE(false);
- }
+ ASSERT_EQ((status_t)OK, createWriter(fd))
+ << "Failed to create writer for output format:" << GetParam().first;
}
TEST_P(WriterTest, WriterTest) {
@@ -276,38 +288,32 @@
string outputFile = OUTPUT_FILE_NAME;
int32_t fd =
open(outputFile.c_str(), O_CREAT | O_LARGEFILE | O_TRUNC | O_RDWR, S_IRUSR | S_IWUSR);
- if (fd < 0) return;
+ ASSERT_GE(fd, 0) << "Failed to open output file to dump writer's data";
+
int32_t status = createWriter(fd);
- if (status) {
- cout << "Failed to create writer for output format:" << writerFormat << "\n";
- ASSERT_TRUE(false);
- }
+ ASSERT_EQ((status_t)OK, status) << "Failed to create writer for output format:" << writerFormat;
+
string inputFile = gEnv->getRes();
string inputInfo = gEnv->getRes();
configFormat param;
bool isAudio;
int32_t inputFileIdx = GetParam().second;
getFileDetails(inputFile, inputInfo, param, isAudio, inputFileIdx);
- if (!inputFile.compare(gEnv->getRes())) {
- ALOGV("No input file specified");
- return;
- }
+ ASSERT_NE(inputFile.compare(gEnv->getRes()), 0) << "No input file specified";
+
getInputBufferInfo(inputFile, inputInfo);
status = addWriterSource(isAudio, param);
- if (status) {
- cout << "Failed to add source for " << writerFormat << "Writer \n";
- ASSERT_TRUE(false);
- }
- CHECK_EQ((status_t)OK, mWriter->start(mFileMeta.get()));
+ ASSERT_EQ((status_t)OK, status) << "Failed to add source for " << writerFormat << "Writer";
+
+ status = mWriter->start(mFileMeta.get());
+ ASSERT_EQ((status_t)OK, status);
status = sendBuffersToWriter(mInputStream, mBufferInfo, mInputFrameId, mCurrentTrack, 0,
mBufferInfo.size());
+ ASSERT_EQ((status_t)OK, status) << writerFormat << " writer failed";
mCurrentTrack->stop();
- if (status) {
- cout << writerFormat << " writer failed \n";
- mWriter->stop();
- ASSERT_TRUE(false);
- }
- CHECK_EQ((status_t)OK, mWriter->stop());
+
+ status = mWriter->stop();
+ ASSERT_EQ((status_t)OK, status) << "Failed to stop the writer";
close(fd);
}
@@ -319,59 +325,125 @@
string outputFile = OUTPUT_FILE_NAME;
int32_t fd =
open(outputFile.c_str(), O_CREAT | O_LARGEFILE | O_TRUNC | O_RDWR, S_IRUSR | S_IWUSR);
- if (fd < 0) return;
+ ASSERT_GE(fd, 0) << "Failed to open output file to dump writer's data";
+
int32_t status = createWriter(fd);
- if (status) {
- cout << "Failed to create writer for output format:" << writerFormat << "\n";
- ASSERT_TRUE(false);
- }
+ ASSERT_EQ((status_t)OK, status) << "Failed to create writer for output format:" << writerFormat;
+
string inputFile = gEnv->getRes();
string inputInfo = gEnv->getRes();
configFormat param;
bool isAudio;
int32_t inputFileIdx = GetParam().second;
getFileDetails(inputFile, inputInfo, param, isAudio, inputFileIdx);
- if (!inputFile.compare(gEnv->getRes())) {
- ALOGV("No input file specified");
- return;
- }
+ ASSERT_NE(inputFile.compare(gEnv->getRes()), 0) << "No input file specified";
+
getInputBufferInfo(inputFile, inputInfo);
status = addWriterSource(isAudio, param);
- if (status) {
- cout << "Failed to add source for " << writerFormat << "Writer \n";
- ASSERT_TRUE(false);
- }
- CHECK_EQ((status_t)OK, mWriter->start(mFileMeta.get()));
+ ASSERT_EQ((status_t)OK, status) << "Failed to add source for " << writerFormat << "Writer";
+
+ status = mWriter->start(mFileMeta.get());
+ ASSERT_EQ((status_t)OK, status);
status = sendBuffersToWriter(mInputStream, mBufferInfo, mInputFrameId, mCurrentTrack, 0,
mBufferInfo.size() / 4);
- if (status) {
- cout << writerFormat << " writer failed \n";
- mCurrentTrack->stop();
- mWriter->stop();
- ASSERT_TRUE(false);
- }
+ ASSERT_EQ((status_t)OK, status) << writerFormat << " writer failed";
bool isPaused = false;
if ((mWriterName != standardWriters::MPEG2TS) && (mWriterName != standardWriters::MPEG4)) {
- CHECK_EQ((status_t)OK, mWriter->pause());
+ status = mWriter->pause();
+ ASSERT_EQ((status_t)OK, status);
isPaused = true;
}
// In the pause state, writers shouldn't write anything. Testing the writers for the same
int32_t numFramesPaused = mBufferInfo.size() / 4;
- status |= sendBuffersToWriter(mInputStream, mBufferInfo, mInputFrameId, mCurrentTrack,
+ status = sendBuffersToWriter(mInputStream, mBufferInfo, mInputFrameId, mCurrentTrack,
mInputFrameId, numFramesPaused, isPaused);
+ ASSERT_EQ((status_t)OK, status) << writerFormat << " writer failed";
+
if (isPaused) {
- CHECK_EQ((status_t)OK, mWriter->start(mFileMeta.get()));
+ status = mWriter->start(mFileMeta.get());
+ ASSERT_EQ((status_t)OK, status);
}
- status |= sendBuffersToWriter(mInputStream, mBufferInfo, mInputFrameId, mCurrentTrack,
+ status = sendBuffersToWriter(mInputStream, mBufferInfo, mInputFrameId, mCurrentTrack,
mInputFrameId, mBufferInfo.size());
+ ASSERT_EQ((status_t)OK, status) << writerFormat << " writer failed";
mCurrentTrack->stop();
- if (status) {
- cout << writerFormat << " writer failed \n";
- mWriter->stop();
- ASSERT_TRUE(false);
+
+ status = mWriter->stop();
+ ASSERT_EQ((status_t)OK, status) << "Failed to stop the writer";
+ close(fd);
+}
+
+TEST_P(WriterTest, MultiStartStopPauseTest) {
+ // TODO: (b/144821804)
+ // Enable the test for MPE2TS writer
+ if (mDisableTest || mWriterName == standardWriters::MPEG2TS) return;
+ ALOGV("Test writers for multiple start, stop and pause calls");
+
+ string outputFile = OUTPUT_FILE_NAME;
+ int32_t fd =
+ open(outputFile.c_str(), O_CREAT | O_LARGEFILE | O_TRUNC | O_RDWR, S_IRUSR | S_IWUSR);
+ ASSERT_GE(fd, 0) << "Failed to open output file to dump writer's data";
+
+ string writerFormat = GetParam().first;
+ int32_t status = createWriter(fd);
+ ASSERT_EQ(status, (status_t)OK) << "Failed to create writer for output format:" << writerFormat;
+
+ string inputFile = gEnv->getRes();
+ string inputInfo = gEnv->getRes();
+ configFormat param;
+ bool isAudio;
+ int32_t inputFileIdx = GetParam().second;
+ getFileDetails(inputFile, inputInfo, param, isAudio, inputFileIdx);
+ ASSERT_NE(inputFile.compare(gEnv->getRes()), 0) << "No input file specified";
+
+ getInputBufferInfo(inputFile, inputInfo);
+ status = addWriterSource(isAudio, param);
+ ASSERT_EQ((status_t)OK, status) << "Failed to add source for " << writerFormat << "Writer";
+
+ // first start should succeed.
+ status = mWriter->start(mFileMeta.get());
+ ASSERT_EQ((status_t)OK, status) << "Could not start the writer";
+
+ // Multiple start() may/may not succeed.
+ // Writers are expected to not crash on multiple start() calls.
+ for (int32_t count = 0; count < kMaxCount; count++) {
+ mWriter->start(mFileMeta.get());
}
- CHECK_EQ((status_t)OK, mWriter->stop());
+
+ status = sendBuffersToWriter(mInputStream, mBufferInfo, mInputFrameId, mCurrentTrack, 0,
+ mBufferInfo.size() / 4);
+ ASSERT_EQ((status_t)OK, status) << writerFormat << " writer failed";
+
+ for (int32_t count = 0; count < kMaxCount; count++) {
+ mWriter->pause();
+ mWriter->start(mFileMeta.get());
+ }
+
+ mWriter->pause();
+ int32_t numFramesPaused = mBufferInfo.size() / 4;
+ status = sendBuffersToWriter(mInputStream, mBufferInfo, mInputFrameId, mCurrentTrack,
+ mInputFrameId, numFramesPaused, true);
+ ASSERT_EQ((status_t)OK, status) << writerFormat << " writer failed";
+
+ for (int32_t count = 0; count < kMaxCount; count++) {
+ mWriter->start(mFileMeta.get());
+ }
+
+ status = sendBuffersToWriter(mInputStream, mBufferInfo, mInputFrameId, mCurrentTrack,
+ mInputFrameId, mBufferInfo.size());
+ ASSERT_EQ((status_t)OK, status) << writerFormat << " writer failed";
+
+ mCurrentTrack->stop();
+
+ // first stop should succeed.
+ status = mWriter->stop();
+ ASSERT_EQ((status_t)OK, status) << "Failed to stop the writer";
+ // Multiple stop() may/may not succeed.
+ // Writers are expected to not crash on multiple stop() calls.
+ for (int32_t count = 0; count < kMaxCount; count++) {
+ mWriter->stop();
+ }
close(fd);
}
diff --git a/media/libstagefright/tests/writer/WriterTestEnvironment.h b/media/libstagefright/tests/writer/WriterTestEnvironment.h
index 34c2baa..99e686f 100644
--- a/media/libstagefright/tests/writer/WriterTestEnvironment.h
+++ b/media/libstagefright/tests/writer/WriterTestEnvironment.h
@@ -25,7 +25,7 @@
class WriterTestEnvironment : public ::testing::Environment {
public:
- WriterTestEnvironment() : res("/sdcard/media/") {}
+ WriterTestEnvironment() : res("/data/local/tmp/") {}
// Parses the command line arguments
int initFromOptions(int argc, char **argv);
diff --git a/media/libstagefright/tests/writer/WriterUtility.cpp b/media/libstagefright/tests/writer/WriterUtility.cpp
index 2ba90a0..f24ccb6 100644
--- a/media/libstagefright/tests/writer/WriterUtility.cpp
+++ b/media/libstagefright/tests/writer/WriterUtility.cpp
@@ -26,7 +26,7 @@
int32_t &inputFrameId, sp<MediaAdapter> &currentTrack, int32_t offset,
int32_t range, bool isPaused) {
while (1) {
- if (inputFrameId == (int)bufferInfo.size() || inputFrameId >= (offset + range)) break;
+ if (inputFrameId >= (int)bufferInfo.size() || inputFrameId >= (offset + range)) break;
int32_t size = bufferInfo[inputFrameId].size;
char *data = (char *)malloc(size);
if (!data) {
diff --git a/media/libstagefright/tests/writer/WriterUtility.h b/media/libstagefright/tests/writer/WriterUtility.h
index d402798..cdd6246 100644
--- a/media/libstagefright/tests/writer/WriterUtility.h
+++ b/media/libstagefright/tests/writer/WriterUtility.h
@@ -33,6 +33,7 @@
#define CODEC_CONFIG_FLAG 32
constexpr uint32_t kMaxCSDStrlen = 16;
+constexpr uint32_t kMaxCount = 20;
struct BufferInfo {
int32_t size;
diff --git a/media/tests/benchmark/src/native/common/BenchmarkC2Common.cpp b/media/tests/benchmark/src/native/common/BenchmarkC2Common.cpp
index 622a0e1..e09f468 100644
--- a/media/tests/benchmark/src/native/common/BenchmarkC2Common.cpp
+++ b/media/tests/benchmark/src/native/common/BenchmarkC2Common.cpp
@@ -22,6 +22,9 @@
int32_t BenchmarkC2Common::setupCodec2() {
ALOGV("In %s", __func__);
mClient = android::Codec2Client::CreateFromService("default");
+ if (!mClient) {
+ mClient = android::Codec2Client::CreateFromService("software");
+ }
if (!mClient) return -1;
std::shared_ptr<C2AllocatorStore> store = android::GetCodec2PlatformAllocatorStore();
diff --git a/media/tests/benchmark/src/native/decoder/Decoder.cpp b/media/tests/benchmark/src/native/decoder/Decoder.cpp
index b797cc9..2171589 100644
--- a/media/tests/benchmark/src/native/decoder/Decoder.cpp
+++ b/media/tests/benchmark/src/native/decoder/Decoder.cpp
@@ -225,12 +225,12 @@
}
void Decoder::deInitCodec() {
- int64_t sTime = mStats->getCurTime();
if (mFormat) {
AMediaFormat_delete(mFormat);
mFormat = nullptr;
}
if (!mCodec) return;
+ int64_t sTime = mStats->getCurTime();
AMediaCodec_stop(mCodec);
AMediaCodec_delete(mCodec);
int64_t eTime = mStats->getCurTime();
diff --git a/media/tests/benchmark/src/native/encoder/Encoder.cpp b/media/tests/benchmark/src/native/encoder/Encoder.cpp
index f119cf3..2db612c 100644
--- a/media/tests/benchmark/src/native/encoder/Encoder.cpp
+++ b/media/tests/benchmark/src/native/encoder/Encoder.cpp
@@ -154,11 +154,12 @@
}
void Encoder::deInitCodec() {
- int64_t sTime = mStats->getCurTime();
if (mFormat) {
AMediaFormat_delete(mFormat);
mFormat = nullptr;
}
+ if (!mCodec) return;
+ int64_t sTime = mStats->getCurTime();
AMediaCodec_stop(mCodec);
AMediaCodec_delete(mCodec);
int64_t eTime = mStats->getCurTime();
diff --git a/media/tests/benchmark/src/native/muxer/Muxer.cpp b/media/tests/benchmark/src/native/muxer/Muxer.cpp
index f8627cb..3e150ca 100644
--- a/media/tests/benchmark/src/native/muxer/Muxer.cpp
+++ b/media/tests/benchmark/src/native/muxer/Muxer.cpp
@@ -49,12 +49,12 @@
}
void Muxer::deInitMuxer() {
- int64_t sTime = mStats->getCurTime();
if (mFormat) {
AMediaFormat_delete(mFormat);
mFormat = nullptr;
}
if (!mMuxer) return;
+ int64_t sTime = mStats->getCurTime();
AMediaMuxer_stop(mMuxer);
AMediaMuxer_delete(mMuxer);
int64_t eTime = mStats->getCurTime();
diff --git a/media/utils/ServiceUtilities.cpp b/media/utils/ServiceUtilities.cpp
index 343ec05..87ea084 100644
--- a/media/utils/ServiceUtilities.cpp
+++ b/media/utils/ServiceUtilities.cpp
@@ -145,6 +145,15 @@
return ok;
}
+bool captureVoiceCommunicationOutputAllowed(pid_t pid, uid_t uid) {
+ if (isAudioServerOrRootUid(uid)) return true;
+ static const String16 sCaptureVoiceCommOutput(
+ "android.permission.CAPTURE_VOICE_COMMUNICATION_OUTPUT");
+ bool ok = PermissionCache::checkPermission(sCaptureVoiceCommOutput, pid, uid);
+ if (!ok) ALOGE("Request requires android.permission.CAPTURE_VOICE_COMMUNICATION_OUTPUT");
+ return ok;
+}
+
bool captureHotwordAllowed(const String16& opPackageName, pid_t pid, uid_t uid) {
// CAPTURE_AUDIO_HOTWORD permission implies RECORD_AUDIO permission
bool ok = recordingAllowed(opPackageName, pid, uid);
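
The new captureVoiceCommunicationOutputAllowed() above follows the same shape as the neighboring checks: an audioserver/root bypass followed by a PermissionCache lookup, with an error log on denial. A small hedged sketch of how a capture path could be gated with it; the wrapper below is hypothetical, and only the helper's signature comes from this patch:

```cpp
#include <mediautils/ServiceUtilities.h>
#include <utils/Errors.h>

using namespace android;

// Hypothetical gate: reject a request to capture voice-communication output
// unless the calling pid/uid holds CAPTURE_VOICE_COMMUNICATION_OUTPUT.
static status_t checkVoiceCommCaptureAllowed(pid_t pid, uid_t uid) {
    if (!captureVoiceCommunicationOutputAllowed(pid, uid)) {
        return PERMISSION_DENIED;
    }
    return NO_ERROR;
}
```
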
diff --git a/media/utils/include/mediautils/ServiceUtilities.h b/media/utils/include/mediautils/ServiceUtilities.h
index 4925cdb..212599a 100644
--- a/media/utils/include/mediautils/ServiceUtilities.h
+++ b/media/utils/include/mediautils/ServiceUtilities.h
@@ -83,6 +83,7 @@
void finishRecording(const String16& opPackageName, uid_t uid);
bool captureAudioOutputAllowed(pid_t pid, uid_t uid);
bool captureMediaOutputAllowed(pid_t pid, uid_t uid);
+bool captureVoiceCommunicationOutputAllowed(pid_t pid, uid_t uid);
bool captureHotwordAllowed(const String16& opPackageName, pid_t pid, uid_t uid);
bool settingsAllowed();
bool modifyAudioRoutingAllowed();
diff --git a/services/audioflinger/Android.bp b/services/audioflinger/Android.bp
index 3873600..f06d026 100644
--- a/services/audioflinger/Android.bp
+++ b/services/audioflinger/Android.bp
@@ -3,6 +3,8 @@
cc_library_shared {
name: "libaudioflinger",
+ tidy: false, // b/146435095, segmentation fault with Effects.cpp
+
srcs: [
"AudioFlinger.cpp",
"AudioHwDevice.cpp",
diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp
index ee4c5ba..d6f0824 100644
--- a/services/audioflinger/AudioFlinger.cpp
+++ b/services/audioflinger/AudioFlinger.cpp
@@ -464,10 +464,12 @@
}
result.append("Global session refs:\n");
- result.append(" session cnt pid\n");
+ result.append(" session cnt pid uid name\n");
for (size_t i = 0; i < mAudioSessionRefs.size(); i++) {
AudioSessionRef *r = mAudioSessionRefs[i];
- result.appendFormat(" %7d %4d %7d\n", r->mSessionid, r->mCnt, r->mPid);
+ const mediautils::UidInfo::Info info = mUidInfo.getInfo(r->mUid);
+ result.appendFormat(" %7d %4d %7d %6u %s\n", r->mSessionid, r->mCnt, r->mPid,
+ r->mUid, info.package.c_str());
}
write(fd, result.string(), result.size());
}
@@ -2895,14 +2897,18 @@
return nextUniqueId(use);
}
-void AudioFlinger::acquireAudioSessionId(audio_session_t audioSession, pid_t pid)
+void AudioFlinger::acquireAudioSessionId(
+ audio_session_t audioSession, pid_t pid, uid_t uid)
{
Mutex::Autolock _l(mLock);
pid_t caller = IPCThreadState::self()->getCallingPid();
ALOGV("acquiring %d from %d, for %d", audioSession, caller, pid);
const uid_t callerUid = IPCThreadState::self()->getCallingUid();
- if (pid != -1 && isAudioServerUid(callerUid)) { // check must match releaseAudioSessionId()
- caller = pid;
+ if (pid != (pid_t)-1 && isAudioServerOrMediaServerUid(callerUid)) {
+ caller = pid; // check must match releaseAudioSessionId()
+ }
+ if (uid == (uid_t)-1 || !isAudioServerOrMediaServerUid(callerUid)) {
+ uid = callerUid;
}
{
@@ -2926,7 +2932,7 @@
return;
}
}
- mAudioSessionRefs.push(new AudioSessionRef(audioSession, caller));
+ mAudioSessionRefs.push(new AudioSessionRef(audioSession, caller, uid));
ALOGV(" added new entry for %d", audioSession);
}
@@ -2938,8 +2944,8 @@
pid_t caller = IPCThreadState::self()->getCallingPid();
ALOGV("releasing %d from %d for %d", audioSession, caller, pid);
const uid_t callerUid = IPCThreadState::self()->getCallingUid();
- if (pid != -1 && isAudioServerUid(callerUid)) { // check must match acquireAudioSessionId()
- caller = pid;
+ if (pid != (pid_t)-1 && isAudioServerOrMediaServerUid(callerUid)) {
+ caller = pid; // check must match acquireAudioSessionId()
}
size_t num = mAudioSessionRefs.size();
for (size_t i = 0; i < num; i++) {
diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h
index 5c975e1..d4e0ae2 100644
--- a/services/audioflinger/AudioFlinger.h
+++ b/services/audioflinger/AudioFlinger.h
@@ -214,7 +214,7 @@
// This is the binder API. For the internal API see nextUniqueId().
virtual audio_unique_id_t newAudioUniqueId(audio_unique_id_use_t use);
- virtual void acquireAudioSessionId(audio_session_t audioSession, pid_t pid);
+ void acquireAudioSessionId(audio_session_t audioSession, pid_t pid, uid_t uid) override;
virtual void releaseAudioSessionId(audio_session_t audioSession, pid_t pid);
@@ -343,7 +343,10 @@
virtual ~SyncEvent() {}
- void trigger() { Mutex::Autolock _l(mLock); if (mCallback) mCallback(this); }
+ void trigger() {
+ Mutex::Autolock _l(mLock);
+ if (mCallback) mCallback(wp<SyncEvent>(this));
+ }
bool isCancelled() const { Mutex::Autolock _l(mLock); return (mCallback == NULL); }
void cancel() { Mutex::Autolock _l(mLock); mCallback = NULL; }
AudioSystem::sync_event_t type() const { return mType; }
@@ -808,10 +811,11 @@
// for mAudioSessionRefs only
struct AudioSessionRef {
- AudioSessionRef(audio_session_t sessionid, pid_t pid) :
- mSessionid(sessionid), mPid(pid), mCnt(1) {}
+ AudioSessionRef(audio_session_t sessionid, pid_t pid, uid_t uid) :
+ mSessionid(sessionid), mPid(pid), mUid(uid), mCnt(1) {}
const audio_session_t mSessionid;
const pid_t mPid;
+ const uid_t mUid;
int mCnt;
};
diff --git a/services/audioflinger/Effects.cpp b/services/audioflinger/Effects.cpp
index 70c56e3..2f3724f 100644
--- a/services/audioflinger/Effects.cpp
+++ b/services/audioflinger/Effects.cpp
@@ -737,7 +737,7 @@
}
if (!mSupportsFloat) { // convert input to int16_t as effect doesn't support float.
if (!auxType) {
- if (mInConversionBuffer.get() == nullptr) {
+ if (mInConversionBuffer == nullptr) {
ALOGW("%s: mInConversionBuffer is null, bypassing", __func__);
goto data_bypass;
}
@@ -748,7 +748,7 @@
inBuffer = mInConversionBuffer;
}
if (mConfig.outputCfg.accessMode == EFFECT_BUFFER_ACCESS_ACCUMULATE) {
- if (mOutConversionBuffer.get() == nullptr) {
+ if (mOutConversionBuffer == nullptr) {
ALOGW("%s: mOutConversionBuffer is null, bypassing", __func__);
goto data_bypass;
}
@@ -921,9 +921,8 @@
mConfig.outputCfg.buffer.frameCount = mConfig.inputCfg.buffer.frameCount;
ALOGV("configure() %p chain %p buffer %p framecount %zu",
- this, mCallback->chain().promote() != nullptr ? mCallback->chain().promote().get() :
- nullptr,
- mConfig.inputCfg.buffer.raw, mConfig.inputCfg.buffer.frameCount);
+ this, mCallback->chain().promote().get(),
+ mConfig.inputCfg.buffer.raw, mConfig.inputCfg.buffer.frameCount);
status_t cmdStatus;
size = sizeof(int);
@@ -1290,7 +1289,7 @@
const uint32_t inChannelCount =
audio_channel_count_from_out_mask(mConfig.inputCfg.channels);
const bool formatMismatch = !mSupportsFloat || mInChannelCountRequested != inChannelCount;
- if (!auxType && formatMismatch && mInBuffer.get() != nullptr) {
+ if (!auxType && formatMismatch && mInBuffer != nullptr) {
// we need to translate - create hidl shared buffer and intercept
const size_t inFrameCount = mConfig.inputCfg.buffer.frameCount;
// Use FCC_2 in case mInChannelCountRequested is mono and the effect is stereo.
@@ -1300,13 +1299,13 @@
ALOGV("%s: setInBuffer updating for inChannels:%d inFrameCount:%zu total size:%zu",
__func__, inChannels, inFrameCount, size);
- if (size > 0 && (mInConversionBuffer.get() == nullptr
+ if (size > 0 && (mInConversionBuffer == nullptr
|| size > mInConversionBuffer->getSize())) {
mInConversionBuffer.clear();
ALOGV("%s: allocating mInConversionBuffer %zu", __func__, size);
(void)mCallback->allocateHalBuffer(size, &mInConversionBuffer);
}
- if (mInConversionBuffer.get() != nullptr) {
+ if (mInConversionBuffer != nullptr) {
mInConversionBuffer->setFrameCount(inFrameCount);
mEffectInterface->setInBuffer(mInConversionBuffer);
} else if (size > 0) {
@@ -1335,7 +1334,7 @@
const uint32_t outChannelCount =
audio_channel_count_from_out_mask(mConfig.outputCfg.channels);
const bool formatMismatch = !mSupportsFloat || mOutChannelCountRequested != outChannelCount;
- if (formatMismatch && mOutBuffer.get() != nullptr) {
+ if (formatMismatch && mOutBuffer != nullptr) {
const size_t outFrameCount = mConfig.outputCfg.buffer.frameCount;
// Use FCC_2 in case mOutChannelCountRequested is mono and the effect is stereo.
const uint32_t outChannels = std::max((uint32_t)FCC_2, mOutChannelCountRequested);
@@ -1344,13 +1343,13 @@
ALOGV("%s: setOutBuffer updating for outChannels:%d outFrameCount:%zu total size:%zu",
__func__, outChannels, outFrameCount, size);
- if (size > 0 && (mOutConversionBuffer.get() == nullptr
+ if (size > 0 && (mOutConversionBuffer == nullptr
|| size > mOutConversionBuffer->getSize())) {
mOutConversionBuffer.clear();
ALOGV("%s: allocating mOutConversionBuffer %zu", __func__, size);
(void)mCallback->allocateHalBuffer(size, &mOutConversionBuffer);
}
- if (mOutConversionBuffer.get() != nullptr) {
+ if (mOutConversionBuffer != nullptr) {
mOutConversionBuffer->setFrameCount(outFrameCount);
mEffectInterface->setOutBuffer(mOutConversionBuffer);
} else if (size > 0) {
@@ -1521,7 +1520,7 @@
static std::string dumpInOutBuffer(bool isInput, const sp<EffectBufferHalInterface> &buffer) {
std::stringstream ss;
- if (buffer.get() == nullptr) {
+ if (buffer == nullptr) {
return "nullptr"; // make different than below
} else if (buffer->externalData() != nullptr) {
ss << (isInput ? buffer->externalData() : buffer->audioBuffer()->raw)
@@ -1937,19 +1936,20 @@
#undef LOG_TAG
#define LOG_TAG "AudioFlinger::EffectChain"
-AudioFlinger::EffectChain::EffectChain(ThreadBase *thread,
- audio_session_t sessionId)
+AudioFlinger::EffectChain::EffectChain(const wp<ThreadBase>& thread,
+ audio_session_t sessionId)
: mSessionId(sessionId), mActiveTrackCnt(0), mTrackCnt(0), mTailBufferCount(0),
mVolumeCtrlIdx(-1), mLeftVolume(UINT_MAX), mRightVolume(UINT_MAX),
mNewLeftVolume(UINT_MAX), mNewRightVolume(UINT_MAX),
- mEffectCallback(new EffectCallback(this, thread, thread->mAudioFlinger.get()))
+ mEffectCallback(new EffectCallback(wp<EffectChain>(this), thread))
{
mStrategy = AudioSystem::getStrategyForStream(AUDIO_STREAM_MUSIC);
- if (thread == nullptr) {
+ sp<ThreadBase> p = thread.promote();
+ if (p == nullptr) {
return;
}
- mMaxTailBuffers = ((kProcessTailDurationMs * thread->sampleRate()) / 1000) /
- thread->frameCount();
+ mMaxTailBuffers = ((kProcessTailDurationMs * p->sampleRate()) / 1000) /
+ p->frameCount();
}
AudioFlinger::EffectChain::~EffectChain()
@@ -2638,7 +2638,7 @@
void AudioFlinger::EffectChain::setThread(const sp<ThreadBase>& thread)
{
Mutex::Autolock _l(mLock);
- mEffectCallback->setThread(thread.get());
+ mEffectCallback->setThread(thread);
}
void AudioFlinger::EffectChain::checkOutputFlagCompatibility(audio_output_flags_t *flags) const
diff --git a/services/audioflinger/Effects.h b/services/audioflinger/Effects.h
index 40bb226..1dfa1ae 100644
--- a/services/audioflinger/Effects.h
+++ b/services/audioflinger/Effects.h
@@ -402,7 +402,6 @@
class EffectChain : public RefBase {
public:
EffectChain(const wp<ThreadBase>& wThread, audio_session_t sessionId);
- EffectChain(ThreadBase *thread, audio_session_t sessionId);
virtual ~EffectChain();
// special key used for an entry in mSuspendedEffects keyed vector
@@ -513,8 +512,11 @@
class EffectCallback : public EffectCallbackInterface {
public:
- EffectCallback(EffectChain *chain, ThreadBase *thread, AudioFlinger *audioFlinger)
- : mChain(chain), mThread(thread), mAudioFlinger(audioFlinger) {}
+ EffectCallback(const wp<EffectChain>& chain,
+ const wp<ThreadBase>& thread)
+ : mChain(chain) {
+ setThread(thread);
+ }
status_t createEffectHal(const effect_uuid_t *pEffectUuid,
int32_t sessionId, int32_t deviceId, sp<EffectHalInterface> *effect) override;
@@ -550,7 +552,12 @@
wp<EffectChain> chain() const override { return mChain; }
wp<ThreadBase> thread() { return mThread; }
- void setThread(ThreadBase *thread) { mThread = thread; };
+
+ void setThread(const wp<ThreadBase>& thread) {
+ mThread = thread;
+ sp<ThreadBase> p = thread.promote();
+ mAudioFlinger = p ? p->mAudioFlinger : nullptr;
+ }
private:
wp<EffectChain> mChain;
@@ -623,7 +630,8 @@
effect_descriptor_t *desc, int id)
: EffectBase(callback, desc, id, AUDIO_SESSION_DEVICE, false),
mDevice(device), mManagerCallback(callback),
- mMyCallback(new ProxyCallback(this, callback)) {}
+ mMyCallback(new ProxyCallback(wp<DeviceEffectProxy>(this),
+ callback)) {}
status_t setEnabled(bool enabled, bool fromHandle) override;
sp<DeviceEffectProxy> asDeviceEffectProxy() override { return this; }
@@ -649,7 +657,7 @@
class ProxyCallback : public EffectCallbackInterface {
public:
- ProxyCallback(DeviceEffectProxy *proxy,
+ ProxyCallback(const wp<DeviceEffectProxy>& proxy,
const sp<DeviceEffectManagerCallback>& callback)
: mProxy(proxy), mManagerCallback(callback) {}
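
The Effects refactor above swaps raw ThreadBase*/EffectChain* back-pointers for wp<> and promotes them at the point of use, so a destroyed thread yields null instead of a dangling pointer (see the EffectChain constructor and EffectCallback::setThread changes). A minimal self-contained illustration of that promote-and-check pattern; Worker is a made-up RefBase class, not part of this patch:

```cpp
#include <utils/RefBase.h>

using android::RefBase;
using android::sp;
using android::wp;

// Made-up stand-in for a ThreadBase-like object owned elsewhere.
struct Worker : public RefBase {
    size_t frameCount() const { return 256; }
};

// Holders keep a weak reference and promote it only when they need the object.
static size_t safeFrameCount(const wp<Worker> &weakWorker) {
    sp<Worker> w = weakWorker.promote();  // null if the Worker was already destroyed
    return (w != nullptr) ? w->frameCount() : 0;
}
```
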
diff --git a/services/audioflinger/PatchPanel.cpp b/services/audioflinger/PatchPanel.cpp
index 5501856..b58fd8b 100644
--- a/services/audioflinger/PatchPanel.cpp
+++ b/services/audioflinger/PatchPanel.cpp
@@ -497,10 +497,12 @@
}
sp<RecordThread::PatchRecord> tempRecordTrack;
+ const bool usePassthruPatchRecord =
+ (inputFlags & AUDIO_INPUT_FLAG_DIRECT) && (outputFlags & AUDIO_OUTPUT_FLAG_DIRECT);
const size_t playbackFrameCount = mPlayback.thread()->frameCount();
const size_t recordFrameCount = mRecord.thread()->frameCount();
size_t frameCount = 0;
- if ((inputFlags & AUDIO_INPUT_FLAG_DIRECT) && (outputFlags & AUDIO_OUTPUT_FLAG_DIRECT)) {
+ if (usePassthruPatchRecord) {
// PassthruPatchRecord producesBufferOnDemand, so use
// maximum of playback and record thread framecounts
frameCount = std::max(playbackFrameCount, recordFrameCount);
@@ -557,8 +559,14 @@
}
// tie playback and record tracks together
- mRecord.setTrackAndPeer(tempRecordTrack, tempPatchTrack);
- mPlayback.setTrackAndPeer(tempPatchTrack, tempRecordTrack);
+ // In the case of PassthruPatchRecord no I/O activity happens on RecordThread,
+ // everything is driven from PlaybackThread. Thus AudioBufferProvider methods
+ // of PassthruPatchRecord can only be called if the corresponding PatchTrack
+ // is alive. There is no need to hold a reference, and there is no need
+ // to clear it. In fact, since playback stopping is asynchronous, there is
+ // no proper time when clearing could be done.
+ mRecord.setTrackAndPeer(tempRecordTrack, tempPatchTrack, !usePassthruPatchRecord);
+ mPlayback.setTrackAndPeer(tempPatchTrack, tempRecordTrack, true /*holdReference*/);
// start capture and playback
mRecord.track()->start(AudioSystem::SYNC_EVENT_NONE, AUDIO_SESSION_NONE);
diff --git a/services/audioflinger/PatchPanel.h b/services/audioflinger/PatchPanel.h
index 4e0d243..89d4eb1 100644
--- a/services/audioflinger/PatchPanel.h
+++ b/services/audioflinger/PatchPanel.h
@@ -128,18 +128,20 @@
mCloseThread = closeThread;
}
template <typename T>
- void setTrackAndPeer(const sp<TrackType>& track, const sp<T> &peer) {
+ void setTrackAndPeer(const sp<TrackType>& track, const sp<T> &peer, bool holdReference) {
mTrack = track;
mThread->addPatchTrack(mTrack);
- mTrack->setPeerProxy(peer, true /* holdReference */);
+ mTrack->setPeerProxy(peer, holdReference);
+ mClearPeerProxy = holdReference;
}
- void clearTrackPeer() { if (mTrack) mTrack->clearPeerProxy(); }
+ void clearTrackPeer() { if (mClearPeerProxy && mTrack) mTrack->clearPeerProxy(); }
void stopTrack() { if (mTrack) mTrack->stop(); }
void swap(Endpoint &other) noexcept {
using std::swap;
swap(mThread, other.mThread);
swap(mCloseThread, other.mCloseThread);
+ swap(mClearPeerProxy, other.mClearPeerProxy);
swap(mHandle, other.mHandle);
swap(mTrack, other.mTrack);
}
@@ -151,6 +153,7 @@
private:
sp<ThreadType> mThread;
bool mCloseThread = true;
+ bool mClearPeerProxy = true;
audio_patch_handle_t mHandle = AUDIO_PATCH_HANDLE_NONE;
sp<TrackType> mTrack;
};
diff --git a/services/audiopolicy/AudioPolicyInterface.h b/services/audiopolicy/AudioPolicyInterface.h
index dd0cd9b..4d53be4 100644
--- a/services/audiopolicy/AudioPolicyInterface.h
+++ b/services/audiopolicy/AudioPolicyInterface.h
@@ -250,6 +250,10 @@
= 0;
virtual status_t removeUidDeviceAffinities(uid_t uid) = 0;
+ virtual status_t setUserIdDeviceAffinities(int userId,
+ const Vector<AudioDeviceTypeAddr>& devices) = 0;
+ virtual status_t removeUserIdDeviceAffinities(int userId) = 0;
+
virtual status_t startAudioSource(const struct audio_port_config *source,
const audio_attributes_t *attributes,
audio_port_handle_t *portId,
diff --git a/services/audiopolicy/common/managerdefinitions/include/AudioPolicyMix.h b/services/audiopolicy/common/managerdefinitions/include/AudioPolicyMix.h
index fc79ab1..a757551 100644
--- a/services/audiopolicy/common/managerdefinitions/include/AudioPolicyMix.h
+++ b/services/audiopolicy/common/managerdefinitions/include/AudioPolicyMix.h
@@ -105,12 +105,27 @@
status_t removeUidDeviceAffinities(uid_t uid);
status_t getDevicesForUid(uid_t uid, Vector<AudioDeviceTypeAddr>& devices) const;
+ /**
+ * Updates the mix rules in order to make streams associated with the given user
+ * be routed to the given audio devices.
+ * @param userId the userId for which the device affinity is set
+ * @param devices the vector of devices that this userId may be routed to. A typical
+ * use is to pass the devices associated with a given zone in a multi-zone setup.
+ * @return NO_ERROR if the update was successful, INVALID_OPERATION otherwise.
+ * An example of failure is when there are already rules in place to restrict
+ * a mix to the given userId (i.e. when a MATCH_USERID rule was set for it).
+ */
+ status_t setUserIdDeviceAffinities(int userId, const Vector<AudioDeviceTypeAddr>& devices);
+ status_t removeUserIdDeviceAffinities(int userId);
+ status_t getDevicesForUserId(int userId, Vector<AudioDeviceTypeAddr>& devices) const;
+
void dump(String8 *dst) const;
private:
enum class MixMatchStatus { MATCH, NO_MATCH, INVALID_MIX };
MixMatchStatus mixMatch(const AudioMix* mix, size_t mixIndex,
- const audio_attributes_t& attributes, uid_t uid);
+ const audio_attributes_t& attributes,
+ uid_t uid);
};
} // namespace android
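
The header comment above summarizes the contract of the new userId affinity API. As a rough illustration of the rule rewriting it describes, here is a simplified sketch over plain std types; Mix, Criterion and Rule are placeholders, and the real implementation also checks per-mix device-affinity compatibility and avoids duplicate exclude rules.

// Simplified sketch of the feasibility check and rule rewriting described in the
// setUserIdDeviceAffinities() comment above, over plain std types.
#include <string>
#include <vector>

enum class Rule { MatchUserId, ExcludeUserId };
struct Criterion { Rule rule; int userId; };
struct Mix { std::string device; std::vector<Criterion> criteria; };

// Returns false (INVALID_OPERATION in the real code) when a mix already
// restricts itself to this userId; otherwise excludes the userId from every
// mix that does not route to one of the requested devices.
bool setUserIdDeviceAffinities(std::vector<Mix>& mixes, int userId,
                               const std::vector<std::string>& devices) {
    for (const auto& mix : mixes) {
        for (const auto& c : mix.criteria) {
            if (c.rule == Rule::MatchUserId && c.userId == userId) {
                return false;  // contradicts a userId-device affinity
            }
        }
    }
    for (auto& mix : mixes) {
        bool deviceMatch = false;
        for (const auto& d : devices) {
            deviceMatch = deviceMatch || (d == mix.device);
        }
        if (!deviceMatch) {
            mix.criteria.push_back({Rule::ExcludeUserId, userId});
        }
    }
    return true;
}
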
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp
index b1103ab..11660f0 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp
@@ -60,6 +60,9 @@
case RULE_MATCH_UID:
ruleValue = std::to_string(criterion.mValue.mUid);
break;
+ case RULE_MATCH_USERID:
+ ruleValue = std::to_string(criterion.mValue.mUserId);
+ break;
default:
unknownRule = true;
}
@@ -223,10 +226,14 @@
}
if (!(attributes.usage == AUDIO_USAGE_UNKNOWN ||
attributes.usage == AUDIO_USAGE_MEDIA ||
- attributes.usage == AUDIO_USAGE_GAME)) {
+ attributes.usage == AUDIO_USAGE_GAME ||
+ attributes.usage == AUDIO_USAGE_VOICE_COMMUNICATION)) {
return MixMatchStatus::NO_MATCH;
}
}
+
+ int userId = (int) multiuser_get_user_id(uid);
+
// TODO if adding more player rules (currently only 2), make rule handling "generic"
// as there is no difference in the treatment of usage- or uid-based rules
bool hasUsageMatchRules = false;
@@ -239,6 +246,12 @@
bool uidMatchFound = false;
bool uidExclusionFound = false;
+ bool hasUserIdExcludeRules = false;
+ bool userIdExclusionFound = false;
+ bool hasUserIdMatchRules = false;
+ bool userIdMatchFound = false;
+
+
bool hasAddrMatch = false;
// iterate over all mix criteria to list what rules this mix contains
@@ -290,6 +303,24 @@
uidExclusionFound = true;
}
break;
+ case RULE_MATCH_USERID:
+ ALOGV("\tmix has RULE_MATCH_USERID for userId %d",
+ mix->mCriteria[j].mValue.mUserId);
+ hasUserIdMatchRules = true;
+ if (mix->mCriteria[j].mValue.mUserId == userId) {
+ // found one userId match against all allowed userIds
+ userIdMatchFound = true;
+ }
+ break;
+ case RULE_EXCLUDE_USERID:
+ ALOGV("\tmix has RULE_EXCLUDE_USERID for userId %d",
+ mix->mCriteria[j].mValue.mUserId);
+ hasUserIdExcludeRules = true;
+ if (mix->mCriteria[j].mValue.mUserId == userId) {
+ // found this userId is to be excluded
+ userIdExclusionFound = true;
+ }
+ break;
default:
break;
}
@@ -306,20 +337,27 @@
" and RULE_EXCLUDE_UID in mix %zu", mixIndex);
return MixMatchStatus::INVALID_MIX;
}
-
- if ((hasUsageExcludeRules && usageExclusionFound)
- || (hasUidExcludeRules && uidExclusionFound)) {
- break; // stop iterating on criteria because an exclusion was found (will fail)
+ if (hasUserIdMatchRules && hasUserIdExcludeRules) {
+ ALOGE("getOutputForAttr: invalid combination of RULE_MATCH_USERID"
+ " and RULE_EXCLUDE_USERID in mix %zu", mixIndex);
+ return MixMatchStatus::INVALID_MIX;
}
+ if ((hasUsageExcludeRules && usageExclusionFound)
+ || (hasUidExcludeRules && uidExclusionFound)
+ || (hasUserIdExcludeRules && userIdExclusionFound)) {
+ break; // stop iterating on criteria because an exclusion was found (will fail)
+ }
}//iterate on mix criteria
// determine if exiting on success (or implicit failure as desc is 0)
if (hasAddrMatch ||
!((hasUsageExcludeRules && usageExclusionFound) ||
+ (hasUserIdExcludeRules && userIdExclusionFound) ||
(hasUsageMatchRules && !usageMatchFound) ||
(hasUidExcludeRules && uidExclusionFound) ||
- (hasUidMatchRules && !uidMatchFound))) {
+ (hasUidMatchRules && !uidMatchFound)) ||
+ (hasUserIdMatchRules && !userIdMatchFound)) {
ALOGV("\tgetOutputForAttr will use mix %zu", mixIndex);
return MixMatchStatus::MATCH;
}
@@ -530,6 +568,109 @@
return NO_ERROR;
}
+status_t AudioPolicyMixCollection::setUserIdDeviceAffinities(int userId,
+ const Vector<AudioDeviceTypeAddr>& devices) {
+ // verify feasibility: for each player mix: if it already contains a
+ // "match userId" rule for this userId, return an error
+ // (adding a userId-device affinity would result in contradictory rules)
+ for (size_t i = 0; i < size(); i++) {
+ const AudioPolicyMix* mix = itemAt(i).get();
+ if (!mix->isDeviceAffinityCompatible()) {
+ continue;
+ }
+ if (mix->hasUserIdRule(true /*match*/, userId)) {
+ return INVALID_OPERATION;
+ }
+ }
+
+ // remove existing rules for this userId
+ removeUserIdDeviceAffinities(userId);
+
+ // for each player mix:
+ // IF device is not a target for the mix,
+ // AND it doesn't have a "match userId" rule
+ // THEN add a rule to exclude the userId
+ for (size_t i = 0; i < size(); i++) {
+ const AudioPolicyMix *mix = itemAt(i).get();
+ if (!mix->isDeviceAffinityCompatible()) {
+ continue;
+ }
+ // check if this mix goes to a device in the list of devices
+ bool deviceMatch = false;
+ const AudioDeviceTypeAddr mixDevice(mix->mDeviceType, mix->mDeviceAddress.string());
+ for (size_t j = 0; j < devices.size(); j++) {
+ if (mixDevice.equals(devices[j])) {
+ deviceMatch = true;
+ break;
+ }
+ }
+ if (!deviceMatch && !mix->hasMatchUserIdRule()) {
+ // this mix doesn't go to one of the listed devices for the given userId,
+ // and it's not already restricting the mix on a userId,
+ // modify its rules to exclude the userId
+ if (!mix->hasUserIdRule(false /*match*/, userId)) {
+ // no need to do it again if userId is already excluded
+ mix->setExcludeUserId(userId);
+ }
+ }
+ }
+
+ return NO_ERROR;
+}
+
+status_t AudioPolicyMixCollection::removeUserIdDeviceAffinities(int userId) {
+ // for each player mix: remove existing rules that match or exclude this userId
+ for (size_t i = 0; i < size(); i++) {
+ bool foundUserIdRule = false;
+ const AudioPolicyMix *mix = itemAt(i).get();
+ if (!mix->isDeviceAffinityCompatible()) {
+ continue;
+ }
+ std::vector<size_t> criteriaToRemove;
+ for (size_t j = 0; j < mix->mCriteria.size(); j++) {
+ const uint32_t rule = mix->mCriteria[j].mRule;
+ // is this rule excluding the userId? (not considering userId match rules
+ // as those are not used for userId-device affinity)
+ if (rule == RULE_EXCLUDE_USERID
+ && userId == mix->mCriteria[j].mValue.mUserId) {
+ foundUserIdRule = true;
+ criteriaToRemove.insert(criteriaToRemove.begin(), j);
+ }
+ }
+ if (foundUserIdRule) {
+ for (size_t j = 0; j < criteriaToRemove.size(); j++) {
+ mix->mCriteria.removeAt(criteriaToRemove[j]);
+ }
+ }
+ }
+ return NO_ERROR;
+}
+
+status_t AudioPolicyMixCollection::getDevicesForUserId(int userId,
+ Vector<AudioDeviceTypeAddr>& devices) const {
+ // for each player mix:
+ // find rules that don't exclude this userId, and add the device to the list
+ for (size_t i = 0; i < size(); i++) {
+ bool ruleAllowsUserId = true;
+ const AudioPolicyMix *mix = itemAt(i).get();
+ if (mix->mMixType != MIX_TYPE_PLAYERS) {
+ continue;
+ }
+ for (size_t j = 0; j < mix->mCriteria.size(); j++) {
+ const uint32_t rule = mix->mCriteria[j].mRule;
+ if (rule == RULE_EXCLUDE_USERID
+ && userId == mix->mCriteria[j].mValue.mUserId) {
+ ruleAllowsUserId = false;
+ break;
+ }
+ }
+ if (ruleAllowsUserId) {
+ devices.add(AudioDeviceTypeAddr(mix->mDeviceType, mix->mDeviceAddress.string()));
+ }
+ }
+ return NO_ERROR;
+}
+
void AudioPolicyMixCollection::dump(String8 *dst) const
{
dst->append("\nAudio Policy Mix:\n");
diff --git a/services/audiopolicy/common/managerdefinitions/src/Serializer.cpp b/services/audiopolicy/common/managerdefinitions/src/Serializer.cpp
index 4376802..1b2f7c7 100644
--- a/services/audiopolicy/common/managerdefinitions/src/Serializer.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/Serializer.cpp
@@ -200,6 +200,7 @@
{
static constexpr const char *speakerDrcEnabled = "speaker_drc_enabled";
static constexpr const char *callScreenModeSupported= "call_screen_mode_supported";
+ static constexpr const char *engineLibrarySuffix = "engine_library";
};
static status_t deserialize(const xmlNode *root, AudioPolicyConfig *config);
@@ -692,6 +693,11 @@
convertTo<std::string, bool>(attr, value)) {
config->setCallScreenModeSupported(value);
}
+ std::string engineLibrarySuffix = getXmlAttribute(cur, Attributes::engineLibrarySuffix);
+ if (!engineLibrarySuffix.empty()) {
+ config->setEngineLibraryNameSuffix(engineLibrarySuffix);
+ }
+ return NO_ERROR;
}
}
return NO_ERROR;
diff --git a/services/audiopolicy/common/managerdefinitions/src/TypeConverter.cpp b/services/audiopolicy/common/managerdefinitions/src/TypeConverter.cpp
index 2b5455e..c5b3546 100644
--- a/services/audiopolicy/common/managerdefinitions/src/TypeConverter.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/TypeConverter.cpp
@@ -55,9 +55,11 @@
MAKE_STRING_FROM_ENUM(RULE_MATCH_ATTRIBUTE_USAGE),
MAKE_STRING_FROM_ENUM(RULE_MATCH_ATTRIBUTE_CAPTURE_PRESET),
MAKE_STRING_FROM_ENUM(RULE_MATCH_UID),
+ MAKE_STRING_FROM_ENUM(RULE_MATCH_USERID),
MAKE_STRING_FROM_ENUM(RULE_EXCLUDE_ATTRIBUTE_USAGE),
MAKE_STRING_FROM_ENUM(RULE_EXCLUDE_ATTRIBUTE_CAPTURE_PRESET),
MAKE_STRING_FROM_ENUM(RULE_EXCLUDE_UID),
+ MAKE_STRING_FROM_ENUM(RULE_EXCLUDE_USERID),
TERMINATOR
};
diff --git a/services/audiopolicy/config/Android.bp b/services/audiopolicy/config/Android.bp
index 4b5e788..f4610bb 100644
--- a/services/audiopolicy/config/Android.bp
+++ b/services/audiopolicy/config/Android.bp
@@ -92,6 +92,10 @@
srcs: ["audio_policy_configuration_generic.xml"],
}
filegroup {
+ name: "audio_policy_configuration_generic_configurable",
+ srcs: ["audio_policy_configuration_generic_configurable.xml"],
+}
+filegroup {
name: "usb_audio_policy_configuration",
srcs: ["usb_audio_policy_configuration.xml"],
}
diff --git a/services/audiopolicy/config/audio_policy_configuration_generic_configurable.xml b/services/audiopolicy/config/audio_policy_configuration_generic_configurable.xml
new file mode 100644
index 0000000..fbe4f7f
--- /dev/null
+++ b/services/audiopolicy/config/audio_policy_configuration_generic_configurable.xml
@@ -0,0 +1,50 @@
+<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
+<!-- Copyright (C) 2020 The Android Open Source Project
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
+<audioPolicyConfiguration version="1.0" xmlns:xi="http://www.w3.org/2001/XInclude">
+ <!-- version section contains a “version” tag in the form “major.minor” e.g version=”1.0” -->
+
+ <!-- Global configuration Declaration -->
+ <globalConfiguration speaker_drc_enabled="false" engine_library="configurable"/>
+
+ <modules>
+ <!-- Primary Audio HAL -->
+ <xi:include href="primary_audio_policy_configuration.xml"/>
+
+ <!-- Remote Submix Audio HAL -->
+ <xi:include href="r_submix_audio_policy_configuration.xml"/>
+
+ </modules>
+ <!-- End of Modules section -->
+
+ <!-- Volume section:
+ IMPORTANT NOTE: Volume tables have been moved to engine configuration.
+ Keep it here for legacy.
+ Engine will fallback on these files if none are provided by engine.
+ -->
+
+ <xi:include href="audio_policy_volumes.xml"/>
+ <xi:include href="default_volume_tables.xml"/>
+
+ <!-- End of Volume section -->
+
+ <!-- Surround Sound configuration -->
+
+ <xi:include href="surround_sound_configuration_5_0.xml"/>
+
+ <!-- End of Surround Sound configuration -->
+
+</audioPolicyConfiguration>
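
The new generic configurable profile sets engine_library="configurable" in its global configuration, and the Serializer change above reads that attribute into the policy config as an engine library name suffix. A hedged sketch of how such a suffix could be turned into a library name follows; the base name and extension are assumptions for illustration, not taken from the files in this change.

// Hypothetical illustration of resolving an engine library name suffix into a
// library file name at load time. The naming pattern here is an assumption.
#include <string>

std::string engineLibraryName(const std::string& suffix) {
    return "libaudiopolicyengine" + suffix + ".so";  // e.g. suffix = "configurable"
}
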
diff --git a/services/audiopolicy/engine/config/Android.bp b/services/audiopolicy/engine/config/Android.bp
index 885b5fa..ff840f9 100644
--- a/services/audiopolicy/engine/config/Android.bp
+++ b/services/audiopolicy/engine/config/Android.bp
@@ -1,4 +1,4 @@
-cc_library_static {
+cc_library {
name: "libaudiopolicyengine_config",
export_include_dirs: ["include"],
include_dirs: [
@@ -14,17 +14,14 @@
],
shared_libs: [
"libmedia_helper",
- "libandroidicu",
"libxml2",
"libutils",
"liblog",
"libcutils",
],
- static_libs: [
- "libaudiopolicycomponents",
- ],
header_libs: [
"libaudio_system_headers",
- "libaudiopolicycommon",
+ "libmedia_headers",
+ "libaudioclient_headers",
],
}
diff --git a/services/audiopolicy/engine/config/src/EngineConfig.cpp b/services/audiopolicy/engine/config/src/EngineConfig.cpp
index d47fbd2..7f8cdd9 100644
--- a/services/audiopolicy/engine/config/src/EngineConfig.cpp
+++ b/services/audiopolicy/engine/config/src/EngineConfig.cpp
@@ -18,7 +18,6 @@
//#define LOG_NDEBUG 0
#include "EngineConfig.h"
-#include <policy.h>
#include <cutils/properties.h>
#include <media/TypeConverter.h>
#include <media/convert.h>
diff --git a/services/audiopolicy/engineconfigurable/src/Engine.cpp b/services/audiopolicy/engineconfigurable/src/Engine.cpp
index 752ba92..7e5c5e3 100644
--- a/services/audiopolicy/engineconfigurable/src/Engine.cpp
+++ b/services/audiopolicy/engineconfigurable/src/Engine.cpp
@@ -80,8 +80,9 @@
status_t Engine::initCheck()
{
- if (mPolicyParameterMgr == nullptr || mPolicyParameterMgr->start() != NO_ERROR) {
- ALOGE("%s: could not start Policy PFW", __FUNCTION__);
+ std::string error;
+ if (mPolicyParameterMgr == nullptr || mPolicyParameterMgr->start(error) != NO_ERROR) {
+ ALOGE("%s: could not start Policy PFW: %s", __FUNCTION__, error.c_str());
return NO_INIT;
}
return EngineBase::initCheck();
@@ -162,21 +163,21 @@
return mPolicyParameterMgr->getForceUse(usage);
}
-status_t Engine::setDeviceConnectionState(const sp<DeviceDescriptor> devDesc,
+status_t Engine::setDeviceConnectionState(const sp<DeviceDescriptor> device,
audio_policy_dev_state_t state)
{
- mPolicyParameterMgr->setDeviceConnectionState(devDesc, state);
-
- if (audio_is_output_device(devDesc->type())) {
+ mPolicyParameterMgr->setDeviceConnectionState(
+ device->type(), device->address().c_str(), state);
+ if (audio_is_output_device(device->type())) {
// FIXME: Use DeviceTypeSet when the interface is ready
return mPolicyParameterMgr->setAvailableOutputDevices(
deviceTypesToBitMask(getApmObserver()->getAvailableOutputDevices().types()));
- } else if (audio_is_input_device(devDesc->type())) {
+ } else if (audio_is_input_device(device->type())) {
// FIXME: Use DeviceTypeSet when the interface is ready
return mPolicyParameterMgr->setAvailableInputDevices(
deviceTypesToBitMask(getApmObserver()->getAvailableInputDevices().types()));
}
- return EngineBase::setDeviceConnectionState(devDesc, state);
+ return EngineBase::setDeviceConnectionState(device, state);
}
status_t Engine::loadAudioPolicyEngineConfig()
diff --git a/services/audiopolicy/engineconfigurable/wrapper/Android.bp b/services/audiopolicy/engineconfigurable/wrapper/Android.bp
index 6f59487..301ecc0 100644
--- a/services/audiopolicy/engineconfigurable/wrapper/Android.bp
+++ b/services/audiopolicy/engineconfigurable/wrapper/Android.bp
@@ -11,7 +11,6 @@
"libbase_headers",
"libaudiopolicycommon",
],
- static_libs: ["libaudiopolicycomponents"],
shared_libs: [
"liblog",
"libutils",
diff --git a/services/audiopolicy/engineconfigurable/wrapper/ParameterManagerWrapper.cpp b/services/audiopolicy/engineconfigurable/wrapper/ParameterManagerWrapper.cpp
index 465a6f9..1b3b9a0 100644
--- a/services/audiopolicy/engineconfigurable/wrapper/ParameterManagerWrapper.cpp
+++ b/services/audiopolicy/engineconfigurable/wrapper/ParameterManagerWrapper.cpp
@@ -92,7 +92,8 @@
template <>
struct ParameterManagerWrapper::parameterManagerElementSupported<ISelectionCriterionTypeInterface> {};
-ParameterManagerWrapper::ParameterManagerWrapper()
+ParameterManagerWrapper::ParameterManagerWrapper(bool enableSchemaVerification,
+ const std::string &schemaUri)
: mPfwConnectorLogger(new ParameterMgrPlatformConnectorLogger)
{
// Connector
@@ -104,6 +105,15 @@
// Logger
mPfwConnector->setLogger(mPfwConnectorLogger);
+
+ // Schema validation
+ std::string error;
+ bool ret = mPfwConnector->setValidateSchemasOnStart(enableSchemaVerification, error);
+ ALOGE_IF(!ret, "Failed to activate schema validation: %s", error.c_str());
+ if (enableSchemaVerification && ret && !schemaUri.empty()) {
+ ALOGE("Schema verification activated with schema URI: %s", schemaUri.c_str());
+ mPfwConnector->setSchemaUri(schemaUri);
+ }
}
status_t ParameterManagerWrapper::addCriterion(const std::string &name, bool isInclusive,
@@ -145,11 +155,10 @@
delete mPfwConnector;
}
-status_t ParameterManagerWrapper::start()
+status_t ParameterManagerWrapper::start(std::string &error)
{
ALOGD("%s: in", __FUNCTION__);
/// Start PFW
- std::string error;
if (!mPfwConnector->start(error)) {
ALOGE("%s: Policy PFW start error: %s", __FUNCTION__, error.c_str());
return NO_INIT;
@@ -253,13 +262,13 @@
return interface->getLiteralValue(valueToCheck, literalValue);
}
-status_t ParameterManagerWrapper::setDeviceConnectionState(const sp<DeviceDescriptor> devDesc,
- audio_policy_dev_state_t state)
+status_t ParameterManagerWrapper::setDeviceConnectionState(
+ audio_devices_t type, const std::string address, audio_policy_dev_state_t state)
{
- std::string criterionName = audio_is_output_device(devDesc->type()) ?
+ std::string criterionName = audio_is_output_device(type) ?
gOutputDeviceAddressCriterionName : gInputDeviceAddressCriterionName;
- ALOGV("%s: device with address %s %s", __FUNCTION__, devDesc->address().c_str(),
+ ALOGV("%s: device with address %s %s", __FUNCTION__, address.c_str(),
state != AUDIO_POLICY_DEVICE_STATE_AVAILABLE? "disconnected" : "connected");
ISelectionCriterionInterface *criterion =
getElement<ISelectionCriterionInterface>(criterionName, mPolicyCriteria);
@@ -271,8 +280,8 @@
auto criterionType = criterion->getCriterionType();
int deviceAddressId;
- if (not criterionType->getNumericalValue(devDesc->address().c_str(), deviceAddressId)) {
- ALOGW("%s: unknown device address reported (%s)", __FUNCTION__, devDesc->address().c_str());
+ if (not criterionType->getNumericalValue(address.c_str(), deviceAddressId)) {
+ ALOGW("%s: unknown device address reported (%s)", __FUNCTION__, address.c_str());
return BAD_TYPE;
}
int currentValueMask = criterion->getCriterionState();
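
The hunk above ends just after the current criterion state is read. The usual continuation of this pattern (an assumption here, since the remaining lines are not part of the diff) is to set or clear the bit for the reported address and write the mask back. A tiny standalone sketch of that bit handling:

// Illustrative handling of an inclusive "device address" criterion bitmask,
// assuming each known address maps to one bit (deviceAddressId). Treat this as
// an assumption rather than the real wrapper code.
#include <cstdint>

uint32_t updateAddressCriterion(uint32_t currentMask, uint32_t deviceAddressBit,
                                bool connected) {
    return connected ? (currentMask | deviceAddressBit)    // device became available
                     : (currentMask & ~deviceAddressBit);  // device went away
}
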
diff --git a/services/audiopolicy/engineconfigurable/wrapper/include/ParameterManagerWrapper.h b/services/audiopolicy/engineconfigurable/wrapper/include/ParameterManagerWrapper.h
index 8443008..62b129a 100644
--- a/services/audiopolicy/engineconfigurable/wrapper/include/ParameterManagerWrapper.h
+++ b/services/audiopolicy/engineconfigurable/wrapper/include/ParameterManagerWrapper.h
@@ -16,9 +16,6 @@
#pragma once
-#include <PolicyAudioPort.h>
-#include <HwModule.h>
-#include <DeviceDescriptor.h>
#include <system/audio.h>
#include <system/audio_policy.h>
#include <utils/Errors.h>
@@ -47,16 +44,18 @@
using Criteria = std::map<std::string, ISelectionCriterionInterface *>;
public:
- ParameterManagerWrapper();
+ ParameterManagerWrapper(bool enableSchemaVerification = false,
+ const std::string &schemaUri = {});
~ParameterManagerWrapper();
/**
* Starts the platform state service.
* It starts the parameter framework policy instance.
+ * @param[out] error contains a human readable error if start failed
*
- * @return NO_ERROR if success, error code otherwise.
+ * @return NO_ERROR on success, error code otherwise with error set to a human readable string.
*/
- status_t start();
+ status_t start(std::string &error);
/**
* The following API wrap policy action to criteria
@@ -117,7 +116,15 @@
*/
status_t setAvailableOutputDevices(audio_devices_t outputDevices);
- status_t setDeviceConnectionState(const sp<DeviceDescriptor> devDesc,
+ /**
+ * @brief setDeviceConnectionState propagates a state event for a given device
+ * @param type bit mask of the device whose state has changed
+ * @param address address of the device whose state has changed
+ * @param state new state of the given device
+ * @return NO_ERROR if the new state was correctly propagated to the Engine
+ *         Parameter-Framework, error code otherwise.
+ */
+ status_t setDeviceConnectionState(audio_devices_t type, const std::string address,
audio_policy_dev_state_t state);
/**
diff --git a/services/audiopolicy/enginedefault/src/Engine.cpp b/services/audiopolicy/enginedefault/src/Engine.cpp
index a3fa974..a7d7cf6 100755
--- a/services/audiopolicy/enginedefault/src/Engine.cpp
+++ b/services/audiopolicy/enginedefault/src/Engine.cpp
@@ -463,8 +463,8 @@
"getDevicesForStrategy() no default device defined");
}
- ALOGVV("getDevices"
- "ForStrategy() strategy %d, device %x", strategy, devices.types());
+ ALOGVV("getDevices ForStrategy() strategy %d, device %s",
+ strategy, dumpDeviceTypes(devices.types()).c_str());
return devices;
}
@@ -636,8 +636,9 @@
String8(preferredStrategyDevice.mAddress.c_str()),
AUDIO_FORMAT_DEFAULT);
if (preferredAvailableDevDescr != nullptr) {
- ALOGVV("%s using pref device 0x%08x/%s for strategy %u", __FUNCTION__,
- preferredStrategyDevice.mType, preferredStrategyDevice.mAddress, strategy);
+ ALOGVV("%s using pref device 0x%08x/%s for strategy %u",
+ __func__, preferredStrategyDevice.mType,
+ preferredStrategyDevice.mAddress.c_str(), strategy);
return DeviceVector(preferredAvailableDevDescr);
}
}
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
index 75c89aa..c3b1b9d 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
@@ -3150,6 +3150,49 @@
return mEngine->getPreferredDeviceForStrategy(strategy, device);
}
+status_t AudioPolicyManager::setUserIdDeviceAffinities(int userId,
+ const Vector<AudioDeviceTypeAddr>& devices) {
+ ALOGI("%s() userId=%d num devices %zu", __FUNCTION__, userId, devices.size());
+ // userId/device affinity is only for output devices
+ for (size_t i = 0; i < devices.size(); i++) {
+ if (!audio_is_output_device(devices[i].mType)) {
+ ALOGE("%s() device=%08x is NOT an output device",
+ __FUNCTION__,
+ devices[i].mType);
+ return BAD_VALUE;
+ }
+ }
+
+ status_t status = mPolicyMixes.setUserIdDeviceAffinities(userId, devices);
+ if (status != NO_ERROR) {
+ ALOGE("%s() could not set device affinity for userId %d",
+ __FUNCTION__, userId);
+ return status;
+ }
+
+ // reevaluate outputs for all devices
+ checkForDeviceAndOutputChanges();
+ updateCallAndOutputRouting();
+
+ return NO_ERROR;
+}
+
+status_t AudioPolicyManager::removeUserIdDeviceAffinities(int userId) {
+ ALOGI("%s() userId=%d", __FUNCTION__, userId);
+ status_t status = mPolicyMixes.removeUserIdDeviceAffinities(userId);
+ if (status != NO_ERROR) {
+ ALOGE("%s() Could not remove all device affinities fo userId = %d",
+ __FUNCTION__, userId);
+ return status;
+ }
+
+ // reevaluate outputs for all devices
+ checkForDeviceAndOutputChanges();
+ updateCallAndOutputRouting();
+
+ return NO_ERROR;
+}
+
void AudioPolicyManager::dump(String8 *dst) const
{
dst->appendFormat("\nAudioPolicyManager Dump: %p\n", this);
@@ -6047,7 +6090,7 @@
int delayMs,
bool force)
{
- ALOGVV("applyStreamVolumes() for device %08x", device);
+ ALOGVV("applyStreamVolumes() for device %s", dumpDeviceTypes(deviceTypes).c_str());
for (const auto &volumeGroup : mEngine->getVolumeGroups()) {
auto &curves = getVolumeCurves(toVolumeSource(volumeGroup));
checkAndSetVolume(curves, toVolumeSource(volumeGroup),
@@ -6141,6 +6184,10 @@
case AUDIO_USAGE_VIRTUAL_SOURCE:
case AUDIO_USAGE_ASSISTANT:
case AUDIO_USAGE_CALL_ASSISTANT:
+ case AUDIO_USAGE_EMERGENCY:
+ case AUDIO_USAGE_SAFETY:
+ case AUDIO_USAGE_VEHICLE_STATUS:
+ case AUDIO_USAGE_ANNOUNCEMENT:
break;
default:
return false;
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.h b/services/audiopolicy/managerdefault/AudioPolicyManager.h
index 7e0e16f..10adeb4 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.h
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.h
@@ -262,6 +262,9 @@
virtual status_t setUidDeviceAffinities(uid_t uid,
const Vector<AudioDeviceTypeAddr>& devices);
virtual status_t removeUidDeviceAffinities(uid_t uid);
+ virtual status_t setUserIdDeviceAffinities(int userId,
+ const Vector<AudioDeviceTypeAddr>& devices);
+ virtual status_t removeUserIdDeviceAffinities(int userId);
virtual status_t setPreferredDeviceForStrategy(product_strategy_t strategy,
const AudioDeviceTypeAddr &device);
diff --git a/services/audiopolicy/service/Android.mk b/services/audiopolicy/service/Android.mk
index 80f4eab..d34dbe3 100644
--- a/services/audiopolicy/service/Android.mk
+++ b/services/audiopolicy/service/Android.mk
@@ -26,6 +26,7 @@
libaudioutils \
libaudiofoundation \
libhardware_legacy \
+ libaudiopolicy \
libaudiopolicymanager \
libmedia_helper \
libmediametrics \
diff --git a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
index 68a2a8c..ee32afe 100644
--- a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
+++ b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
@@ -25,6 +25,44 @@
namespace android {
+const std::vector<audio_usage_t>& SYSTEM_USAGES = {
+ AUDIO_USAGE_CALL_ASSISTANT,
+ AUDIO_USAGE_EMERGENCY,
+ AUDIO_USAGE_SAFETY,
+ AUDIO_USAGE_VEHICLE_STATUS,
+ AUDIO_USAGE_ANNOUNCEMENT
+};
+
+bool isSystemUsage(audio_usage_t usage) {
+ return std::find(std::begin(SYSTEM_USAGES), std::end(SYSTEM_USAGES), usage)
+ != std::end(SYSTEM_USAGES);
+}
+
+bool AudioPolicyService::isSupportedSystemUsage(audio_usage_t usage) {
+ return std::find(std::begin(mSupportedSystemUsages), std::end(mSupportedSystemUsages), usage)
+ != std::end(mSupportedSystemUsages);
+}
+
+status_t AudioPolicyService::validateUsage(audio_usage_t usage) {
+ return validateUsage(usage, IPCThreadState::self()->getCallingPid(),
+ IPCThreadState::self()->getCallingUid());
+}
+
+status_t AudioPolicyService::validateUsage(audio_usage_t usage, pid_t pid, uid_t uid) {
+ if (isSystemUsage(usage)) {
+ if (isSupportedSystemUsage(usage)) {
+ if (!modifyAudioRoutingAllowed(pid, uid)) {
+ ALOGE("permission denied: modify audio routing not allowed for uid %d", uid);
+ return PERMISSION_DENIED;
+ }
+ } else {
+ return BAD_VALUE;
+ }
+ }
+ return NO_ERROR;
+}
+
+
// ----------------------------------------------------------------------------
@@ -181,6 +219,12 @@
if (mAudioPolicyManager == NULL) {
return NO_INIT;
}
+
+ status_t result = validateUsage(attr->usage, pid, uid);
+ if (result != NO_ERROR) {
+ return result;
+ }
+
ALOGV("%s()", __func__);
Mutex::Autolock _l(mLock);
@@ -199,7 +243,7 @@
}
AutoCallerClear acc;
AudioPolicyInterface::output_type_t outputType;
- status_t result = mAudioPolicyManager->getOutputForAttr(attr, output, session, stream, uid,
+ result = mAudioPolicyManager->getOutputForAttr(attr, output, session, stream, uid,
config,
&flags, selectedDeviceId, portId,
secondaryOutputs,
@@ -363,6 +407,11 @@
return NO_INIT;
}
+ status_t result = validateUsage(attr->usage, pid, uid);
+ if (result != NO_ERROR) {
+ return result;
+ }
+
audio_source_t inputSource = attr->source;
if (inputSource == AUDIO_SOURCE_DEFAULT) {
inputSource = AUDIO_SOURCE_MIC;
@@ -807,7 +856,7 @@
}
status_t AudioPolicyService::getDevicesForAttributes(const AudioAttributes &aa,
- AudioDeviceTypeAddrVector *devices) const
+ AudioDeviceTypeAddrVector *devices) const
{
if (mAudioPolicyManager == NULL) {
return NO_INIT;
@@ -1002,6 +1051,22 @@
return audioPolicyEffects->removeStreamDefaultEffect(id);
}
+status_t AudioPolicyService::setSupportedSystemUsages(const std::vector<audio_usage_t>& systemUsages) {
+ Mutex::Autolock _l(mLock);
+ if (!modifyAudioRoutingAllowed()) {
+ return PERMISSION_DENIED;
+ }
+
+ bool areAllSystemUsages = std::all_of(begin(systemUsages), end(systemUsages),
+ [](audio_usage_t usage) { return isSystemUsage(usage); });
+ if (!areAllSystemUsages) {
+ return BAD_VALUE;
+ }
+
+ mSupportedSystemUsages = systemUsages;
+ return NO_ERROR;
+}
+
status_t AudioPolicyService::setAllowedCapturePolicy(uid_t uid, audio_flags_mask_t capturePolicy) {
Mutex::Autolock _l(mLock);
if (mAudioPolicyManager == NULL) {
@@ -1033,6 +1098,12 @@
ALOGV("mAudioPolicyManager == NULL");
return false;
}
+
+ status_t result = validateUsage(attributes.usage);
+ if (result != NO_ERROR) {
+ return result;
+ }
+
Mutex::Autolock _l(mLock);
return mAudioPolicyManager->isDirectOutputSupported(config, attributes);
}
@@ -1149,14 +1220,39 @@
return PERMISSION_DENIED;
}
+ // Require CAPTURE_VOICE_COMMUNICATION_OUTPUT if one of the
+ // mixes is a render|loopback mix that aims to capture audio played with
+ // USAGE_VOICE_COMMUNICATION.
+ bool needCaptureVoiceCommunicationOutput =
+ std::any_of(mixes.begin(), mixes.end(), [](auto& mix) {
+ return is_mix_loopback_render(mix.mRouteFlags) &&
+ mix.hasMatchingRuleForUsage([] (auto usage) {
+ return usage == AUDIO_USAGE_VOICE_COMMUNICATION;});
+ });
+
+ // Require CAPTURE_MEDIA_OUTPUT if there is a mix for privileged capture
+ // that is trying to capture any usage other than USAGE_VOICE_COMMUNICATION.
+ // (If USAGE_VOICE_COMMUNICATION should be captured, then CAPTURE_VOICE_COMMUNICATION_OUTPUT
+ // is required, even if it is not privileged capture).
bool needCaptureMediaOutput = std::any_of(mixes.begin(), mixes.end(), [](auto& mix) {
- return mix.mAllowPrivilegedPlaybackCapture; });
+ return mix.mAllowPrivilegedPlaybackCapture &&
+ mix.hasMatchingRuleForUsage([] (auto usage) {
+ return usage != AUDIO_USAGE_VOICE_COMMUNICATION;
+ });
+ });
+
const uid_t callingUid = IPCThreadState::self()->getCallingUid();
const pid_t callingPid = IPCThreadState::self()->getCallingPid();
+
if (needCaptureMediaOutput && !captureMediaOutputAllowed(callingPid, callingUid)) {
return PERMISSION_DENIED;
}
+ if (needCaptureVoiceCommunicationOutput &&
+ !captureVoiceCommunicationOutputAllowed(callingPid, callingUid)) {
+ return PERMISSION_DENIED;
+ }
+
if (mAudioPolicyManager == NULL) {
return NO_INIT;
}
@@ -1193,6 +1289,31 @@
return mAudioPolicyManager->removeUidDeviceAffinities(uid);
}
+status_t AudioPolicyService::setUserIdDeviceAffinities(int userId,
+ const Vector<AudioDeviceTypeAddr>& devices) {
+ Mutex::Autolock _l(mLock);
+ if (!modifyAudioRoutingAllowed()) {
+ return PERMISSION_DENIED;
+ }
+ if (mAudioPolicyManager == NULL) {
+ return NO_INIT;
+ }
+ AutoCallerClear acc;
+ return mAudioPolicyManager->setUserIdDeviceAffinities(userId, devices);
+}
+
+status_t AudioPolicyService::removeUserIdDeviceAffinities(int userId) {
+ Mutex::Autolock _l(mLock);
+ if (!modifyAudioRoutingAllowed()) {
+ return PERMISSION_DENIED;
+ }
+ if (mAudioPolicyManager == NULL) {
+ return NO_INIT;
+ }
+ AutoCallerClear acc;
+ return mAudioPolicyManager->removeUserIdDeviceAffinities(userId);
+}
+
status_t AudioPolicyService::startAudioSource(const struct audio_port_config *source,
const audio_attributes_t *attributes,
audio_port_handle_t *portId)
@@ -1201,6 +1322,12 @@
if (mAudioPolicyManager == NULL) {
return NO_INIT;
}
+
+ status_t result = validateUsage(attributes->usage);
+ if (result != NO_ERROR) {
+ return result;
+ }
+
// startAudioSource should be created as the calling uid
const uid_t callingUid = IPCThreadState::self()->getCallingUid();
AutoCallerClear acc;
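
Taken together, setSupportedSystemUsages() and validateUsage() above form a small gate: a system usage is only accepted when it has been explicitly whitelisted and the caller may modify audio routing. A standalone restatement over placeholder types follows; hasModifyAudioRouting stands in for the real permission check.

// Standalone restatement of the validateUsage() gate above. Usage, Status and
// hasModifyAudioRouting are placeholders for the real types and permission check.
#include <algorithm>
#include <vector>

enum class Usage { Media, CallAssistant, Emergency, Safety, VehicleStatus, Announcement };
enum class Status { Ok, BadValue, PermissionDenied };

static const std::vector<Usage> kSystemUsages = {
    Usage::CallAssistant, Usage::Emergency, Usage::Safety,
    Usage::VehicleStatus, Usage::Announcement};

bool isSystemUsage(Usage u) {
    return std::find(kSystemUsages.begin(), kSystemUsages.end(), u) != kSystemUsages.end();
}

Status validateUsage(Usage u, const std::vector<Usage>& supported, bool hasModifyAudioRouting) {
    if (!isSystemUsage(u)) return Status::Ok;  // non-system usages pass through
    if (std::find(supported.begin(), supported.end(), u) == supported.end()) {
        return Status::BadValue;               // system usage not whitelisted
    }
    return hasModifyAudioRouting ? Status::Ok : Status::PermissionDenied;
}
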
diff --git a/services/audiopolicy/service/AudioPolicyService.cpp b/services/audiopolicy/service/AudioPolicyService.cpp
index c83512f..e5c36ea 100644
--- a/services/audiopolicy/service/AudioPolicyService.cpp
+++ b/services/audiopolicy/service/AudioPolicyService.cpp
@@ -74,6 +74,8 @@
mAudioPolicyClient = new AudioPolicyClient(this);
mAudioPolicyManager = createAudioPolicyManager(mAudioPolicyClient);
+
+ mSupportedSystemUsages = std::vector<audio_usage_t> {};
}
// load audio processing modules
sp<AudioPolicyEffects>audioPolicyEffects = new AudioPolicyEffects();
@@ -392,6 +394,14 @@
snprintf(buffer, SIZE, "Command Thread: %p\n", mAudioCommandThread.get());
result.append(buffer);
+ snprintf(buffer, SIZE, "Supported System Usages:\n");
+ result.append(buffer);
+ for (std::vector<audio_usage_t>::iterator it = mSupportedSystemUsages.begin();
+ it != mSupportedSystemUsages.end(); ++it) {
+ snprintf(buffer, SIZE, "\t%d\n", *it);
+ result.append(buffer);
+ }
+
write(fd, result.string(), result.size());
return NO_ERROR;
}
diff --git a/services/audiopolicy/service/AudioPolicyService.h b/services/audiopolicy/service/AudioPolicyService.h
index 84adcc2..e297f34 100644
--- a/services/audiopolicy/service/AudioPolicyService.h
+++ b/services/audiopolicy/service/AudioPolicyService.h
@@ -187,6 +187,7 @@
audio_io_handle_t output,
int delayMs = 0);
virtual status_t setVoiceVolume(float volume, int delayMs = 0);
+ status_t setSupportedSystemUsages(const std::vector<audio_usage_t>& systemUsages);
status_t setAllowedCapturePolicy(uid_t uid, audio_flags_mask_t capturePolicy) override;
virtual bool isOffloadSupported(const audio_offload_info_t &config);
virtual bool isDirectOutputSupported(const audio_config_base_t& config,
@@ -234,6 +235,9 @@
virtual status_t getPreferredDeviceForStrategy(product_strategy_t strategy,
AudioDeviceTypeAddr &device);
+ virtual status_t setUserIdDeviceAffinities(int userId, const Vector<AudioDeviceTypeAddr>& devices);
+
+ virtual status_t removeUserIdDeviceAffinities(int userId);
virtual status_t startAudioSource(const struct audio_port_config *source,
const audio_attributes_t *attributes,
@@ -344,6 +348,10 @@
app_state_t apmStatFromAmState(int amState);
+ bool isSupportedSystemUsage(audio_usage_t usage);
+ status_t validateUsage(audio_usage_t usage);
+ status_t validateUsage(audio_usage_t usage, pid_t pid, uid_t uid);
+
void updateUidStates();
void updateUidStates_l();
@@ -863,6 +871,7 @@
struct audio_policy *mpAudioPolicy;
AudioPolicyInterface *mAudioPolicyManager;
AudioPolicyClient *mAudioPolicyClient;
+ std::vector<audio_usage_t> mSupportedSystemUsages;
DefaultKeyedVector< int64_t, sp<NotificationClient> > mNotificationClients;
Mutex mNotificationClientsLock; // protects mNotificationClients
diff --git a/services/camera/libcameraservice/Android.bp b/services/camera/libcameraservice/Android.bp
index c63feb2..87cb0b5 100644
--- a/services/camera/libcameraservice/Android.bp
+++ b/services/camera/libcameraservice/Android.bp
@@ -41,11 +41,13 @@
"api1/client2/CaptureSequencer.cpp",
"api1/client2/ZslProcessor.cpp",
"api2/CameraDeviceClient.cpp",
+ "api2/CameraOfflineSessionClient.cpp",
"api2/CompositeStream.cpp",
"api2/DepthCompositeStream.cpp",
"api2/HeicEncoderInfoManager.cpp",
"api2/HeicCompositeStream.cpp",
"device1/CameraHardwareInterface.cpp",
+ "device3/BufferUtils.cpp",
"device3/Camera3Device.cpp",
"device3/Camera3OfflineSession.cpp",
"device3/Camera3Stream.cpp",
@@ -60,6 +62,8 @@
"device3/CoordinateMapper.cpp",
"device3/DistortionMapper.cpp",
"device3/ZoomRatioMapper.cpp",
+ "device3/Camera3OutputStreamInterface.cpp",
+ "device3/Camera3OutputUtils.cpp",
"gui/RingBufferConsumer.cpp",
"utils/CameraThreadState.cpp",
"hidl/AidlCameraDeviceCallbacks.cpp",
@@ -113,6 +117,7 @@
"android.hardware.camera.common@1.0",
"android.hardware.camera.provider@2.4",
"android.hardware.camera.provider@2.5",
+ "android.hardware.camera.provider@2.6",
"android.hardware.camera.device@1.0",
"android.hardware.camera.device@3.2",
"android.hardware.camera.device@3.3",
diff --git a/services/camera/libcameraservice/CameraService.cpp b/services/camera/libcameraservice/CameraService.cpp
index 18ed3a5..8666b7b 100644
--- a/services/camera/libcameraservice/CameraService.cpp
+++ b/services/camera/libcameraservice/CameraService.cpp
@@ -128,6 +128,7 @@
static constexpr int32_t kVendorClientScore = 200;
// Matches with PROCESS_STATE_PERSISTENT_UI in ActivityManager.java
static constexpr int32_t kVendorClientState = 1;
+const String8 CameraService::kOfflineDevice("offline-");
Mutex CameraService::sProxyMutex;
sp<hardware::ICameraServiceProxy> CameraService::sCameraServiceProxy;
@@ -394,7 +395,7 @@
// to this device until the status changes
updateStatus(StatusInternal::NOT_PRESENT, id);
- sp<BasicClient> clientToDisconnect;
+ sp<BasicClient> clientToDisconnectOnline, clientToDisconnectOffline;
{
// Don't do this in updateStatus to avoid deadlock over mServiceLock
Mutex::Autolock lock(mServiceLock);
@@ -402,23 +403,14 @@
// Remove cached shim parameters
state->setShimParams(CameraParameters());
- // Remove the client from the list of active clients, if there is one
- clientToDisconnect = removeClientLocked(id);
+ // Remove online as well as offline client from the list of active clients,
+ // if they are present
+ clientToDisconnectOnline = removeClientLocked(id);
+ clientToDisconnectOffline = removeClientLocked(kOfflineDevice + id);
}
- // Disconnect client
- if (clientToDisconnect.get() != nullptr) {
- ALOGI("%s: Client for camera ID %s evicted due to device status change from HAL",
- __FUNCTION__, id.string());
- // Notify the client of disconnection
- clientToDisconnect->notifyError(
- hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_DISCONNECTED,
- CaptureResultExtras{});
- // Ensure not in binder RPC so client disconnect PID checks work correctly
- LOG_ALWAYS_FATAL_IF(CameraThreadState::getCallingPid() != getpid(),
- "onDeviceStatusChanged must be called from the camera service process!");
- clientToDisconnect->disconnect();
- }
+ disconnectClient(id, clientToDisconnectOnline);
+ disconnectClient(kOfflineDevice + id, clientToDisconnectOffline);
removeStates(id);
} else {
@@ -428,7 +420,67 @@
}
updateStatus(newStatus, id);
}
+}
+void CameraService::onDeviceStatusChanged(const String8& id,
+ const String8& physicalId,
+ CameraDeviceStatus newHalStatus) {
+ ALOGI("%s: Status changed for cameraId=%s, physicalCameraId=%s, newStatus=%d",
+ __FUNCTION__, id.string(), physicalId.string(), newHalStatus);
+
+ StatusInternal newStatus = mapToInternal(newHalStatus);
+
+ std::shared_ptr<CameraState> state = getCameraState(id);
+
+ if (state == nullptr) {
+ ALOGE("%s: Physical camera id %s status change on a non-present ID %s",
+ __FUNCTION__, id.string(), physicalId.string());
+ return;
+ }
+
+ StatusInternal logicalCameraStatus = state->getStatus();
+ if (logicalCameraStatus != StatusInternal::PRESENT &&
+ logicalCameraStatus != StatusInternal::NOT_AVAILABLE) {
+ ALOGE("%s: Physical camera id %s status %d change for an invalid logical camera state %d",
+ __FUNCTION__, physicalId.string(), newHalStatus, logicalCameraStatus);
+ return;
+ }
+
+ bool updated = false;
+ if (newStatus == StatusInternal::PRESENT) {
+ updated = state->removeUnavailablePhysicalId(physicalId);
+ } else {
+ updated = state->addUnavailablePhysicalId(physicalId);
+ }
+
+ if (updated) {
+ logDeviceRemoved(id, String8::format("Device %s-%s availability changed from %d to %d",
+ id.string(), physicalId.string(),
+ newStatus != StatusInternal::PRESENT,
+ newStatus == StatusInternal::PRESENT));
+
+ String16 id16(id), physicalId16(physicalId);
+ Mutex::Autolock lock(mStatusListenerLock);
+ for (auto& listener : mListenerList) {
+ listener->getListener()->onPhysicalCameraStatusChanged(mapToInterface(newStatus),
+ id16, physicalId16);
+ }
+ }
+}
+
+void CameraService::disconnectClient(const String8& id, sp<BasicClient> clientToDisconnect) {
+ if (clientToDisconnect.get() != nullptr) {
+ ALOGI("%s: Client for camera ID %s evicted due to device status change from HAL",
+ __FUNCTION__, id.string());
+ // Notify the client of disconnection
+ clientToDisconnect->notifyError(
+ hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_DISCONNECTED,
+ CaptureResultExtras{});
+ // Ensure not in binder RPC so client disconnect PID checks work correctly
+ LOG_ALWAYS_FATAL_IF(CameraThreadState::getCallingPid() != getpid(),
+ "onDeviceStatusChanged must be called from the camera service process!");
+ clientToDisconnect->disconnect();
+ }
}
void CameraService::onTorchStatusChanged(const String8& cameraId,
@@ -761,6 +813,7 @@
case CAMERA_DEVICE_API_VERSION_3_3:
case CAMERA_DEVICE_API_VERSION_3_4:
case CAMERA_DEVICE_API_VERSION_3_5:
+ case CAMERA_DEVICE_API_VERSION_3_6:
if (effectiveApiLevel == API_1) { // Camera1 API route
sp<ICameraClient> tmp = static_cast<ICameraClient*>(cameraCb.get());
*client = new Camera2Client(cameraService, tmp, packageName, featureId,
@@ -1695,6 +1748,77 @@
return ret;
}
+status_t CameraService::addOfflineClient(String8 cameraId, sp<BasicClient> offlineClient) {
+ if (offlineClient.get() == nullptr) {
+ return BAD_VALUE;
+ }
+
+ {
+ // Acquire mServiceLock and prevent other clients from connecting
+ std::unique_ptr<AutoConditionLock> lock =
+ AutoConditionLock::waitAndAcquire(mServiceLockWrapper, DEFAULT_CONNECT_TIMEOUT_NS);
+
+ if (lock == nullptr) {
+ ALOGE("%s: (PID %d) rejected (too many other clients connecting)."
+ , __FUNCTION__, offlineClient->getClientPid());
+ return TIMED_OUT;
+ }
+
+ auto onlineClientDesc = mActiveClientManager.get(cameraId);
+ if (onlineClientDesc.get() == nullptr) {
+ ALOGE("%s: No active online client using camera id: %s", __FUNCTION__,
+ cameraId.c_str());
+ return BAD_VALUE;
+ }
+
+ // Offline clients do not evict or conflict with other online devices. Resource sharing
+ // conflicts are handled by the camera provider which will either succeed or fail before
+ // reaching this method.
+ const auto& onlinePriority = onlineClientDesc->getPriority();
+ auto offlineClientDesc = CameraClientManager::makeClientDescriptor(
+ kOfflineDevice + onlineClientDesc->getKey(), offlineClient, /*cost*/ 0,
+ /*conflictingKeys*/ std::set<String8>(), onlinePriority.getScore(),
+ onlineClientDesc->getOwnerId(), onlinePriority.getState());
+
+ // Allow only one offline device per camera
+ auto incompatibleClients = mActiveClientManager.getIncompatibleClients(offlineClientDesc);
+ if (!incompatibleClients.empty()) {
+ ALOGE("%s: Incompatible offline clients present!", __FUNCTION__);
+ return BAD_VALUE;
+ }
+
+ auto err = offlineClient->initialize(mCameraProviderManager, mMonitorTags);
+ if (err != OK) {
+ ALOGE("%s: Could not initialize offline client.", __FUNCTION__);
+ return err;
+ }
+
+ auto evicted = mActiveClientManager.addAndEvict(offlineClientDesc);
+ if (evicted.size() > 0) {
+ for (auto& i : evicted) {
+ ALOGE("%s: Invalid state: Offline client for camera %s was not removed ",
+ __FUNCTION__, i->getKey().string());
+ }
+
+ LOG_ALWAYS_FATAL("%s: Invalid state for CameraService, offline clients not evicted "
+ "properly", __FUNCTION__);
+
+ return BAD_VALUE;
+ }
+
+ logConnectedOffline(offlineClientDesc->getKey(),
+ static_cast<int>(offlineClientDesc->getOwnerId()),
+ String8(offlineClient->getPackageName()));
+
+ sp<IBinder> remoteCallback = offlineClient->getRemote();
+ if (remoteCallback != nullptr) {
+ remoteCallback->linkToDeath(this);
+ }
+ } // lock is destroyed, allow further connect calls
+
+ return OK;
+}
+
Status CameraService::setTorchMode(const String16& cameraId, bool enabled,
const sp<IBinder>& clientBinder) {
Mutex::Autolock lock(mServiceLock);
@@ -1966,7 +2090,8 @@
{
Mutex::Autolock lock(mCameraStatesLock);
for (auto& i : mCameraStates) {
- cameraStatuses->emplace_back(i.first, mapToInterface(i.second->getStatus()));
+ cameraStatuses->emplace_back(i.first,
+ mapToInterface(i.second->getStatus()), i.second->getUnavailablePhysicalIds());
}
}
// Remove the camera statuses that should be hidden from the client, we do
@@ -2093,6 +2218,7 @@
case CAMERA_DEVICE_API_VERSION_3_3:
case CAMERA_DEVICE_API_VERSION_3_4:
case CAMERA_DEVICE_API_VERSION_3_5:
+ case CAMERA_DEVICE_API_VERSION_3_6:
ALOGV("%s: Camera id %s uses HAL3.2 or newer, supports api1/api2 directly",
__FUNCTION__, id.string());
*isSupported = true;
@@ -2298,6 +2424,13 @@
clientPackage, clientPid));
}
+void CameraService::logDisconnectedOffline(const char* cameraId, int clientPid,
+ const char* clientPackage) {
+ // Log the offline client disconnected
+ logEvent(String8::format("DISCONNECT offline device %s client for package %s (PID %d)",
+ cameraId, clientPackage, clientPid));
+}
+
void CameraService::logConnected(const char* cameraId, int clientPid,
const char* clientPackage) {
// Log the clients evicted
@@ -2305,6 +2438,13 @@
clientPackage, clientPid));
}
+void CameraService::logConnectedOffline(const char* cameraId, int clientPid,
+ const char* clientPackage) {
+ // Log the offline client connected
+ logEvent(String8::format("CONNECT offline device %s client for package %s (PID %d)", cameraId,
+ clientPackage, clientPid));
+}
+
void CameraService::logRejected(const char* cameraId, int clientPid,
const char* clientPackage, const char* reason) {
// Log the client rejected
@@ -2742,6 +2882,7 @@
if (mAppOpsManager == nullptr) {
return;
}
+ // TODO : add offline camera session case
if (op != AppOpsManager::OP_CAMERA) {
ALOGW("Unexpected app ops notification received: %d", op);
return;
@@ -2776,20 +2917,6 @@
// ----------------------------------------------------------------------------
-sp<CameraService> CameraService::OfflineClient::sCameraService;
-
-status_t CameraService::OfflineClient::startCameraOps() {
- // TODO
- return OK;
-}
-
-status_t CameraService::OfflineClient::finishCameraOps() {
- // TODO
- return OK;
-}
-
-// ----------------------------------------------------------------------------
-
void CameraService::Client::notifyError(int32_t errorCode,
const CaptureResultExtras& resultExtras) {
(void) resultExtras;
@@ -3107,6 +3234,12 @@
return mStatus;
}
+std::vector<String8> CameraService::CameraState::getUnavailablePhysicalIds() const {
+ Mutex::Autolock lock(mStatusLock);
+ std::vector<String8> res(mUnavailablePhysicalIds.begin(), mUnavailablePhysicalIds.end());
+ return res;
+}
+
CameraParameters CameraService::CameraState::getShimParams() const {
return mShimParams;
}
@@ -3131,6 +3264,18 @@
return mSystemCameraKind;
}
+bool CameraService::CameraState::addUnavailablePhysicalId(const String8& physicalId) {
+ Mutex::Autolock lock(mStatusLock);
+ auto result = mUnavailablePhysicalIds.insert(physicalId);
+ return result.second;
+}
+
+bool CameraService::CameraState::removeUnavailablePhysicalId(const String8& physicalId) {
+ Mutex::Autolock lock(mStatusLock);
+ auto count = mUnavailablePhysicalIds.erase(physicalId);
+ return count > 0;
+}
+
// ----------------------------------------------------------------------------
// ClientEventListener
// ----------------------------------------------------------------------------
@@ -3488,7 +3633,7 @@
return;
}
// Update the status for this camera state, then send the onStatusChangedCallbacks to each
- // of the listeners with both the mStatusStatus and mStatusListenerLock held
+ // of the listeners with both the mStatusLock and mStatusListenerLock held
state->updateStatus(status, cameraId, rejectSourceStates, [this, &deviceKind, &supportsHAL3]
(const String8& cameraId, StatusInternal status) {
@@ -3510,6 +3655,8 @@
Mutex::Autolock lock(mStatusListenerLock);
+ notifyPhysicalCameraStatusLocked(mapToInterface(status), cameraId);
+
for (auto& listener : mListenerList) {
bool isVendorListener = listener->isVendorListener();
if (shouldSkipStatusUpdates(deviceKind, isVendorListener,
@@ -3603,6 +3750,28 @@
return OK;
}
+void CameraService::notifyPhysicalCameraStatusLocked(int32_t status, const String8& cameraId) {
+ Mutex::Autolock lock(mCameraStatesLock);
+ for (const auto& state : mCameraStates) {
+ std::vector<std::string> physicalCameraIds;
+ if (!mCameraProviderManager->isLogicalCamera(state.first.c_str(), &physicalCameraIds)) {
+ // This is not a logical multi-camera.
+ continue;
+ }
+ if (std::find(physicalCameraIds.begin(), physicalCameraIds.end(), cameraId.c_str())
+ == physicalCameraIds.end()) {
+ // cameraId is not a physical camera of this logical multi-camera.
+ continue;
+ }
+
+ String16 id16(state.first), physicalId16(cameraId);
+ for (auto& listener : mListenerList) {
+ listener->getListener()->onPhysicalCameraStatusChanged(status,
+ id16, physicalId16);
+ }
+ }
+}
+
void CameraService::blockClientsForUid(uid_t uid) {
const auto clients = mActiveClientManager.getAll();
for (auto& current : clients) {
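
The addUnavailablePhysicalId()/removeUnavailablePhysicalId() pair above relies on std::set reporting whether an insert or erase actually changed the stored state, so listeners are only notified on real availability transitions. A minimal model of that idea; PhysicalAvailabilityTracker is a placeholder, not the CameraState class.

// Minimal model of the "notify only on a real transition" logic above.
#include <set>
#include <string>

struct PhysicalAvailabilityTracker {
    std::set<std::string> unavailable;

    // Returns true when the stored state changed and listeners should be notified.
    bool update(const std::string& physicalId, bool nowPresent) {
        return nowPresent ? unavailable.erase(physicalId) > 0
                          : unavailable.insert(physicalId).second;
    }
};
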
diff --git a/services/camera/libcameraservice/CameraService.h b/services/camera/libcameraservice/CameraService.h
index dae9c09..34b970d 100644
--- a/services/camera/libcameraservice/CameraService.h
+++ b/services/camera/libcameraservice/CameraService.h
@@ -68,6 +68,7 @@
{
friend class BinderService<CameraService>;
friend class CameraClient;
+ friend class CameraOfflineSessionClient;
public:
class Client;
class BasicClient;
@@ -102,6 +103,9 @@
virtual void onDeviceStatusChanged(const String8 &cameraId,
hardware::camera::common::V1_0::CameraDeviceStatus newHalStatus) override;
+ virtual void onDeviceStatusChanged(const String8 &cameraId,
+ const String8 &physicalCameraId,
+ hardware::camera::common::V1_0::CameraDeviceStatus newHalStatus) override;
virtual void onTorchStatusChanged(const String8& cameraId,
hardware::camera::common::V1_0::TorchModeStatus newStatus) override;
virtual void onNewProviderRegistered() override;
@@ -185,6 +189,9 @@
// Monitored UIDs availability notification
void notifyMonitoredUids();
+ // Register an offline client for a given active camera id
+ status_t addOfflineClient(String8 cameraId, sp<BasicClient> offlineClient);
+
/////////////////////////////////////////////////////////////////////
// Client functionality
@@ -310,10 +317,9 @@
sp<IBinder> mRemoteBinder; // immutable after constructor
// permissions management
- status_t startCameraOps();
- status_t finishCameraOps();
+ virtual status_t startCameraOps();
+ virtual status_t finishCameraOps();
- private:
std::unique_ptr<AppOpsManager> mAppOpsManager = nullptr;
class OpsCallback : public BnAppOpsCallback {
@@ -402,87 +408,6 @@
int mCameraId; // All API1 clients use integer camera IDs
}; // class Client
-
- // Client for offline session. Note that offline session client does not affect camera service's
- // client arbitration logic. It is camera HAL's decision to decide whether a normal camera
- // client is conflicting with existing offline client(s).
- // The other distinctive difference between offline clients and normal clients is that normal
- // clients are created through ICameraService binder calls, while the offline session client
- // is created through ICameraDeviceUser::switchToOffline call.
- class OfflineClient : public virtual RefBase {
-
- virtual status_t dump(int fd, const Vector<String16>& args) = 0;
-
- // Block the client form using the camera
- virtual void block() = 0;
-
- // Return the package name for this client
- virtual String16 getPackageName() const = 0;
-
- // Notify client about a fatal error
- // TODO: maybe let impl notify within block?
- virtual void notifyError(int32_t errorCode,
- const CaptureResultExtras& resultExtras) = 0;
-
- // Get the UID of the application client using this
- virtual uid_t getClientUid() const = 0;
-
- // Get the PID of the application client using this
- virtual int getClientPid() const = 0;
-
- protected:
- OfflineClient(const sp<CameraService>& cameraService,
- const String16& clientPackageName,
- const String8& cameraIdStr,
- int clientPid,
- uid_t clientUid,
- int servicePid): mCameraIdStr(cameraIdStr),
- mClientPackageName(clientPackageName), mClientPid(clientPid),
- mClientUid(clientUid), mServicePid(servicePid) {
- if (sCameraService == nullptr) {
- sCameraService = cameraService;
- }
- }
-
- virtual ~OfflineClient() { /*TODO*/ }
-
- // these are initialized in the constructor.
- static sp<CameraService> sCameraService;
- const String8 mCameraIdStr;
- String16 mClientPackageName;
- pid_t mClientPid;
- const uid_t mClientUid;
- const pid_t mServicePid;
- bool mDisconnected;
-
- // - The app-side Binder interface to receive callbacks from us
- sp<IBinder> mRemoteBinder; // immutable after constructor
-
- // permissions management
- status_t startCameraOps();
- status_t finishCameraOps();
-
- private:
- std::unique_ptr<AppOpsManager> mAppOpsManager = nullptr;
-
- class OpsCallback : public BnAppOpsCallback {
- public:
- explicit OpsCallback(wp<OfflineClient> client) : mClient(client) {}
- virtual void opChanged(int32_t /*op*/, const String16& /*packageName*/) {
- //TODO
- }
-
- private:
- wp<OfflineClient> mClient;
-
- }; // class OpsCallback
-
- sp<OpsCallback> mOpsCallback;
-
- // IAppOpsCallback interface, indirected through opListener
- // virtual void opChanged(int32_t op, const String16& packageName);
- }; // class OfflineClient
-
/**
* A listener class that implements the LISTENER interface for use with a ClientManager, and
* implements the following methods:
@@ -634,11 +559,24 @@
*/
SystemCameraKind getSystemCameraKind() const;
+ /**
+ * Add/Remove the unavailable physical camera ID.
+ */
+ bool addUnavailablePhysicalId(const String8& physicalId);
+ bool removeUnavailablePhysicalId(const String8& physicalId);
+
+ /**
+ * Return the unavailable physical ids for this device.
+ *
+ * This method acquires mStatusLock.
+ */
+ std::vector<String8> getUnavailablePhysicalIds() const;
private:
const String8 mId;
StatusInternal mStatus; // protected by mStatusLock
const int mCost;
std::set<String8> mConflicting;
+ std::set<String8> mUnavailablePhysicalIds;
mutable Mutex mStatusLock;
CameraParameters mShimParams;
const SystemCameraKind mSystemCameraKind;
@@ -872,6 +810,17 @@
void logDisconnected(const char* cameraId, int clientPid, const char* clientPackage);
/**
+ * Add an event log message that a client has been disconnected from offline device.
+ */
+ void logDisconnectedOffline(const char* cameraId, int clientPid, const char* clientPackage);
+
+ /**
+ * Add an event log message that an offline client has been connected.
+ */
+ void logConnectedOffline(const char* cameraId, int clientPid,
+ const char* clientPackage);
+
+ /**
* Add an event log message that a client has been connected.
*/
void logConnected(const char* cameraId, int clientPid, const char* clientPackage);
@@ -1029,6 +978,9 @@
status_t setTorchStatusLocked(const String8 &cameraId,
hardware::camera::common::V1_0::TorchModeStatus status);
+ // notify physical camera status when the physical camera is public.
+ void notifyPhysicalCameraStatusLocked(int32_t status, const String8& cameraId);
+
// IBinder::DeathRecipient implementation
virtual void binderDied(const wp<IBinder> &who);
@@ -1095,6 +1047,12 @@
void broadcastTorchModeStatus(const String8& cameraId,
hardware::camera::common::V1_0::TorchModeStatus status);
+ void disconnectClient(const String8& id, sp<BasicClient> clientToDisconnect);
+
+ // Regular online and offline devices must not be in conflict at camera service layer.
+ // Use separate keys for offline devices.
+ static const String8 kOfflineDevice;
+
// TODO: right now each BasicClient holds one AppOpsManager instance.
// We can refactor the code so all of clients share this instance
AppOpsManager mAppOps;
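
kOfflineDevice above gives offline sessions their own key space in the client manager, so an offline client never collides with the online client of the same camera. A small sketch of the keying idea with a plain std::map follows; offlineKey and the map stand in for the real ClientManager API.

// Small sketch of the separate-key scheme for offline sessions.
#include <map>
#include <string>

std::string offlineKey(const std::string& cameraId) {
    return "offline-" + cameraId;  // mirrors kOfflineDevice + id above
}

int main() {
    std::map<std::string, std::string> activeClients;  // key -> client description
    activeClients["0"] = "online client";
    activeClients[offlineKey("0")] = "offline session client";
    activeClients.erase("0");  // evicting the online client leaves the offline one intact
    return activeClients.count(offlineKey("0")) == 1 ? 0 : 1;
}
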
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
index 28421ba..9deb662 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
@@ -470,7 +470,8 @@
}
binder::Status CameraDeviceClient::endConfigure(int operatingMode,
- const hardware::camera2::impl::CameraMetadataNative& sessionParams) {
+ const hardware::camera2::impl::CameraMetadataNative& sessionParams,
+ std::vector<int>* offlineStreamIds /*out*/) {
ATRACE_CALL();
ALOGV("%s: ending configure (%d input stream, %zu output surfaces)",
__FUNCTION__, mInputStream.configured ? 1 : 0,
@@ -479,6 +480,12 @@
binder::Status res;
if (!(res = checkPidStatus(__FUNCTION__)).isOk()) return res;
+ if (offlineStreamIds == nullptr) {
+ String8 msg = String8::format("Invalid offline stream ids");
+ ALOGE("%s: %s", __FUNCTION__, msg.string());
+ return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
+ }
+
Mutex::Autolock icl(mBinderSerializationLock);
if (!mDevice.get()) {
@@ -502,15 +509,41 @@
ALOGE("%s: %s", __FUNCTION__, msg.string());
res = STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.string());
} else {
+ offlineStreamIds->clear();
+ mDevice->getOfflineStreamIds(offlineStreamIds);
+
for (size_t i = 0; i < mCompositeStreamMap.size(); ++i) {
err = mCompositeStreamMap.valueAt(i)->configureStream();
- if (err != OK ) {
+ if (err != OK) {
String8 msg = String8::format("Camera %s: Error configuring composite "
"streams: %s (%d)", mCameraIdStr.string(), strerror(-err), err);
ALOGE("%s: %s", __FUNCTION__, msg.string());
res = STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.string());
break;
}
+
+ // Composite streams can only support offline mode if all of their individual internal
+ // streams are also supported.
+ std::vector<int> internalStreams;
+ mCompositeStreamMap.valueAt(i)->insertCompositeStreamIds(&internalStreams);
+ offlineStreamIds->erase(std::remove_if(offlineStreamIds->begin(),
+ offlineStreamIds->end(), [&internalStreams] (int streamId) {
+ auto it = std::find(internalStreams.begin(), internalStreams.end(),
+ streamId);
+ if (it != internalStreams.end()) {
+ internalStreams.erase(it);
+ return true;
+ }
+ return false;
+ }), offlineStreamIds->end());
+
+ if (internalStreams.empty()) {
+ offlineStreamIds->push_back(mCompositeStreamMap.valueAt(i)->getStreamId());
+ }
+ }
+
+ for (const auto& offlineStreamId : *offlineStreamIds) {
+ mStreamInfoMap[offlineStreamId].supportsOffline = true;
}
}
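To make the filtering above easier to follow, here is a self-contained example of the same erase-remove pattern with made-up stream ids: the composite's internal ids are stripped from the offline-capable list, and the composite's public id is advertised only when every internal id was offline capable.

    #include <algorithm>
    #include <cassert>
    #include <vector>

    int main() {
        std::vector<int> offlineStreamIds = {1, 2, 5};  // offline-capable ids from the HAL
        std::vector<int> internalStreams  = {1, 2};     // internal ids of one composite stream
        const int compositeStreamId = 7;                // id the client actually sees

        offlineStreamIds.erase(
                std::remove_if(offlineStreamIds.begin(), offlineStreamIds.end(),
                        [&internalStreams](int streamId) {
                            auto it = std::find(internalStreams.begin(),
                                    internalStreams.end(), streamId);
                            if (it != internalStreams.end()) {
                                internalStreams.erase(it);
                                return true;  // hide the internal id from clients
                            }
                            return false;
                        }),
                offlineStreamIds.end());

        // Every internal stream was offline capable, so the composite id is published.
        if (internalStreams.empty()) {
            offlineStreamIds.push_back(compositeStreamId);
        }
        assert((offlineStreamIds == std::vector<int>{5, 7}));
        return 0;
    }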
@@ -1903,7 +1936,7 @@
binder::Status CameraDeviceClient::switchToOffline(
const sp<hardware::camera2::ICameraDeviceCallbacks>& cameraCb,
- const std::vector<view::Surface>& offlineOutputs,
+ const std::vector<int>& offlineOutputIds,
/*out*/
sp<hardware::camera2::ICameraOfflineSession>* session) {
ATRACE_CALL();
@@ -1917,8 +1950,8 @@
return STATUS_ERROR(CameraService::ERROR_DISCONNECTED, "Camera device no longer alive");
}
- if (offlineOutputs.empty()) {
- String8 msg = String8::format("Offline outputs must not be empty");
+ if (offlineOutputIds.empty()) {
+ String8 msg = String8::format("Offline surfaces must not be empty");
ALOGE("%s: %s", __FUNCTION__, msg.string());
return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
}
@@ -1929,24 +1962,47 @@
return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
}
- std::vector<int32_t> offlineStreamIds(offlineOutputs.size());
- for (auto& surface : offlineOutputs) {
- sp<IBinder> binder = IInterface::asBinder(surface.graphicBufferProducer);
- ssize_t index = mStreamMap.indexOfKey(binder);
+ std::vector<int32_t> offlineStreamIds;
+ offlineStreamIds.reserve(offlineOutputIds.size());
+ KeyedVector<sp<IBinder>, sp<CompositeStream>> offlineCompositeStreamMap;
+ for (const auto& streamId : offlineOutputIds) {
+ ssize_t index = mConfiguredOutputs.indexOfKey(streamId);
if (index == NAME_NOT_FOUND) {
- String8 msg = String8::format("Offline output is invalid");
+ String8 msg = String8::format("Offline surface with id: %d is not registered",
+ streamId);
ALOGE("%s: %s", __FUNCTION__, msg.string());
return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
}
- // TODO: Also check whether the offline output is supported by Hal for offline mode.
- sp<Surface> s = new Surface(surface.graphicBufferProducer);
- bool isCompositeStream = camera3::DepthCompositeStream::isDepthCompositeStream(s);
- isCompositeStream |= camera3::HeicCompositeStream::isHeicCompositeStream(s);
- if (isCompositeStream) {
- // TODO: Add composite specific handling
- } else {
- offlineStreamIds.push_back(mStreamMap.valueAt(index).streamId());
+ if (!mStreamInfoMap[streamId].supportsOffline) {
+ String8 msg = String8::format("Offline surface with id: %d doesn't support "
+ "offline mode", streamId);
+ ALOGE("%s: %s", __FUNCTION__, msg.string());
+ return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
+ }
+
+ bool isCompositeStream = false;
+ for (const auto& gbp : mConfiguredOutputs[streamId].getGraphicBufferProducers()) {
+ sp<Surface> s = new Surface(gbp, false /*controlledByApp*/);
+ isCompositeStream = camera3::DepthCompositeStream::isDepthCompositeStream(s) |
+ camera3::HeicCompositeStream::isHeicCompositeStream(s);
+ if (isCompositeStream) {
+ auto compositeIdx = mCompositeStreamMap.indexOfKey(IInterface::asBinder(gbp));
+ if (compositeIdx == NAME_NOT_FOUND) {
+ ALOGE("%s: Unknown composite stream", __FUNCTION__);
+ return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
+ "Unknown composite stream");
+ }
+
+ mCompositeStreamMap.valueAt(compositeIdx)->insertCompositeStreamIds(
+ &offlineStreamIds);
+ offlineCompositeStreamMap.add(mCompositeStreamMap.keyAt(compositeIdx),
+ mCompositeStreamMap.valueAt(compositeIdx));
+ break;
+ }
+ }
+
+ if (!isCompositeStream) {
+ offlineStreamIds.push_back(streamId);
}
}
@@ -1958,16 +2014,36 @@
mCameraIdStr.string(), strerror(ret), ret);
}
- sp<CameraOfflineSessionClient> offlineClient = new CameraOfflineSessionClient(sCameraService,
- offlineSession, cameraCb, mClientPackageName, mCameraIdStr, mClientPid, mClientUid,
- mServicePid);
- ret = offlineClient->initialize();
+ sp<CameraOfflineSessionClient> offlineClient;
+ if (offlineSession.get() != nullptr) {
+ offlineClient = new CameraOfflineSessionClient(sCameraService,
+ offlineSession, offlineCompositeStreamMap, cameraCb, mClientPackageName,
+ mClientFeatureId, mCameraIdStr, mCameraFacing, mClientPid, mClientUid, mServicePid);
+ ret = sCameraService->addOfflineClient(mCameraIdStr, offlineClient);
+ }
+
if (ret == OK) {
- // TODO: We need to update mStreamMap, mConfiguredOutputs
+ // A successful offline session switch must reset the current camera client
+ // and release any resources occupied by previously configured streams.
+ mStreamMap.clear();
+ mConfiguredOutputs.clear();
+ mDeferredStreams.clear();
+ mStreamInfoMap.clear();
+ mCompositeStreamMap.clear();
+ mInputStream = {false, 0, 0, 0, 0};
} else {
- return STATUS_ERROR_FMT(CameraService::ERROR_ILLEGAL_ARGUMENT,
- "Camera %s: Failed to initilize offline session: %s (%d)",
- mCameraIdStr.string(), strerror(ret), ret);
+ switch(ret) {
+ case BAD_VALUE:
+ return STATUS_ERROR_FMT(CameraService::ERROR_ILLEGAL_ARGUMENT,
+ "Illegal argument to HAL module for camera \"%s\"", mCameraIdStr.c_str());
+ case TIMED_OUT:
+ return STATUS_ERROR_FMT(CameraService::ERROR_CAMERA_IN_USE,
+ "Camera \"%s\" is already open", mCameraIdStr.c_str());
+ default:
+ return STATUS_ERROR_FMT(CameraService::ERROR_INVALID_OPERATION,
+ "Failed to initialize camera \"%s\": %s (%d)", mCameraIdStr.c_str(),
+ strerror(-ret), ret);
+ }
}
*session = offlineClient;
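Taken together, the two interface changes imply roughly the following client-side flow. This is only a sketch; the exact signatures of the generated ICameraDeviceUser proxy (argument order, out-parameter passing) are assumptions rather than something shown in this patch:

    // `device` is assumed to be a connected sp<hardware::camera2::ICameraDeviceUser>,
    // `cb` an ICameraDeviceCallbacks, and `sessionParams` a CameraMetadataNative.
    std::vector<int> offlineCapableIds;
    binder::Status status = device->endConfigure(/*operatingMode*/ 0, sessionParams,
            &offlineCapableIds);
    if (!status.isOk()) {
        // configuration failed; offlineCapableIds is not meaningful
    }

    // Only ids reported by endConfigure() may be handed to switchToOffline();
    // anything else is rejected with ERROR_ILLEGAL_ARGUMENT above.
    std::vector<int> offlineOutputIds = {offlineCapableIds.front()};

    sp<hardware::camera2::ICameraOfflineSession> offlineSession;
    status = device->switchToOffline(cb, offlineOutputIds, &offlineSession);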
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.h b/services/camera/libcameraservice/api2/CameraDeviceClient.h
index 0a8f377..a3a3189 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.h
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.h
@@ -92,7 +92,9 @@
virtual binder::Status beginConfigure() override;
virtual binder::Status endConfigure(int operatingMode,
- const hardware::camera2::impl::CameraMetadataNative& sessionParams) override;
+ const hardware::camera2::impl::CameraMetadataNative& sessionParams,
+ /*out*/
+ std::vector<int>* offlineStreamIds) override;
// Verify specific session configuration.
virtual binder::Status isSessionConfigurationSupported(
@@ -160,7 +162,7 @@
virtual binder::Status switchToOffline(
const sp<hardware::camera2::ICameraDeviceCallbacks>& cameraCb,
- const std::vector<view::Surface>& offlineOutputs,
+ const std::vector<int>& offlineOutputIds,
/*out*/
sp<hardware::camera2::ICameraOfflineSession>* session) override;
diff --git a/services/camera/libcameraservice/api2/CameraOfflineSessionClient.cpp b/services/camera/libcameraservice/api2/CameraOfflineSessionClient.cpp
new file mode 100644
index 0000000..af7b9e1
--- /dev/null
+++ b/services/camera/libcameraservice/api2/CameraOfflineSessionClient.cpp
@@ -0,0 +1,287 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "CameraOfflineClient"
+#define ATRACE_TAG ATRACE_TAG_CAMERA
+//#define LOG_NDEBUG 0
+
+#include "CameraOfflineSessionClient.h"
+#include "utils/CameraThreadState.h"
+#include <utils/Trace.h>
+
+namespace android {
+
+using binder::Status;
+
+status_t CameraOfflineSessionClient::initialize(sp<CameraProviderManager>, const String8&) {
+ ATRACE_CALL();
+
+ // Verify ops permissions
+ auto res = startCameraOps();
+ if (res != OK) {
+ return res;
+ }
+
+ if (mOfflineSession.get() == nullptr) {
+ ALOGE("%s: Camera %s: No valid offline session",
+ __FUNCTION__, mCameraIdStr.string());
+ return NO_INIT;
+ }
+
+ wp<NotificationListener> weakThis(this);
+ res = mOfflineSession->initialize(weakThis);
+ if (res != OK) {
+ ALOGE("%s: Camera %s: unable to initialize device: %s (%d)",
+ __FUNCTION__, mCameraIdStr.string(), strerror(-res), res);
+ return res;
+ }
+
+ return OK;
+}
+
+status_t CameraOfflineSessionClient::dump(int fd, const Vector<String16>& args) {
+ return BasicClient::dump(fd, args);
+}
+
+status_t CameraOfflineSessionClient::dumpClient(int fd, const Vector<String16>& /*args*/) {
+ String8 result;
+
+ result = " Offline session dump:\n";
+ write(fd, result.string(), result.size());
+
+ if (mOfflineSession.get() == nullptr) {
+ result = " *** Offline session is detached\n";
+ write(fd, result.string(), result.size());
+ return NO_ERROR;
+ }
+
+ auto res = mOfflineSession->dump(fd);
+ if (res != OK) {
+ result = String8::format(" Error dumping offline session: %s (%d)",
+ strerror(-res), res);
+ write(fd, result.string(), result.size());
+ }
+
+ return OK;
+}
+
+binder::Status CameraOfflineSessionClient::disconnect() {
+ Mutex::Autolock icl(mBinderSerializationLock);
+
+ binder::Status res = Status::ok();
+ if (mDisconnected) {
+ return res;
+ }
+ // Allow both client and the media server to disconnect at all times
+ int callingPid = CameraThreadState::getCallingPid();
+ if (callingPid != mClientPid &&
+ callingPid != mServicePid) {
+ return res;
+ }
+
+ mDisconnected = true;
+
+ sCameraService->removeByClient(this);
+ sCameraService->logDisconnectedOffline(mCameraIdStr, mClientPid, String8(mClientPackageName));
+
+ sp<IBinder> remote = getRemote();
+ if (remote != nullptr) {
+ remote->unlinkToDeath(sCameraService);
+ }
+
+ finishCameraOps();
+ ALOGI("%s: Disconnected client for offline camera %s for PID %d", __FUNCTION__,
+ mCameraIdStr.string(), mClientPid);
+
+ // client shouldn't be able to call into us anymore
+ mClientPid = 0;
+
+ if (mOfflineSession.get() != nullptr) {
+ auto ret = mOfflineSession->disconnect();
+ if (ret != OK) {
+ ALOGE("%s: Failed disconnecting from offline session %s (%d)", __FUNCTION__,
+ strerror(-ret), ret);
+ }
+ mOfflineSession = nullptr;
+ }
+
+ for (size_t i = 0; i < mCompositeStreamMap.size(); i++) {
+ auto ret = mCompositeStreamMap.valueAt(i)->deleteInternalStreams();
+ if (ret != OK) {
+ ALOGE("%s: Failed removing composite stream %s (%d)", __FUNCTION__,
+ strerror(-ret), ret);
+ }
+ }
+ mCompositeStreamMap.clear();
+
+ return res;
+}
+
+void CameraOfflineSessionClient::notifyError(int32_t errorCode,
+ const CaptureResultExtras& resultExtras) {
+ // Thread safe. Don't bother locking.
+ // Composites can have multiple internal streams. Error notifications coming from such internal
+ // streams may need to remain within camera service.
+ bool skipClientNotification = false;
+ for (size_t i = 0; i < mCompositeStreamMap.size(); i++) {
+ skipClientNotification |= mCompositeStreamMap.valueAt(i)->onError(errorCode, resultExtras);
+ }
+
+ if ((mRemoteCallback.get() != nullptr) && (!skipClientNotification)) {
+ mRemoteCallback->onDeviceError(errorCode, resultExtras);
+ }
+}
+
+status_t CameraOfflineSessionClient::startCameraOps() {
+ ATRACE_CALL();
+ {
+ ALOGV("%s: Start camera ops, package name = %s, client UID = %d",
+ __FUNCTION__, String8(mClientPackageName).string(), mClientUid);
+ }
+
+ if (mAppOpsManager != nullptr) {
+ // Notify app ops that the camera is not available
+ mOpsCallback = new OpsCallback(this);
+ int32_t res;
+ // TODO : possibly change this to OP_OFFLINE_CAMERA_SESSION
+ mAppOpsManager->startWatchingMode(AppOpsManager::OP_CAMERA,
+ mClientPackageName, mOpsCallback);
+ // TODO : possibly change this to OP_OFFLINE_CAMERA_SESSION
+ res = mAppOpsManager->startOpNoThrow(AppOpsManager::OP_CAMERA,
+ mClientUid, mClientPackageName, /*startIfModeDefault*/ false);
+
+ if (res == AppOpsManager::MODE_ERRORED) {
+ ALOGI("Offline Camera %s: Access for \"%s\" has been revoked",
+ mCameraIdStr.string(), String8(mClientPackageName).string());
+ return PERMISSION_DENIED;
+ }
+
+ if (res == AppOpsManager::MODE_IGNORED) {
+ ALOGI("Offline Camera %s: Access for \"%s\" has been restricted",
+ mCameraIdStr.string(), String8(mClientPackageName).string());
+ // Return the same error as for device policy manager rejection
+ return -EACCES;
+ }
+ }
+
+ mOpsActive = true;
+
+ // Transition device state to OPEN
+ sCameraService->mUidPolicy->registerMonitorUid(mClientUid);
+
+ return OK;
+}
+
+status_t CameraOfflineSessionClient::finishCameraOps() {
+ ATRACE_CALL();
+
+ // Check if startCameraOps succeeded, and if so, finish the camera op
+ if (mOpsActive) {
+ // Notify app ops that the camera is available again
+ if (mAppOpsManager != nullptr) {
+ // TODO : possibly change this to OP_OFFLINE_CAMERA_SESSION
+ mAppOpsManager->finishOp(AppOpsManager::OP_CAMERA, mClientUid,
+ mClientPackageName);
+ mOpsActive = false;
+ }
+ }
+ // Always stop watching, even if no camera op is active
+ if (mOpsCallback != nullptr && mAppOpsManager != nullptr) {
+ mAppOpsManager->stopWatchingMode(mOpsCallback);
+ }
+ mOpsCallback.clear();
+
+ sCameraService->mUidPolicy->unregisterMonitorUid(mClientUid);
+
+ return OK;
+}
+
+void CameraOfflineSessionClient::onResultAvailable(const CaptureResult& result) {
+ ATRACE_CALL();
+ ALOGV("%s", __FUNCTION__);
+
+ if (mRemoteCallback.get() != NULL) {
+ mRemoteCallback->onResultReceived(result.mMetadata, result.mResultExtras,
+ result.mPhysicalMetadatas);
+ }
+
+ for (size_t i = 0; i < mCompositeStreamMap.size(); i++) {
+ mCompositeStreamMap.valueAt(i)->onResultAvailable(result);
+ }
+}
+
+void CameraOfflineSessionClient::notifyShutter(const CaptureResultExtras& resultExtras,
+ nsecs_t timestamp) {
+
+ if (mRemoteCallback.get() != nullptr) {
+ mRemoteCallback->onCaptureStarted(resultExtras, timestamp);
+ }
+
+ for (size_t i = 0; i < mCompositeStreamMap.size(); i++) {
+ mCompositeStreamMap.valueAt(i)->onShutter(resultExtras, timestamp);
+ }
+}
+
+void CameraOfflineSessionClient::notifyIdle() {
+ if (mRemoteCallback.get() != nullptr) {
+ mRemoteCallback->onDeviceIdle();
+ }
+}
+
+void CameraOfflineSessionClient::notifyAutoFocus(uint8_t newState, int triggerId) {
+ (void)newState;
+ (void)triggerId;
+
+ ALOGV("%s: Autofocus state now %d, last trigger %d",
+ __FUNCTION__, newState, triggerId);
+}
+
+void CameraOfflineSessionClient::notifyAutoExposure(uint8_t newState, int triggerId) {
+ (void)newState;
+ (void)triggerId;
+
+ ALOGV("%s: Autoexposure state now %d, last trigger %d",
+ __FUNCTION__, newState, triggerId);
+}
+
+void CameraOfflineSessionClient::notifyAutoWhitebalance(uint8_t newState, int triggerId) {
+ (void)newState;
+ (void)triggerId;
+
+ ALOGV("%s: Auto-whitebalance state now %d, last trigger %d", __FUNCTION__, newState,
+ triggerId);
+}
+
+void CameraOfflineSessionClient::notifyPrepared(int /*streamId*/) {
+ ALOGE("%s: Unexpected stream prepare notification in offline mode!", __FUNCTION__);
+ notifyError(hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_DEVICE,
+ CaptureResultExtras());
+}
+
+void CameraOfflineSessionClient::notifyRequestQueueEmpty() {
+ if (mRemoteCallback.get() != nullptr) {
+ mRemoteCallback->onRequestQueueEmpty();
+ }
+}
+
+void CameraOfflineSessionClient::notifyRepeatingRequestError(long /*lastFrameNumber*/) {
+ ALOGE("%s: Unexpected repeating request error in offline mode!", __FUNCTION__);
+ notifyError(hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_DEVICE,
+ CaptureResultExtras());
+}
+
+// ----------------------------------------------------------------------------
+}; // namespace android
diff --git a/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h b/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h
index cb83e29..b0f000d 100644
--- a/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h
+++ b/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h
@@ -19,65 +19,88 @@
#include <android/hardware/camera2/BnCameraOfflineSession.h>
#include <android/hardware/camera2/ICameraDeviceCallbacks.h>
+#include "common/FrameProcessorBase.h"
+#include "common/CameraDeviceBase.h"
#include "CameraService.h"
+#include "CompositeStream.h"
namespace android {
using android::hardware::camera2::ICameraDeviceCallbacks;
+using camera3::CompositeStream;
+// Client for an offline session. Note that the offline session client does not affect the camera
+// service's client arbitration logic; it is up to the camera HAL to decide whether a normal camera
+// client conflicts with existing offline client(s).
+// Another notable difference between offline clients and normal clients is that normal clients are
+// created through ICameraService binder calls, while the offline session client is created through
+// the ICameraDeviceUser::switchToOffline call.
class CameraOfflineSessionClient :
- public CameraService::OfflineClient,
- public hardware::camera2::BnCameraOfflineSession
- // public camera2::FrameProcessorBase::FilteredListener?
+ public CameraService::BasicClient,
+ public hardware::camera2::BnCameraOfflineSession,
+ public camera2::FrameProcessorBase::FilteredListener,
+ public NotificationListener
{
public:
CameraOfflineSessionClient(
const sp<CameraService>& cameraService,
sp<CameraOfflineSessionBase> session,
+ const KeyedVector<sp<IBinder>, sp<CompositeStream>>& offlineCompositeStreamMap,
const sp<ICameraDeviceCallbacks>& remoteCallback,
const String16& clientPackageName,
- const String8& cameraIdStr,
+ const std::unique_ptr<String16>& clientFeatureId,
+ const String8& cameraIdStr, int cameraFacing,
int clientPid, uid_t clientUid, int servicePid) :
- CameraService::OfflineClient(cameraService, clientPackageName,
- cameraIdStr, clientPid, clientUid, servicePid),
- mRemoteCallback(remoteCallback), mOfflineSession(session) {}
+ CameraService::BasicClient(
+ cameraService,
+ IInterface::asBinder(remoteCallback),
+ clientPackageName, clientFeatureId,
+ cameraIdStr, cameraFacing, clientPid, clientUid, servicePid),
+ mRemoteCallback(remoteCallback), mOfflineSession(session),
+ mCompositeStreamMap(offlineCompositeStreamMap) {}
- ~CameraOfflineSessionClient() {}
+ virtual ~CameraOfflineSessionClient() {}
- virtual binder::Status disconnect() override { return binder::Status::ok(); }
-
- virtual status_t dump(int /*fd*/, const Vector<String16>& /*args*/) override {
- return OK;
+ sp<IBinder> asBinderWrapper() override {
+ return IInterface::asBinder(this);
}
- // Block the client form using the camera
- virtual void block() override {};
+ binder::Status disconnect() override;
- // Return the package name for this client
- virtual String16 getPackageName() const override { String16 ret; return ret; };
+ status_t dump(int /*fd*/, const Vector<String16>& /*args*/) override;
- // Notify client about a fatal error
- // TODO: maybe let impl notify within block?
- virtual void notifyError(int32_t /*errorCode*/,
- const CaptureResultExtras& /*resultExtras*/) override {}
+ status_t dumpClient(int /*fd*/, const Vector<String16>& /*args*/) override;
- // Get the UID of the application client using this
- virtual uid_t getClientUid() const override { return 0; }
+ status_t initialize(sp<CameraProviderManager> /*manager*/,
+ const String8& /*monitorTags*/) override;
- // Get the PID of the application client using this
- virtual int getClientPid() const override { return 0; }
+ // permissions management
+ status_t startCameraOps() override;
+ status_t finishCameraOps() override;
- status_t initialize() {
- // TODO: Talk to camera service to add the offline session client book keeping
- return OK;
- }
+ // FilteredResultListener API
+ void onResultAvailable(const CaptureResult& result) override;
+
+ // NotificationListener API
+ void notifyError(int32_t errorCode, const CaptureResultExtras& resultExtras) override;
+ void notifyShutter(const CaptureResultExtras& resultExtras, nsecs_t timestamp) override;
+ void notifyIdle() override;
+ void notifyAutoFocus(uint8_t newState, int triggerId) override;
+ void notifyAutoExposure(uint8_t newState, int triggerId) override;
+ void notifyAutoWhitebalance(uint8_t newState, int triggerId) override;
+ void notifyPrepared(int streamId) override;
+ void notifyRequestQueueEmpty() override;
+ void notifyRepeatingRequestError(long lastFrameNumber) override;
+
private:
- sp<CameraOfflineSessionBase> mSession;
+ mutable Mutex mBinderSerializationLock;
sp<hardware::camera2::ICameraDeviceCallbacks> mRemoteCallback;
- // This class is responsible to convert HAL callbacks to AIDL callbacks
sp<CameraOfflineSessionBase> mOfflineSession;
+
+ // Offline composite stream map, output surface -> composite stream
+ KeyedVector<sp<IBinder>, sp<CompositeStream>> mCompositeStreamMap;
};
} // namespace android
diff --git a/services/camera/libcameraservice/api2/CompositeStream.h b/services/camera/libcameraservice/api2/CompositeStream.h
index a401a82..de894f3 100644
--- a/services/camera/libcameraservice/api2/CompositeStream.h
+++ b/services/camera/libcameraservice/api2/CompositeStream.h
@@ -64,6 +64,10 @@
virtual status_t insertGbp(SurfaceMap* /*out*/outSurfaceMap,
Vector<int32_t>* /*out*/outputStreamIds, int32_t* /*out*/currentStreamId) = 0;
+ // Attach the internal composite stream ids.
+ virtual status_t insertCompositeStreamIds(
+ std::vector<int32_t>* compositeStreamIds /*out*/) = 0;
+
// Return composite stream id.
virtual int getStreamId() = 0;
diff --git a/services/camera/libcameraservice/api2/DepthCompositeStream.cpp b/services/camera/libcameraservice/api2/DepthCompositeStream.cpp
index ac6f8d6..acad8c6 100644
--- a/services/camera/libcameraservice/api2/DepthCompositeStream.cpp
+++ b/services/camera/libcameraservice/api2/DepthCompositeStream.cpp
@@ -683,6 +683,18 @@
return NO_ERROR;
}
+status_t DepthCompositeStream::insertCompositeStreamIds(
+ std::vector<int32_t>* compositeStreamIds /*out*/) {
+ if (compositeStreamIds == nullptr) {
+ return BAD_VALUE;
+ }
+
+ compositeStreamIds->push_back(mDepthStreamId);
+ compositeStreamIds->push_back(mBlobStreamId);
+
+ return OK;
+}
+
void DepthCompositeStream::onResultError(const CaptureResultExtras& resultExtras) {
// Processing can continue even in case of result errors.
// At the moment depth composite stream processing relies mainly on static camera
diff --git a/services/camera/libcameraservice/api2/DepthCompositeStream.h b/services/camera/libcameraservice/api2/DepthCompositeStream.h
index 60c6b1e..1bf714d 100644
--- a/services/camera/libcameraservice/api2/DepthCompositeStream.h
+++ b/services/camera/libcameraservice/api2/DepthCompositeStream.h
@@ -56,6 +56,7 @@
status_t configureStream() override;
status_t insertGbp(SurfaceMap* /*out*/outSurfaceMap, Vector<int32_t>* /*out*/outputStreamIds,
int32_t* /*out*/currentStreamId) override;
+ status_t insertCompositeStreamIds(std::vector<int32_t>* compositeStreamIds /*out*/) override;
int getStreamId() override { return mBlobStreamId; }
// CpuConsumer listener implementation
diff --git a/services/camera/libcameraservice/api2/HeicCompositeStream.cpp b/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
index 9cdad70..d25e467 100644
--- a/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
+++ b/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
@@ -504,6 +504,18 @@
return NO_ERROR;
}
+status_t HeicCompositeStream::insertCompositeStreamIds(
+ std::vector<int32_t>* compositeStreamIds /*out*/) {
+ if (compositeStreamIds == nullptr) {
+ return BAD_VALUE;
+ }
+
+ compositeStreamIds->push_back(mAppSegmentStreamId);
+ compositeStreamIds->push_back(mMainImageStreamId);
+
+ return OK;
+}
+
void HeicCompositeStream::onShutter(const CaptureResultExtras& resultExtras, nsecs_t timestamp) {
Mutex::Autolock l(mMutex);
if (mErrorState) {
diff --git a/services/camera/libcameraservice/api2/HeicCompositeStream.h b/services/camera/libcameraservice/api2/HeicCompositeStream.h
index e88471d..8fc521e 100644
--- a/services/camera/libcameraservice/api2/HeicCompositeStream.h
+++ b/services/camera/libcameraservice/api2/HeicCompositeStream.h
@@ -55,6 +55,8 @@
status_t insertGbp(SurfaceMap* /*out*/outSurfaceMap, Vector<int32_t>* /*out*/outputStreamIds,
int32_t* /*out*/currentStreamId) override;
+ status_t insertCompositeStreamIds(std::vector<int32_t>* compositeStreamIds /*out*/) override;
+
void onShutter(const CaptureResultExtras& resultExtras, nsecs_t timestamp) override;
int getStreamId() override { return mMainImageStreamId; }
diff --git a/services/camera/libcameraservice/common/Camera2ClientBase.cpp b/services/camera/libcameraservice/common/Camera2ClientBase.cpp
index 8792f9a..0a41776 100644
--- a/services/camera/libcameraservice/common/Camera2ClientBase.cpp
+++ b/services/camera/libcameraservice/common/Camera2ClientBase.cpp
@@ -111,7 +111,7 @@
return res;
}
- wp<CameraDeviceBase::NotificationListener> weakThis(this);
+ wp<NotificationListener> weakThis(this);
res = mDevice->setNotifyCallback(weakThis);
return OK;
diff --git a/services/camera/libcameraservice/common/Camera2ClientBase.h b/services/camera/libcameraservice/common/Camera2ClientBase.h
index 12cba0b..042f5aa 100644
--- a/services/camera/libcameraservice/common/Camera2ClientBase.h
+++ b/services/camera/libcameraservice/common/Camera2ClientBase.h
@@ -29,7 +29,7 @@
template <typename TClientBase>
class Camera2ClientBase :
public TClientBase,
- public CameraDeviceBase::NotificationListener
+ public NotificationListener
{
public:
typedef typename TClientBase::TCamCallbacks TCamCallbacks;
@@ -61,7 +61,7 @@
virtual status_t dumpClient(int fd, const Vector<String16>& args);
/**
- * CameraDeviceBase::NotificationListener implementation
+ * NotificationListener implementation
*/
virtual void notifyError(int32_t errorCode,
diff --git a/services/camera/libcameraservice/common/CameraDeviceBase.cpp b/services/camera/libcameraservice/common/CameraDeviceBase.cpp
index 6c4e87f..0efe4bc 100644
--- a/services/camera/libcameraservice/common/CameraDeviceBase.cpp
+++ b/services/camera/libcameraservice/common/CameraDeviceBase.cpp
@@ -24,7 +24,4 @@
CameraDeviceBase::~CameraDeviceBase() {
}
-CameraDeviceBase::NotificationListener::~NotificationListener() {
-}
-
} // namespace android
diff --git a/services/camera/libcameraservice/common/CameraDeviceBase.h b/services/camera/libcameraservice/common/CameraDeviceBase.h
index 1026fdf..2f01198 100644
--- a/services/camera/libcameraservice/common/CameraDeviceBase.h
+++ b/services/camera/libcameraservice/common/CameraDeviceBase.h
@@ -234,6 +234,12 @@
virtual status_t configureStreams(const CameraMetadata& sessionParams,
int operatingMode = 0) = 0;
+ /**
+ * Retrieve a list of all stream ids that were advertised as capable of
+ * supporting offline processing mode by the HAL after the last stream configuration.
+ */
+ virtual void getOfflineStreamIds(std::vector<int> *offlineStreamIds) = 0;
+
// get the buffer producer of the input stream
virtual status_t getInputBufferProducer(
sp<IGraphicBufferProducer> *producer) = 0;
@@ -259,35 +265,6 @@
virtual ssize_t getJpegBufferSize(uint32_t width, uint32_t height) const = 0;
/**
- * Abstract class for HAL notification listeners
- */
- class NotificationListener : public virtual RefBase {
- public:
- // The set of notifications is a merge of the notifications required for
- // API1 and API2.
-
- // Required for API 1 and 2
- virtual void notifyError(int32_t errorCode,
- const CaptureResultExtras &resultExtras) = 0;
-
- // Required only for API2
- virtual void notifyIdle() = 0;
- virtual void notifyShutter(const CaptureResultExtras &resultExtras,
- nsecs_t timestamp) = 0;
- virtual void notifyPrepared(int streamId) = 0;
- virtual void notifyRequestQueueEmpty() = 0;
-
- // Required only for API1
- virtual void notifyAutoFocus(uint8_t newState, int triggerId) = 0;
- virtual void notifyAutoExposure(uint8_t newState, int triggerId) = 0;
- virtual void notifyAutoWhitebalance(uint8_t newState,
- int triggerId) = 0;
- virtual void notifyRepeatingRequestError(long lastFrameNumber) = 0;
- protected:
- virtual ~NotificationListener();
- };
-
- /**
* Connect HAL notifications to a listener. Overwrites previous
* listener. Set to NULL to stop receiving notifications.
*/
diff --git a/services/camera/libcameraservice/common/CameraOfflineSessionBase.h b/services/camera/libcameraservice/common/CameraOfflineSessionBase.h
index 3862521..05ea7fb 100644
--- a/services/camera/libcameraservice/common/CameraOfflineSessionBase.h
+++ b/services/camera/libcameraservice/common/CameraOfflineSessionBase.h
@@ -25,10 +25,42 @@
namespace android {
+/**
+ * Abstract class for HAL notification listeners
+ */
+class NotificationListener : public virtual RefBase {
+ public:
+ // The set of notifications is a merge of the notifications required for
+ // API1 and API2.
+
+ // Required for API 1 and 2
+ virtual void notifyError(int32_t errorCode,
+ const CaptureResultExtras &resultExtras) = 0;
+
+ // Required only for API2
+ virtual void notifyIdle() = 0;
+ virtual void notifyShutter(const CaptureResultExtras &resultExtras,
+ nsecs_t timestamp) = 0;
+ virtual void notifyPrepared(int streamId) = 0;
+ virtual void notifyRequestQueueEmpty() = 0;
+
+ // Required only for API1
+ virtual void notifyAutoFocus(uint8_t newState, int triggerId) = 0;
+ virtual void notifyAutoExposure(uint8_t newState, int triggerId) = 0;
+ virtual void notifyAutoWhitebalance(uint8_t newState,
+ int triggerId) = 0;
+ virtual void notifyRepeatingRequestError(long lastFrameNumber) = 0;
+ protected:
+ virtual ~NotificationListener() {}
+};
+
class CameraOfflineSessionBase : public virtual RefBase {
public:
virtual ~CameraOfflineSessionBase();
+ virtual status_t initialize(
+ wp<NotificationListener> listener) = 0;
+
// The session's original camera ID
virtual const String8& getId() const = 0;
@@ -36,8 +68,6 @@
virtual status_t dump(int fd) = 0;
- virtual status_t abort() = 0;
-
/**
* Capture result passing
*/
diff --git a/services/camera/libcameraservice/common/CameraProviderManager.cpp b/services/camera/libcameraservice/common/CameraProviderManager.cpp
index 0f74a48..57f812f 100644
--- a/services/camera/libcameraservice/common/CameraProviderManager.cpp
+++ b/services/camera/libcameraservice/common/CameraProviderManager.cpp
@@ -1255,20 +1255,6 @@
mMinorVersion = 4;
}
- // cameraDeviceStatusChange callbacks may be called (and causing new devices added)
- // before setCallback returns
- hardware::Return<Status> status = interface->setCallback(this);
- if (!status.isOk()) {
- ALOGE("%s: Transaction error setting up callbacks with camera provider '%s': %s",
- __FUNCTION__, mProviderName.c_str(), status.description().c_str());
- return DEAD_OBJECT;
- }
- if (status != Status::OK) {
- ALOGE("%s: Unable to register callbacks with camera provider '%s'",
- __FUNCTION__, mProviderName.c_str());
- return mapToStatusT(status);
- }
-
hardware::Return<bool> linked = interface->linkToDeath(this, /*cookie*/ mId);
if (!linked.isOk()) {
ALOGE("%s: Transaction error in linking to camera provider '%s' death: %s",
@@ -1297,6 +1283,7 @@
return res;
}
+ Status status;
// Get initial list of camera devices, if any
std::vector<std::string> devices;
hardware::Return<void> ret = interface->getCameraIdList([&status, this, &devices](
@@ -1353,6 +1340,22 @@
}
}
+ // cameraDeviceStatusChange callbacks may be called (and causing new devices added)
+ // before setCallback returns. setCallback must be called after addDevice so that
+ // the physical camera status callback can look up available regular
+ // cameras.
+ hardware::Return<Status> st = interface->setCallback(this);
+ if (!st.isOk()) {
+ ALOGE("%s: Transaction error setting up callbacks with camera provider '%s': %s",
+ __FUNCTION__, mProviderName.c_str(), st.description().c_str());
+ return DEAD_OBJECT;
+ }
+ if (st != Status::OK) {
+ ALOGE("%s: Unable to register callbacks with camera provider '%s'",
+ __FUNCTION__, mProviderName.c_str());
+ return mapToStatusT(st);
+ }
+
ALOGI("Camera provider %s ready with %zu camera devices",
mProviderName.c_str(), mDevices.size());
@@ -1604,6 +1607,61 @@
return hardware::Void();
}
+hardware::Return<void> CameraProviderManager::ProviderInfo::physicalCameraDeviceStatusChange(
+ const hardware::hidl_string& cameraDeviceName,
+ const hardware::hidl_string& physicalCameraDeviceName,
+ CameraDeviceStatus newStatus) {
+ sp<StatusListener> listener;
+ std::string id;
+ bool initialized = false;
+ {
+ std::lock_guard<std::mutex> lock(mLock);
+ bool known = false;
+ for (auto& deviceInfo : mDevices) {
+ if (deviceInfo->mName == cameraDeviceName) {
+ id = deviceInfo->mId;
+
+ if (!deviceInfo->mIsLogicalCamera) {
+ ALOGE("%s: Invalid combination of camera id %s, physical id %s",
+ __FUNCTION__, id.c_str(), physicalCameraDeviceName.c_str());
+ return hardware::Void();
+ }
+ if (std::find(deviceInfo->mPhysicalIds.begin(), deviceInfo->mPhysicalIds.end(),
+ physicalCameraDeviceName) == deviceInfo->mPhysicalIds.end()) {
+ ALOGE("%s: Invalid combination of camera id %s, physical id %s",
+ __FUNCTION__, id.c_str(), physicalCameraDeviceName.c_str());
+ return hardware::Void();
+ }
+ ALOGI("Camera device %s physical device %s status is now %s, was %s",
+ cameraDeviceName.c_str(), physicalCameraDeviceName.c_str(),
+ deviceStatusToString(newStatus), deviceStatusToString(
+ deviceInfo->mPhysicalStatus[physicalCameraDeviceName]));
+ known = true;
+ break;
+ }
+ }
+ // Previously unseen device or physical camera; ignore the status update
+ if (!known) {
+ ALOGW("Camera provider %s says an unknown camera device %s-%s is not present. Curious.",
+ mProviderName.c_str(), cameraDeviceName.c_str(),
+ physicalCameraDeviceName.c_str());
+ return hardware::Void();
+ }
+ listener = mManager->getStatusListener();
+ initialized = mInitialized;
+ }
+ // Call without lock held to allow reentrancy into provider manager
+ // Don't send the callback if providerInfo hasn't been initialized.
+ // CameraService will initialize device status after provider is
+ // initialized
+ if (listener != nullptr && initialized) {
+ String8 physicalId(physicalCameraDeviceName.c_str());
+ listener->onDeviceStatusChanged(String8(id.c_str()),
+ physicalId, newStatus);
+ }
+ return hardware::Void();
+}
+
hardware::Return<void> CameraProviderManager::ProviderInfo::torchModeStatusChange(
const hardware::hidl_string& cameraDeviceName,
TorchModeStatus newStatus) {
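On the service side, the new per-physical-camera callback path roughly feeds the bookkeeping added to CameraService.h earlier in this patch (via the new StatusListener::onDeviceStatusChanged overload declared in CameraProviderManager.h below). The following is a simplified sketch only; helper names such as getCameraState() and mapToInternal() are assumed for illustration, and the real wiring lives outside this hunk:

    void CameraService::onDeviceStatusChanged(const String8& id, const String8& physicalId,
            hardware::camera::common::V1_0::CameraDeviceStatus newHalStatus) {
        StatusInternal newStatus = mapToInternal(newHalStatus);
        std::shared_ptr<CameraState> state = getCameraState(id);
        if (state == nullptr) {
            ALOGE("%s: Physical status change for missing camera %s", __FUNCTION__, id.string());
            return;
        }

        bool updated;
        if (newStatus == StatusInternal::NOT_PRESENT) {
            updated = state->addUnavailablePhysicalId(physicalId);
        } else {
            updated = state->removeUnavailablePhysicalId(physicalId);
        }

        if (updated) {
            // notifyPhysicalCameraStatusLocked() then fans this out to registered
            // ICameraServiceListener clients via onPhysicalCameraStatusChanged().
        }
    }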
diff --git a/services/camera/libcameraservice/common/CameraProviderManager.h b/services/camera/libcameraservice/common/CameraProviderManager.h
index 58df0e8..3eba162 100644
--- a/services/camera/libcameraservice/common/CameraProviderManager.h
+++ b/services/camera/libcameraservice/common/CameraProviderManager.h
@@ -29,6 +29,7 @@
#include <utils/Errors.h>
#include <android/hardware/camera/common/1.0/types.h>
#include <android/hardware/camera/provider/2.5/ICameraProvider.h>
+#include <android/hardware/camera/provider/2.6/ICameraProviderCallback.h>
#include <android/hardware/camera/device/3.4/ICameraDeviceSession.h>
#include <android/hidl/manager/1.0/IServiceNotification.h>
#include <camera/VendorTagDescriptor.h>
@@ -136,6 +137,9 @@
virtual void onDeviceStatusChanged(const String8 &cameraId,
hardware::camera::common::V1_0::CameraDeviceStatus newStatus) = 0;
+ virtual void onDeviceStatusChanged(const String8 &cameraId,
+ const String8 &physicalCameraId,
+ hardware::camera::common::V1_0::CameraDeviceStatus newStatus) = 0;
virtual void onTorchStatusChanged(const String8 &cameraId,
hardware::camera::common::V1_0::TorchModeStatus newStatus) = 0;
virtual void onNewProviderRegistered() = 0;
@@ -342,7 +346,7 @@
std::mutex mProviderInterfaceMapLock;
struct ProviderInfo :
- virtual public hardware::camera::provider::V2_4::ICameraProviderCallback,
+ virtual public hardware::camera::provider::V2_6::ICameraProviderCallback,
virtual public hardware::hidl_death_recipient
{
const std::string mProviderName;
@@ -380,12 +384,16 @@
status_t dump(int fd, const Vector<String16>& args) const;
// ICameraProviderCallbacks interface - these lock the parent mInterfaceMutex
- virtual hardware::Return<void> cameraDeviceStatusChange(
+ hardware::Return<void> cameraDeviceStatusChange(
const hardware::hidl_string& cameraDeviceName,
hardware::camera::common::V1_0::CameraDeviceStatus newStatus) override;
- virtual hardware::Return<void> torchModeStatusChange(
+ hardware::Return<void> torchModeStatusChange(
const hardware::hidl_string& cameraDeviceName,
hardware::camera::common::V1_0::TorchModeStatus newStatus) override;
+ hardware::Return<void> physicalCameraDeviceStatusChange(
+ const hardware::hidl_string& cameraDeviceName,
+ const hardware::hidl_string& physicalCameraDeviceName,
+ hardware::camera::common::V1_0::CameraDeviceStatus newStatus) override;
// hidl_death_recipient interface - this locks the parent mInterfaceMutex
virtual void serviceDied(uint64_t cookie, const wp<hidl::base::V1_0::IBase>& who) override;
@@ -417,6 +425,8 @@
const hardware::camera::common::V1_0::CameraResourceCost mResourceCost;
hardware::camera::common::V1_0::CameraDeviceStatus mStatus;
+ std::map<std::string, hardware::camera::common::V1_0::CameraDeviceStatus>
+ mPhysicalStatus;
sp<ProviderInfo> mParentProvider;
diff --git a/services/camera/libcameraservice/device3/BufferUtils.cpp b/services/camera/libcameraservice/device3/BufferUtils.cpp
new file mode 100644
index 0000000..cc29390
--- /dev/null
+++ b/services/camera/libcameraservice/device3/BufferUtils.cpp
@@ -0,0 +1,277 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "Camera3-BufUtils"
+#define ATRACE_TAG ATRACE_TAG_CAMERA
+//#define LOG_NDEBUG 0
+//#define LOG_NNDEBUG 0 // Per-frame verbose logging
+
+#include <inttypes.h>
+
+#include <utils/Log.h>
+
+#include "device3/BufferUtils.h"
+
+namespace android {
+namespace camera3 {
+
+camera3_buffer_status_t mapHidlBufferStatus(hardware::camera::device::V3_2::BufferStatus status) {
+ using hardware::camera::device::V3_2::BufferStatus;
+
+ switch (status) {
+ case BufferStatus::OK: return CAMERA3_BUFFER_STATUS_OK;
+ case BufferStatus::ERROR: return CAMERA3_BUFFER_STATUS_ERROR;
+ }
+ return CAMERA3_BUFFER_STATUS_ERROR;
+}
+
+void BufferRecords::takeInflightBufferMap(BufferRecords& other) {
+ std::lock_guard<std::mutex> oLock(other.mInflightLock);
+ std::lock_guard<std::mutex> lock(mInflightLock);
+ if (mInflightBufferMap.size() > 0) {
+ ALOGE("%s: inflight map is set in non-empty state!", __FUNCTION__);
+ }
+ mInflightBufferMap = std::move(other.mInflightBufferMap);
+ other.mInflightBufferMap.clear();
+}
+
+void BufferRecords::takeRequestedBufferMap(BufferRecords& other) {
+ std::lock_guard<std::mutex> oLock(other.mRequestedBuffersLock);
+ std::lock_guard<std::mutex> lock(mRequestedBuffersLock);
+ if (mRequestedBufferMap.size() > 0) {
+ ALOGE("%s: requested buffer map is set in non-empty state!", __FUNCTION__);
+ }
+ mRequestedBufferMap = std::move(other.mRequestedBufferMap);
+ other.mRequestedBufferMap.clear();
+}
+
+void BufferRecords::takeBufferCaches(BufferRecords& other, const std::vector<int32_t>& streams) {
+ std::lock_guard<std::mutex> oLock(other.mBufferIdMapLock);
+ std::lock_guard<std::mutex> lock(mBufferIdMapLock);
+ if (mBufferIdMaps.size() > 0) {
+ ALOGE("%s: buffer ID map is set in non-empty state!", __FUNCTION__);
+ }
+ for (auto streamId : streams) {
+ mBufferIdMaps.insert({streamId, std::move(other.mBufferIdMaps.at(streamId))});
+ }
+ other.mBufferIdMaps.clear();
+}
+
+std::pair<bool, uint64_t> BufferRecords::getBufferId(
+ const buffer_handle_t& buf, int streamId) {
+ std::lock_guard<std::mutex> lock(mBufferIdMapLock);
+
+ BufferIdMap& bIdMap = mBufferIdMaps.at(streamId);
+ auto it = bIdMap.find(buf);
+ if (it == bIdMap.end()) {
+ bIdMap[buf] = mNextBufferId++;
+ ALOGV("stream %d now have %zu buffer caches, buf %p",
+ streamId, bIdMap.size(), buf);
+ return std::make_pair(true, mNextBufferId - 1);
+ } else {
+ return std::make_pair(false, it->second);
+ }
+}
+
+void BufferRecords::tryCreateBufferCache(int streamId) {
+ std::lock_guard<std::mutex> lock(mBufferIdMapLock);
+ if (mBufferIdMaps.count(streamId) == 0) {
+ mBufferIdMaps.emplace(streamId, BufferIdMap{});
+ }
+}
+
+void BufferRecords::removeInactiveBufferCaches(const std::set<int32_t>& activeStreams) {
+ std::lock_guard<std::mutex> lock(mBufferIdMapLock);
+ for(auto it = mBufferIdMaps.begin(); it != mBufferIdMaps.end();) {
+ int streamId = it->first;
+ bool active = activeStreams.count(streamId) > 0;
+ if (!active) {
+ it = mBufferIdMaps.erase(it);
+ } else {
+ ++it;
+ }
+ }
+}
+
+uint64_t BufferRecords::removeOneBufferCache(int streamId, const native_handle_t* handle) {
+ std::lock_guard<std::mutex> lock(mBufferIdMapLock);
+ uint64_t bufferId = BUFFER_ID_NO_BUFFER;
+ auto mapIt = mBufferIdMaps.find(streamId);
+ if (mapIt == mBufferIdMaps.end()) {
+ // streamId might be from a deleted stream here
+ ALOGI("%s: stream %d has been removed",
+ __FUNCTION__, streamId);
+ return BUFFER_ID_NO_BUFFER;
+ }
+ BufferIdMap& bIdMap = mapIt->second;
+ auto it = bIdMap.find(handle);
+ if (it == bIdMap.end()) {
+ ALOGW("%s: cannot find buffer %p in stream %d",
+ __FUNCTION__, handle, streamId);
+ return BUFFER_ID_NO_BUFFER;
+ } else {
+ bufferId = it->second;
+ bIdMap.erase(it);
+ ALOGV("%s: stream %d now have %zu buffer caches after removing buf %p",
+ __FUNCTION__, streamId, bIdMap.size(), handle);
+ }
+ return bufferId;
+}
+
+std::vector<uint64_t> BufferRecords::clearBufferCaches(int streamId) {
+ std::lock_guard<std::mutex> lock(mBufferIdMapLock);
+ std::vector<uint64_t> ret;
+ auto mapIt = mBufferIdMaps.find(streamId);
+ if (mapIt == mBufferIdMaps.end()) {
+ ALOGE("%s: streamId %d not found!", __FUNCTION__, streamId);
+ return ret;
+ }
+ BufferIdMap& bIdMap = mapIt->second;
+ ret.reserve(bIdMap.size());
+ for (const auto& it : bIdMap) {
+ ret.push_back(it.second);
+ }
+ bIdMap.clear();
+ return ret;
+}
+
+bool BufferRecords::isStreamCached(int streamId) {
+ std::lock_guard<std::mutex> lock(mBufferIdMapLock);
+ return mBufferIdMaps.find(streamId) != mBufferIdMaps.end();
+}
+
+bool BufferRecords::verifyBufferIds(
+ int32_t streamId, std::vector<uint64_t>& bufIds) {
+ std::lock_guard<std::mutex> lock(mBufferIdMapLock);
+ camera3::BufferIdMap& bIdMap = mBufferIdMaps.at(streamId);
+ if (bIdMap.size() != bufIds.size()) {
+ ALOGE("%s: stream ID %d buffer cache number mismatch: %zu/%zu (service/HAL)",
+ __FUNCTION__, streamId, bIdMap.size(), bufIds.size());
+ return false;
+ }
+ std::vector<uint64_t> internalBufIds;
+ internalBufIds.reserve(bIdMap.size());
+ for (const auto& pair : bIdMap) {
+ internalBufIds.push_back(pair.second);
+ }
+ std::sort(bufIds.begin(), bufIds.end());
+ std::sort(internalBufIds.begin(), internalBufIds.end());
+ for (size_t i = 0; i < bufIds.size(); i++) {
+ if (bufIds[i] != internalBufIds[i]) {
+ ALOGE("%s: buffer cache mismatch! Service %" PRIu64 ", HAL %" PRIu64,
+ __FUNCTION__, internalBufIds[i], bufIds[i]);
+ return false;
+ }
+ }
+ return true;
+}
+
+void BufferRecords::getInflightBufferKeys(
+ std::vector<std::pair<int32_t, int32_t>>* out) {
+ std::lock_guard<std::mutex> lock(mInflightLock);
+ out->clear();
+ out->reserve(mInflightBufferMap.size());
+ for (auto& pair : mInflightBufferMap) {
+ uint64_t key = pair.first;
+ int32_t streamId = key & 0xFFFFFFFF;
+ int32_t frameNumber = (key >> 32) & 0xFFFFFFFF;
+ out->push_back(std::make_pair(frameNumber, streamId));
+ }
+ return;
+}
+
+status_t BufferRecords::pushInflightBuffer(
+ int32_t frameNumber, int32_t streamId, buffer_handle_t *buffer) {
+ std::lock_guard<std::mutex> lock(mInflightLock);
+ uint64_t key = static_cast<uint64_t>(frameNumber) << 32 | static_cast<uint64_t>(streamId);
+ mInflightBufferMap[key] = buffer;
+ return OK;
+}
+
+status_t BufferRecords::popInflightBuffer(
+ int32_t frameNumber, int32_t streamId,
+ /*out*/ buffer_handle_t **buffer) {
+ std::lock_guard<std::mutex> lock(mInflightLock);
+
+ uint64_t key = static_cast<uint64_t>(frameNumber) << 32 | static_cast<uint64_t>(streamId);
+ auto it = mInflightBufferMap.find(key);
+ if (it == mInflightBufferMap.end()) return NAME_NOT_FOUND;
+ if (buffer != nullptr) {
+ *buffer = it->second;
+ }
+ mInflightBufferMap.erase(it);
+ return OK;
+}
+
+void BufferRecords::popInflightBuffers(
+ const std::vector<std::pair<int32_t, int32_t>>& buffers) {
+ for (const auto& pair : buffers) {
+ int32_t frameNumber = pair.first;
+ int32_t streamId = pair.second;
+ popInflightBuffer(frameNumber, streamId, nullptr);
+ }
+}
+
+status_t BufferRecords::pushInflightRequestBuffer(
+ uint64_t bufferId, buffer_handle_t* buf, int32_t streamId) {
+ std::lock_guard<std::mutex> lock(mRequestedBuffersLock);
+ auto pair = mRequestedBufferMap.insert({bufferId, {streamId, buf}});
+ if (!pair.second) {
+ ALOGE("%s: bufId %" PRIu64 " is already inflight!",
+ __FUNCTION__, bufferId);
+ return BAD_VALUE;
+ }
+ return OK;
+}
+
+// Find and pop a buffer_handle_t based on bufferId
+status_t BufferRecords::popInflightRequestBuffer(
+ uint64_t bufferId,
+ /*out*/ buffer_handle_t** buffer,
+ /*optional out*/ int32_t* streamId) {
+ if (buffer == nullptr) {
+ ALOGE("%s: buffer (%p) must not be null", __FUNCTION__, buffer);
+ return BAD_VALUE;
+ }
+ std::lock_guard<std::mutex> lock(mRequestedBuffersLock);
+ auto it = mRequestedBufferMap.find(bufferId);
+ if (it == mRequestedBufferMap.end()) {
+ ALOGE("%s: bufId %" PRIu64 " is not inflight!",
+ __FUNCTION__, bufferId);
+ return BAD_VALUE;
+ }
+ *buffer = it->second.second;
+ if (streamId != nullptr) {
+ *streamId = it->second.first;
+ }
+ mRequestedBufferMap.erase(it);
+ return OK;
+}
+
+void BufferRecords::getInflightRequestBufferKeys(
+ std::vector<uint64_t>* out) {
+ std::lock_guard<std::mutex> lock(mRequestedBuffersLock);
+ out->clear();
+ out->reserve(mRequestedBufferMap.size());
+ for (auto& pair : mRequestedBufferMap) {
+ out->push_back(pair.first);
+ }
+ return;
+}
+
+
+} // camera3
+} // namespace android
diff --git a/services/camera/libcameraservice/device3/BufferUtils.h b/services/camera/libcameraservice/device3/BufferUtils.h
new file mode 100644
index 0000000..452a908
--- /dev/null
+++ b/services/camera/libcameraservice/device3/BufferUtils.h
@@ -0,0 +1,167 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_SERVERS_CAMERA3_BUFFER_UTILS_H
+#define ANDROID_SERVERS_CAMERA3_BUFFER_UTILS_H
+
+#include <unordered_map>
+#include <mutex>
+#include <set>
+
+#include <cutils/native_handle.h>
+
+#include <android/hardware/camera/device/3.2/ICameraDevice.h>
+
+// TODO: remove legacy camera3.h references
+#include "hardware/camera3.h"
+
+#include <device3/Camera3OutputInterface.h>
+
+namespace android {
+
+namespace camera3 {
+
+ struct BufferHasher {
+ size_t operator()(const buffer_handle_t& buf) const {
+ if (buf == nullptr)
+ return 0;
+
+ size_t result = 1;
+ result = 31 * result + buf->numFds;
+ for (int i = 0; i < buf->numFds; i++) {
+ result = 31 * result + buf->data[i];
+ }
+ return result;
+ }
+ };
+
+ struct BufferComparator {
+ bool operator()(const buffer_handle_t& buf1, const buffer_handle_t& buf2) const {
+ if (buf1->numFds == buf2->numFds) {
+ for (int i = 0; i < buf1->numFds; i++) {
+ if (buf1->data[i] != buf2->data[i]) {
+ return false;
+ }
+ }
+ return true;
+ }
+ return false;
+ }
+ };
+
+ // Per stream buffer native handle -> bufId map
+ typedef std::unordered_map<const buffer_handle_t, uint64_t,
+ BufferHasher, BufferComparator> BufferIdMap;
+
+ // streamId -> BufferIdMap
+ typedef std::unordered_map<int, BufferIdMap> BufferIdMaps;
+
+ // Map of inflight buffers sent along in capture requests.
+ // Key is composed by (frameNumber << 32 | streamId)
+ typedef std::unordered_map<uint64_t, buffer_handle_t*> InflightBufferMap;
+
+ // Map of inflight buffers dealt by requestStreamBuffers API
+ typedef std::unordered_map<uint64_t, std::pair<int32_t, buffer_handle_t*>> RequestedBufferMap;
+
+ // A struct containing all buffer tracking information like inflight buffers
+ // and buffer ID caches
+ class BufferRecords : public BufferRecordsInterface {
+
+ public:
+ BufferRecords() {}
+
+ BufferRecords(BufferRecords&& other) :
+ mBufferIdMaps(other.mBufferIdMaps),
+ mNextBufferId(other.mNextBufferId),
+ mInflightBufferMap(other.mInflightBufferMap),
+ mRequestedBufferMap(other.mRequestedBufferMap) {}
+
+ virtual ~BufferRecords() {}
+
+ // Helper methods to help moving buffer records
+ void takeInflightBufferMap(BufferRecords& other);
+ void takeRequestedBufferMap(BufferRecords& other);
+ void takeBufferCaches(BufferRecords& other, const std::vector<int32_t>& streams);
+
+ // method to extract buffer's unique ID
+ // return pair of (newlySeenBuffer?, bufferId)
+ virtual std::pair<bool, uint64_t> getBufferId(
+ const buffer_handle_t& buf, int streamId) override;
+
+ void tryCreateBufferCache(int streamId);
+
+ void removeInactiveBufferCaches(const std::set<int32_t>& activeStreams);
+
+ // Return the removed buffer ID if input cache is found.
+ // Otherwise return BUFFER_ID_NO_BUFFER
+ uint64_t removeOneBufferCache(int streamId, const native_handle_t* handle);
+
+ // Clear all caches for input stream, but do not remove the stream
+ // Removed buffers' ID are returned
+ std::vector<uint64_t> clearBufferCaches(int streamId);
+
+ bool isStreamCached(int streamId);
+
+ // Return true if the input caches match what we have; otherwise false
+ bool verifyBufferIds(int32_t streamId, std::vector<uint64_t>& inBufIds);
+
+ // Get a vector of (frameNumber, streamId) pair of currently inflight
+ // buffers
+ void getInflightBufferKeys(std::vector<std::pair<int32_t, int32_t>>* out);
+
+ status_t pushInflightBuffer(int32_t frameNumber, int32_t streamId,
+ buffer_handle_t *buffer);
+
+ // Find a buffer_handle_t based on frame number and stream ID
+ virtual status_t popInflightBuffer(int32_t frameNumber, int32_t streamId,
+ /*out*/ buffer_handle_t **buffer) override;
+
+ // Pop inflight buffers based on pairs of (frameNumber,streamId)
+ void popInflightBuffers(const std::vector<std::pair<int32_t, int32_t>>& buffers);
+
+ // Get a vector of bufferId of currently inflight buffers
+ void getInflightRequestBufferKeys(std::vector<uint64_t>* out);
+
+ // Register a bufId (streamId, buffer_handle_t) to inflight request buffer
+ virtual status_t pushInflightRequestBuffer(
+ uint64_t bufferId, buffer_handle_t* buf, int32_t streamId) override;
+
+ // Find a buffer_handle_t based on bufferId
+ virtual status_t popInflightRequestBuffer(uint64_t bufferId,
+ /*out*/ buffer_handle_t** buffer,
+ /*optional out*/ int32_t* streamId = nullptr) override;
+
+ private:
+ std::mutex mBufferIdMapLock;
+ BufferIdMaps mBufferIdMaps;
+ uint64_t mNextBufferId = 1; // 0 means no buffer
+
+ std::mutex mInflightLock;
+ InflightBufferMap mInflightBufferMap;
+
+ std::mutex mRequestedBuffersLock;
+ RequestedBufferMap mRequestedBufferMap;
+ }; // class BufferRecords
+
+ static const uint64_t BUFFER_ID_NO_BUFFER = 0;
+
+ camera3_buffer_status_t mapHidlBufferStatus(
+ hardware::camera::device::V3_2::BufferStatus status);
+} // namespace camera3
+
+} // namespace android
+
+#endif
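As a usage note for the buffer ID cache declared above: the first getBufferId() call for a handle assigns a fresh 64-bit ID, and subsequent calls return the cached one, so only the ID needs to cross the HIDL boundary once the HAL has imported the handle. A test-style sketch under that assumption (the synthetic native_handle_t stands in for a real gralloc buffer and is not part of the patch):

    #include <cutils/native_handle.h>

    #include "device3/BufferUtils.h"

    void bufferIdCacheExample() {
        using android::camera3::BufferRecords;

        // Synthetic handle used purely for illustration.
        native_handle_t* nh = native_handle_create(/*numFds*/ 0, /*numInts*/ 1);
        nh->data[0] = 42;
        buffer_handle_t handle = nh;

        BufferRecords records;
        records.tryCreateBufferCache(/*streamId*/ 0);

        auto first = records.getBufferId(handle, /*streamId*/ 0);   // {true, new ID}
        auto second = records.getBufferId(handle, /*streamId*/ 0);  // {false, same ID}

        // Dropping the cache entry means the next lookup would mint a new ID.
        uint64_t removed = records.removeOneBufferCache(/*streamId*/ 0, nh);
        (void)first; (void)second; (void)removed;

        native_handle_delete(nh);
    }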
diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp
index 92ba5e4..23e26ce 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Device.cpp
@@ -58,10 +58,10 @@
#include "device3/Camera3InputStream.h"
#include "device3/Camera3DummyStream.h"
#include "device3/Camera3SharedOutputStream.h"
-#include "device3/Camera3OfflineSession.h"
#include "CameraService.h"
#include "utils/CameraThreadState.h"
+#include <algorithm>
#include <tuple>
using namespace android::camera3;
@@ -574,14 +574,6 @@
return OK;
}
-camera3_buffer_status_t Camera3Device::mapHidlBufferStatus(BufferStatus status) {
- switch (status) {
- case BufferStatus::OK: return CAMERA3_BUFFER_STATUS_OK;
- case BufferStatus::ERROR: return CAMERA3_BUFFER_STATUS_ERROR;
- }
- return CAMERA3_BUFFER_STATUS_ERROR;
-}
-
int Camera3Device::mapToFrameworkFormat(
hardware::graphics::common::V1_0::PixelFormat pixelFormat) {
return static_cast<uint32_t>(pixelFormat);
@@ -1001,230 +993,18 @@
hardware::Return<void> Camera3Device::requestStreamBuffers(
const hardware::hidl_vec<hardware::camera::device::V3_5::BufferRequest>& bufReqs,
requestStreamBuffers_cb _hidl_cb) {
- using hardware::camera::device::V3_5::BufferRequestStatus;
- using hardware::camera::device::V3_5::StreamBufferRet;
- using hardware::camera::device::V3_5::StreamBufferRequestError;
-
- std::lock_guard<std::mutex> lock(mRequestBufferInterfaceLock);
-
- hardware::hidl_vec<StreamBufferRet> bufRets;
- if (!mUseHalBufManager) {
- ALOGE("%s: Camera %s does not support HAL buffer management",
- __FUNCTION__, mId.string());
- _hidl_cb(BufferRequestStatus::FAILED_ILLEGAL_ARGUMENTS, bufRets);
- return hardware::Void();
- }
-
- SortedVector<int32_t> streamIds;
- ssize_t sz = streamIds.setCapacity(bufReqs.size());
- if (sz < 0 || static_cast<size_t>(sz) != bufReqs.size()) {
- ALOGE("%s: failed to allocate memory for %zu buffer requests",
- __FUNCTION__, bufReqs.size());
- _hidl_cb(BufferRequestStatus::FAILED_ILLEGAL_ARGUMENTS, bufRets);
- return hardware::Void();
- }
-
- if (bufReqs.size() > mOutputStreams.size()) {
- ALOGE("%s: too many buffer requests (%zu > # of output streams %zu)",
- __FUNCTION__, bufReqs.size(), mOutputStreams.size());
- _hidl_cb(BufferRequestStatus::FAILED_ILLEGAL_ARGUMENTS, bufRets);
- return hardware::Void();
- }
-
- // Check for repeated streamId
- for (const auto& bufReq : bufReqs) {
- if (streamIds.indexOf(bufReq.streamId) != NAME_NOT_FOUND) {
- ALOGE("%s: Stream %d appear multiple times in buffer requests",
- __FUNCTION__, bufReq.streamId);
- _hidl_cb(BufferRequestStatus::FAILED_ILLEGAL_ARGUMENTS, bufRets);
- return hardware::Void();
- }
- streamIds.add(bufReq.streamId);
- }
-
- if (!mRequestBufferSM.startRequestBuffer()) {
- ALOGE("%s: request buffer disallowed while camera service is configuring",
- __FUNCTION__);
- _hidl_cb(BufferRequestStatus::FAILED_CONFIGURING, bufRets);
- return hardware::Void();
- }
-
- bufRets.resize(bufReqs.size());
-
- bool allReqsSucceeds = true;
- bool oneReqSucceeds = false;
- for (size_t i = 0; i < bufReqs.size(); i++) {
- const auto& bufReq = bufReqs[i];
- auto& bufRet = bufRets[i];
- int32_t streamId = bufReq.streamId;
- sp<Camera3OutputStreamInterface> outputStream = mOutputStreams.get(streamId);
- if (outputStream == nullptr) {
- ALOGE("%s: Output stream id %d not found!", __FUNCTION__, streamId);
- hardware::hidl_vec<StreamBufferRet> emptyBufRets;
- _hidl_cb(BufferRequestStatus::FAILED_ILLEGAL_ARGUMENTS, emptyBufRets);
- mRequestBufferSM.endRequestBuffer();
- return hardware::Void();
- }
-
- if (outputStream->isAbandoned()) {
- bufRet.val.error(StreamBufferRequestError::STREAM_DISCONNECTED);
- allReqsSucceeds = false;
- continue;
- }
-
- bufRet.streamId = streamId;
- size_t handOutBufferCount = outputStream->getOutstandingBuffersCount();
- uint32_t numBuffersRequested = bufReq.numBuffersRequested;
- size_t totalHandout = handOutBufferCount + numBuffersRequested;
- uint32_t maxBuffers = outputStream->asHalStream()->max_buffers;
- if (totalHandout > maxBuffers) {
- // Not able to allocate enough buffer. Exit early for this stream
- ALOGE("%s: request too much buffers for stream %d: at HAL: %zu + requesting: %d"
- " > max: %d", __FUNCTION__, streamId, handOutBufferCount,
- numBuffersRequested, maxBuffers);
- bufRet.val.error(StreamBufferRequestError::MAX_BUFFER_EXCEEDED);
- allReqsSucceeds = false;
- continue;
- }
-
- hardware::hidl_vec<StreamBuffer> tmpRetBuffers(numBuffersRequested);
- bool currentReqSucceeds = true;
- std::vector<camera3_stream_buffer_t> streamBuffers(numBuffersRequested);
- size_t numAllocatedBuffers = 0;
- size_t numPushedInflightBuffers = 0;
- for (size_t b = 0; b < numBuffersRequested; b++) {
- camera3_stream_buffer_t& sb = streamBuffers[b];
- // Since this method can run concurrently with the request thread,
- // we need to update the wait duration every time we call getBuffer
- nsecs_t waitDuration = kBaseGetBufferWait + getExpectedInFlightDuration();
- status_t res = outputStream->getBuffer(&sb, waitDuration);
- if (res != OK) {
- if (res == NO_INIT || res == DEAD_OBJECT) {
- ALOGV("%s: Can't get output buffer for stream %d: %s (%d)",
- __FUNCTION__, streamId, strerror(-res), res);
- bufRet.val.error(StreamBufferRequestError::STREAM_DISCONNECTED);
- } else {
- ALOGE("%s: Can't get output buffer for stream %d: %s (%d)",
- __FUNCTION__, streamId, strerror(-res), res);
- if (res == TIMED_OUT || res == NO_MEMORY) {
- bufRet.val.error(StreamBufferRequestError::NO_BUFFER_AVAILABLE);
- } else {
- bufRet.val.error(StreamBufferRequestError::UNKNOWN_ERROR);
- }
- }
- currentReqSucceeds = false;
- break;
- }
- numAllocatedBuffers++;
-
- buffer_handle_t *buffer = sb.buffer;
- auto pair = mInterface->getBufferId(*buffer, streamId);
- bool isNewBuffer = pair.first;
- uint64_t bufferId = pair.second;
- StreamBuffer& hBuf = tmpRetBuffers[b];
-
- hBuf.streamId = streamId;
- hBuf.bufferId = bufferId;
- hBuf.buffer = (isNewBuffer) ? *buffer : nullptr;
- hBuf.status = BufferStatus::OK;
- hBuf.releaseFence = nullptr;
-
- native_handle_t *acquireFence = nullptr;
- if (sb.acquire_fence != -1) {
- acquireFence = native_handle_create(1,0);
- acquireFence->data[0] = sb.acquire_fence;
- }
- hBuf.acquireFence.setTo(acquireFence, /*shouldOwn*/true);
- hBuf.releaseFence = nullptr;
-
- res = mInterface->pushInflightRequestBuffer(bufferId, buffer, streamId);
- if (res != OK) {
- ALOGE("%s: Can't get register request buffers for stream %d: %s (%d)",
- __FUNCTION__, streamId, strerror(-res), res);
- bufRet.val.error(StreamBufferRequestError::UNKNOWN_ERROR);
- currentReqSucceeds = false;
- break;
- }
- numPushedInflightBuffers++;
- }
- if (currentReqSucceeds) {
- bufRet.val.buffers(std::move(tmpRetBuffers));
- oneReqSucceeds = true;
- } else {
- allReqsSucceeds = false;
- for (size_t b = 0; b < numPushedInflightBuffers; b++) {
- StreamBuffer& hBuf = tmpRetBuffers[b];
- buffer_handle_t* buffer;
- status_t res = mInterface->popInflightRequestBuffer(hBuf.bufferId, &buffer);
- if (res != OK) {
- SET_ERR("%s: popInflightRequestBuffer failed for stream %d: %s (%d)",
- __FUNCTION__, streamId, strerror(-res), res);
- }
- }
- for (size_t b = 0; b < numAllocatedBuffers; b++) {
- camera3_stream_buffer_t& sb = streamBuffers[b];
- sb.acquire_fence = -1;
- sb.status = CAMERA3_BUFFER_STATUS_ERROR;
- }
- returnOutputBuffers(streamBuffers.data(), numAllocatedBuffers, 0);
- }
- }
-
- _hidl_cb(allReqsSucceeds ? BufferRequestStatus::OK :
- oneReqSucceeds ? BufferRequestStatus::FAILED_PARTIAL :
- BufferRequestStatus::FAILED_UNKNOWN,
- bufRets);
- mRequestBufferSM.endRequestBuffer();
+ RequestBufferStates states {
+ mId, mRequestBufferInterfaceLock, mUseHalBufManager, mOutputStreams,
+ *this, *mInterface, *this};
+ camera3::requestStreamBuffers(states, bufReqs, _hidl_cb);
return hardware::Void();
}
hardware::Return<void> Camera3Device::returnStreamBuffers(
const hardware::hidl_vec<hardware::camera::device::V3_2::StreamBuffer>& buffers) {
- if (!mUseHalBufManager) {
- ALOGE("%s: Camera %s does not support HAL buffer managerment",
- __FUNCTION__, mId.string());
- return hardware::Void();
- }
-
- for (const auto& buf : buffers) {
- if (buf.bufferId == HalInterface::BUFFER_ID_NO_BUFFER) {
- ALOGE("%s: cannot return a buffer without bufferId", __FUNCTION__);
- continue;
- }
-
- buffer_handle_t* buffer;
- status_t res = mInterface->popInflightRequestBuffer(buf.bufferId, &buffer);
-
- if (res != OK) {
- ALOGE("%s: cannot find in-flight buffer %" PRIu64 " for stream %d",
- __FUNCTION__, buf.bufferId, buf.streamId);
- continue;
- }
-
- camera3_stream_buffer_t streamBuffer;
- streamBuffer.buffer = buffer;
- streamBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
- streamBuffer.acquire_fence = -1;
- streamBuffer.release_fence = -1;
-
- if (buf.releaseFence == nullptr) {
- streamBuffer.release_fence = -1;
- } else if (buf.releaseFence->numFds == 1) {
- streamBuffer.release_fence = dup(buf.releaseFence->data[0]);
- } else {
- ALOGE("%s: Invalid release fence, fd count is %d, not 1",
- __FUNCTION__, buf.releaseFence->numFds);
- continue;
- }
-
- sp<Camera3StreamInterface> stream = mOutputStreams.get(buf.streamId);
- if (stream == nullptr) {
- ALOGE("%s: Output stream id %d not found!", __FUNCTION__, buf.streamId);
- continue;
- }
- streamBuffer.stream = stream->asHalStream();
- returnOutputBuffers(&streamBuffer, /*size*/1, /*timestamp*/ 0);
- }
+ ReturnBufferStates states {
+ mId, mUseHalBufManager, mOutputStreams, *mInterface};
+ camera3::returnStreamBuffers(states, buffers);
return hardware::Void();
}
@@ -1241,6 +1021,12 @@
ALOGW("%s: received capture result in error state.", __FUNCTION__);
}
+ sp<NotificationListener> listener;
+ {
+ std::lock_guard<std::mutex> l(mOutputLock);
+ listener = mListener.promote();
+ }
+
if (mProcessCaptureResultLock.tryLock() != OK) {
// This should never happen; it indicates a wrong client implementation
// that doesn't follow the contract. But, we can be tolerant here.
@@ -1253,8 +1039,22 @@
return hardware::Void();
}
}
+ CaptureOutputStates states {
+ mId,
+ mInFlightLock, mInFlightMap,
+ mOutputLock, mResultQueue, mResultSignal,
+ mNextShutterFrameNumber,
+ mNextReprocessShutterFrameNumber, mNextZslStillShutterFrameNumber,
+ mNextResultFrameNumber,
+ mNextReprocessResultFrameNumber, mNextZslStillResultFrameNumber,
+ mUseHalBufManager, mUsePartialResult, mNeedFixupMonochromeTags,
+ mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
+ mResultMetadataQueue, mDistortionMappers, mZoomRatioMappers, mTagMonitor,
+ mInputStream, mOutputStreams, listener, *this, *this, *mInterface
+ };
+
for (const auto& result : results) {
- processOneCaptureResultLocked(result.v3_2, result.physicalCameraMetadata);
+ processOneCaptureResultLocked(states, result.v3_2, result.physicalCameraMetadata);
}
mProcessCaptureResultLock.unlock();
return hardware::Void();
@@ -1277,6 +1077,12 @@
ALOGW("%s: received capture result in error state.", __FUNCTION__);
}
+ sp<NotificationListener> listener;
+ {
+ std::lock_guard<std::mutex> l(mOutputLock);
+ listener = mListener.promote();
+ }
+
if (mProcessCaptureResultLock.tryLock() != OK) {
// This should never happen; it indicates a wrong client implementation
// that doesn't follow the contract. But, we can be tolerant here.
@@ -1289,186 +1095,28 @@
return hardware::Void();
}
}
+
+ CaptureOutputStates states {
+ mId,
+ mInFlightLock, mInFlightMap,
+ mOutputLock, mResultQueue, mResultSignal,
+ mNextShutterFrameNumber,
+ mNextReprocessShutterFrameNumber, mNextZslStillShutterFrameNumber,
+ mNextResultFrameNumber,
+ mNextReprocessResultFrameNumber, mNextZslStillResultFrameNumber,
+ mUseHalBufManager, mUsePartialResult, mNeedFixupMonochromeTags,
+ mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
+ mResultMetadataQueue, mDistortionMappers, mZoomRatioMappers, mTagMonitor,
+ mInputStream, mOutputStreams, listener, *this, *this, *mInterface
+ };
+
for (const auto& result : results) {
- processOneCaptureResultLocked(result, noPhysMetadata);
+ processOneCaptureResultLocked(states, result, noPhysMetadata);
}
mProcessCaptureResultLock.unlock();
return hardware::Void();
}
-status_t Camera3Device::readOneCameraMetadataLocked(
- uint64_t fmqResultSize, hardware::camera::device::V3_2::CameraMetadata& resultMetadata,
- const hardware::camera::device::V3_2::CameraMetadata& result) {
- if (fmqResultSize > 0) {
- resultMetadata.resize(fmqResultSize);
- if (mResultMetadataQueue == nullptr) {
- return NO_MEMORY; // logged in initialize()
- }
- if (!mResultMetadataQueue->read(resultMetadata.data(), fmqResultSize)) {
- ALOGE("%s: Cannot read camera metadata from fmq, size = %" PRIu64,
- __FUNCTION__, fmqResultSize);
- return INVALID_OPERATION;
- }
- } else {
- resultMetadata.setToExternal(const_cast<uint8_t *>(result.data()),
- result.size());
- }
-
- if (resultMetadata.size() != 0) {
- status_t res;
- const camera_metadata_t* metadata =
- reinterpret_cast<const camera_metadata_t*>(resultMetadata.data());
- size_t expected_metadata_size = resultMetadata.size();
- if ((res = validate_camera_metadata_structure(metadata, &expected_metadata_size)) != OK) {
- ALOGE("%s: Invalid camera metadata received by camera service from HAL: %s (%d)",
- __FUNCTION__, strerror(-res), res);
- return INVALID_OPERATION;
- }
- }
-
- return OK;
-}
-
-void Camera3Device::processOneCaptureResultLocked(
- const hardware::camera::device::V3_2::CaptureResult& result,
- const hardware::hidl_vec<
- hardware::camera::device::V3_4::PhysicalCameraMetadata> physicalCameraMetadata) {
- camera3_capture_result r;
- status_t res;
- r.frame_number = result.frameNumber;
-
- // Read and validate the result metadata.
- hardware::camera::device::V3_2::CameraMetadata resultMetadata;
- res = readOneCameraMetadataLocked(result.fmqResultSize, resultMetadata, result.result);
- if (res != OK) {
- ALOGE("%s: Frame %d: Failed to read capture result metadata",
- __FUNCTION__, result.frameNumber);
- return;
- }
- r.result = reinterpret_cast<const camera_metadata_t*>(resultMetadata.data());
-
- // Read and validate physical camera metadata
- size_t physResultCount = physicalCameraMetadata.size();
- std::vector<const char*> physCamIds(physResultCount);
- std::vector<const camera_metadata_t *> phyCamMetadatas(physResultCount);
- std::vector<hardware::camera::device::V3_2::CameraMetadata> physResultMetadata;
- physResultMetadata.resize(physResultCount);
- for (size_t i = 0; i < physicalCameraMetadata.size(); i++) {
- res = readOneCameraMetadataLocked(physicalCameraMetadata[i].fmqMetadataSize,
- physResultMetadata[i], physicalCameraMetadata[i].metadata);
- if (res != OK) {
- ALOGE("%s: Frame %d: Failed to read capture result metadata for camera %s",
- __FUNCTION__, result.frameNumber,
- physicalCameraMetadata[i].physicalCameraId.c_str());
- return;
- }
- physCamIds[i] = physicalCameraMetadata[i].physicalCameraId.c_str();
- phyCamMetadatas[i] = reinterpret_cast<const camera_metadata_t*>(
- physResultMetadata[i].data());
- }
- r.num_physcam_metadata = physResultCount;
- r.physcam_ids = physCamIds.data();
- r.physcam_metadata = phyCamMetadatas.data();
-
- std::vector<camera3_stream_buffer_t> outputBuffers(result.outputBuffers.size());
- std::vector<buffer_handle_t> outputBufferHandles(result.outputBuffers.size());
- for (size_t i = 0; i < result.outputBuffers.size(); i++) {
- auto& bDst = outputBuffers[i];
- const StreamBuffer &bSrc = result.outputBuffers[i];
-
- sp<Camera3StreamInterface> stream = mOutputStreams.get(bSrc.streamId);
- if (stream == nullptr) {
- ALOGE("%s: Frame %d: Buffer %zu: Invalid output stream id %d",
- __FUNCTION__, result.frameNumber, i, bSrc.streamId);
- return;
- }
- bDst.stream = stream->asHalStream();
-
- bool noBufferReturned = false;
- buffer_handle_t *buffer = nullptr;
- if (mUseHalBufManager) {
- // This is suspicious most of the time but can be correct during flush, where the HAL
- // has to return a capture result before a buffer is requested
- if (bSrc.bufferId == HalInterface::BUFFER_ID_NO_BUFFER) {
- if (bSrc.status == BufferStatus::OK) {
- ALOGE("%s: Frame %d: Buffer %zu: No bufferId for stream %d",
- __FUNCTION__, result.frameNumber, i, bSrc.streamId);
- // Still proceeds so other buffers can be returned
- }
- noBufferReturned = true;
- }
- if (noBufferReturned) {
- res = OK;
- } else {
- res = mInterface->popInflightRequestBuffer(bSrc.bufferId, &buffer);
- }
- } else {
- res = mInterface->popInflightBuffer(result.frameNumber, bSrc.streamId, &buffer);
- }
-
- if (res != OK) {
- ALOGE("%s: Frame %d: Buffer %zu: No in-flight buffer for stream %d",
- __FUNCTION__, result.frameNumber, i, bSrc.streamId);
- return;
- }
-
- bDst.buffer = buffer;
- bDst.status = mapHidlBufferStatus(bSrc.status);
- bDst.acquire_fence = -1;
- if (bSrc.releaseFence == nullptr) {
- bDst.release_fence = -1;
- } else if (bSrc.releaseFence->numFds == 1) {
- if (noBufferReturned) {
- ALOGE("%s: got releaseFence without output buffer!", __FUNCTION__);
- }
- bDst.release_fence = dup(bSrc.releaseFence->data[0]);
- } else {
- ALOGE("%s: Frame %d: Invalid release fence for buffer %zu, fd count is %d, not 1",
- __FUNCTION__, result.frameNumber, i, bSrc.releaseFence->numFds);
- return;
- }
- }
- r.num_output_buffers = outputBuffers.size();
- r.output_buffers = outputBuffers.data();
-
- camera3_stream_buffer_t inputBuffer;
- if (result.inputBuffer.streamId == -1) {
- r.input_buffer = nullptr;
- } else {
- if (mInputStream->getId() != result.inputBuffer.streamId) {
- ALOGE("%s: Frame %d: Invalid input stream id %d", __FUNCTION__,
- result.frameNumber, result.inputBuffer.streamId);
- return;
- }
- inputBuffer.stream = mInputStream->asHalStream();
- buffer_handle_t *buffer;
- res = mInterface->popInflightBuffer(result.frameNumber, result.inputBuffer.streamId,
- &buffer);
- if (res != OK) {
- ALOGE("%s: Frame %d: Input buffer: No in-flight buffer for stream %d",
- __FUNCTION__, result.frameNumber, result.inputBuffer.streamId);
- return;
- }
- inputBuffer.buffer = buffer;
- inputBuffer.status = mapHidlBufferStatus(result.inputBuffer.status);
- inputBuffer.acquire_fence = -1;
- if (result.inputBuffer.releaseFence == nullptr) {
- inputBuffer.release_fence = -1;
- } else if (result.inputBuffer.releaseFence->numFds == 1) {
- inputBuffer.release_fence = dup(result.inputBuffer.releaseFence->data[0]);
- } else {
- ALOGE("%s: Frame %d: Invalid release fence for input buffer, fd count is %d, not 1",
- __FUNCTION__, result.frameNumber, result.inputBuffer.releaseFence->numFds);
- return;
- }
- r.input_buffer = &inputBuffer;
- }
-
- r.partial_result = result.partialResult;
-
- processCaptureResult(&r);
-}
-
hardware::Return<void> Camera3Device::notify(
const hardware::hidl_vec<hardware::camera::device::V3_2::NotifyMsg>& msgs) {
// Ideally we should grab mLock, but that can lead to deadlock, and
@@ -1481,55 +1129,31 @@
ALOGW("%s: received notify message in error state.", __FUNCTION__);
}
+ sp<NotificationListener> listener;
+ {
+ std::lock_guard<std::mutex> l(mOutputLock);
+ listener = mListener.promote();
+ }
+
+ CaptureOutputStates states {
+ mId,
+ mInFlightLock, mInFlightMap,
+ mOutputLock, mResultQueue, mResultSignal,
+ mNextShutterFrameNumber,
+ mNextReprocessShutterFrameNumber, mNextZslStillShutterFrameNumber,
+ mNextResultFrameNumber,
+ mNextReprocessResultFrameNumber, mNextZslStillResultFrameNumber,
+ mUseHalBufManager, mUsePartialResult, mNeedFixupMonochromeTags,
+ mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
+ mResultMetadataQueue, mDistortionMappers, mZoomRatioMappers, mTagMonitor,
+ mInputStream, mOutputStreams, listener, *this, *this, *mInterface
+ };
for (const auto& msg : msgs) {
- notify(msg);
+ camera3::notify(states, msg);
}
return hardware::Void();
}
-void Camera3Device::notify(
- const hardware::camera::device::V3_2::NotifyMsg& msg) {
-
- camera3_notify_msg m;
- switch (msg.type) {
- case MsgType::ERROR:
- m.type = CAMERA3_MSG_ERROR;
- m.message.error.frame_number = msg.msg.error.frameNumber;
- if (msg.msg.error.errorStreamId >= 0) {
- sp<Camera3StreamInterface> stream = mOutputStreams.get(msg.msg.error.errorStreamId);
- if (stream == nullptr) {
- ALOGE("%s: Frame %d: Invalid error stream id %d", __FUNCTION__,
- m.message.error.frame_number, msg.msg.error.errorStreamId);
- return;
- }
- m.message.error.error_stream = stream->asHalStream();
- } else {
- m.message.error.error_stream = nullptr;
- }
- switch (msg.msg.error.errorCode) {
- case ErrorCode::ERROR_DEVICE:
- m.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
- break;
- case ErrorCode::ERROR_REQUEST:
- m.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
- break;
- case ErrorCode::ERROR_RESULT:
- m.message.error.error_code = CAMERA3_MSG_ERROR_RESULT;
- break;
- case ErrorCode::ERROR_BUFFER:
- m.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
- break;
- }
- break;
- case MsgType::SHUTTER:
- m.type = CAMERA3_MSG_SHUTTER;
- m.message.shutter.frame_number = msg.msg.shutter.frameNumber;
- m.message.shutter.timestamp = msg.msg.shutter.timestamp;
- break;
- }
- notify(&m);
-}
-
status_t Camera3Device::captureList(const List<const PhysicalCameraSettingsList> &requestsList,
const std::list<const SurfaceMap> &surfaceMaps,
int64_t *lastFrameNumber) {
@@ -1683,56 +1307,6 @@
return OK;
}
-status_t Camera3Device::StreamSet::add(
- int streamId, sp<camera3::Camera3OutputStreamInterface> stream) {
- if (stream == nullptr) {
- ALOGE("%s: cannot add null stream", __FUNCTION__);
- return BAD_VALUE;
- }
- std::lock_guard<std::mutex> lock(mLock);
- return mData.add(streamId, stream);
-}
-
-ssize_t Camera3Device::StreamSet::remove(int streamId) {
- std::lock_guard<std::mutex> lock(mLock);
- return mData.removeItem(streamId);
-}
-
-sp<camera3::Camera3OutputStreamInterface>
-Camera3Device::StreamSet::get(int streamId) {
- std::lock_guard<std::mutex> lock(mLock);
- ssize_t idx = mData.indexOfKey(streamId);
- if (idx == NAME_NOT_FOUND) {
- return nullptr;
- }
- return mData.editValueAt(idx);
-}
-
-sp<camera3::Camera3OutputStreamInterface>
-Camera3Device::StreamSet::operator[] (size_t index) {
- std::lock_guard<std::mutex> lock(mLock);
- return mData.editValueAt(index);
-}
-
-size_t Camera3Device::StreamSet::size() const {
- std::lock_guard<std::mutex> lock(mLock);
- return mData.size();
-}
-
-void Camera3Device::StreamSet::clear() {
- std::lock_guard<std::mutex> lock(mLock);
- return mData.clear();
-}
-
-std::vector<int> Camera3Device::StreamSet::getStreamIds() {
- std::lock_guard<std::mutex> lock(mLock);
- std::vector<int> streamIds(mData.size());
- for (size_t i = 0; i < mData.size(); i++) {
- streamIds[i] = mData.keyAt(i);
- }
- return streamIds;
-}
-
status_t Camera3Device::createStream(sp<Surface> consumer,
uint32_t width, uint32_t height, int format,
android_dataspace dataSpace, camera3_stream_rotation_t rotation, int *id,
@@ -2296,7 +1870,7 @@
status_t Camera3Device::setNotifyCallback(wp<NotificationListener> listener) {
ATRACE_CALL();
- Mutex::Autolock l(mOutputLock);
+ std::lock_guard<std::mutex> l(mOutputLock);
if (listener != NULL && mListener != NULL) {
ALOGW("%s: Replacing old callback listener", __FUNCTION__);
@@ -2314,17 +1888,12 @@
status_t Camera3Device::waitForNextFrame(nsecs_t timeout) {
ATRACE_CALL();
- status_t res;
- Mutex::Autolock l(mOutputLock);
+ std::unique_lock<std::mutex> l(mOutputLock);
while (mResultQueue.empty()) {
- res = mResultSignal.waitRelative(mOutputLock, timeout);
- if (res == TIMED_OUT) {
- return res;
- } else if (res != OK) {
- ALOGW("%s: Camera %s: No frame in %" PRId64 " ns: %s (%d)",
- __FUNCTION__, mId.string(), timeout, strerror(-res), res);
- return res;
+ auto st = mResultSignal.wait_for(l, std::chrono::nanoseconds(timeout));
+ if (st == std::cv_status::timeout) {
+ return TIMED_OUT;
}
}
return OK;
@@ -2332,7 +1901,7 @@
status_t Camera3Device::getNextResult(CaptureResult *frame) {
ATRACE_CALL();
- Mutex::Autolock l(mOutputLock);
+ std::lock_guard<std::mutex> l(mOutputLock);
if (mResultQueue.empty()) {
return NOT_ENOUGH_DATA;
@@ -2528,7 +2097,7 @@
sp<NotificationListener> listener;
{
- Mutex::Autolock l(mOutputLock);
+ std::lock_guard<std::mutex> l(mOutputLock);
listener = mListener.promote();
}
if (idle && listener != NULL) {
@@ -2727,17 +2296,6 @@
return newRequest;
}
-bool Camera3Device::isOpaqueInputSizeSupported(uint32_t width, uint32_t height) {
- for (uint32_t i = 0; i < mSupportedOpaqueInputSizes.size(); i++) {
- Size size = mSupportedOpaqueInputSizes[i];
- if (size.width == width && size.height == height) {
- return true;
- }
- }
-
- return false;
-}
-
void Camera3Device::cancelStreamsConfigurationLocked() {
int res = OK;
if (mInputStream != NULL && mInputStream->isConfiguring()) {
@@ -3175,7 +2733,7 @@
bool isZslCapture, const std::set<std::string>& cameraIdsWithZoom,
const SurfaceMap& outputSurfaces) {
ATRACE_CALL();
- Mutex::Autolock l(mInFlightLock);
+ std::lock_guard<std::mutex> l(mInFlightLock);
ssize_t res;
res = mInFlightMap.add(frameNumber, InFlightRequest(numBuffers, resultExtras, hasInput,
@@ -3196,79 +2754,7 @@
return OK;
}
-void Camera3Device::returnOutputBuffers(
- const camera3_stream_buffer_t *outputBuffers, size_t numBuffers,
- nsecs_t timestamp, bool timestampIncreasing,
- const SurfaceMap& outputSurfaces,
- const CaptureResultExtras &inResultExtras) {
-
- for (size_t i = 0; i < numBuffers; i++)
- {
- if (outputBuffers[i].buffer == nullptr) {
- if (!mUseHalBufManager) {
- // With the HAL buffer management API, the HAL sometimes has to return buffers that
- // do not yet have an output buffer handle filled in. This is illegal, though, if the
- // HAL buffer management API is not being used.
- ALOGE("%s: cannot return a null buffer!", __FUNCTION__);
- }
- continue;
- }
-
- Camera3StreamInterface *stream = Camera3Stream::cast(outputBuffers[i].stream);
- int streamId = stream->getId();
- const auto& it = outputSurfaces.find(streamId);
- status_t res = OK;
- if (it != outputSurfaces.end()) {
- res = stream->returnBuffer(
- outputBuffers[i], timestamp, timestampIncreasing, it->second,
- inResultExtras.frameNumber);
- } else {
- res = stream->returnBuffer(
- outputBuffers[i], timestamp, timestampIncreasing, std::vector<size_t> (),
- inResultExtras.frameNumber);
- }
-
- // Note: stream may be deallocated at this point, if this buffer was
- // the last reference to it.
- if (res == NO_INIT || res == DEAD_OBJECT) {
- ALOGV("Can't return buffer to its stream: %s (%d)", strerror(-res), res);
- } else if (res != OK) {
- ALOGE("Can't return buffer to its stream: %s (%d)", strerror(-res), res);
- }
-
- // Long processing consumers can cause returnBuffer timeout for shared stream
- // If that happens, cancel the buffer and send a buffer error to client
- if (it != outputSurfaces.end() && res == TIMED_OUT &&
- outputBuffers[i].status == CAMERA3_BUFFER_STATUS_OK) {
- // cancel the buffer
- camera3_stream_buffer_t sb = outputBuffers[i];
- sb.status = CAMERA3_BUFFER_STATUS_ERROR;
- stream->returnBuffer(sb, /*timestamp*/0, timestampIncreasing, std::vector<size_t> (),
- inResultExtras.frameNumber);
-
- // notify client buffer error
- sp<NotificationListener> listener;
- {
- Mutex::Autolock l(mOutputLock);
- listener = mListener.promote();
- }
-
- if (listener != nullptr) {
- CaptureResultExtras extras = inResultExtras;
- extras.errorStreamId = streamId;
- listener->notifyError(
- hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_BUFFER,
- extras);
- }
- }
- }
-}
-
-void Camera3Device::removeInFlightMapEntryLocked(int idx) {
- ATRACE_CALL();
- nsecs_t duration = mInFlightMap.valueAt(idx).maxExpectedDuration;
- mInFlightMap.removeItemsAt(idx, 1);
-
+void Camera3Device::onInflightEntryRemovedLocked(nsecs_t duration) {
// Indicate idle inFlightMap to the status tracker
if (mInFlightMap.size() == 0) {
mRequestBufferSM.onInflightMapEmpty();
@@ -3282,50 +2768,7 @@
mExpectedInflightDuration -= duration;
}
-void Camera3Device::removeInFlightRequestIfReadyLocked(int idx) {
-
- const InFlightRequest &request = mInFlightMap.valueAt(idx);
- const uint32_t frameNumber = mInFlightMap.keyAt(idx);
-
- nsecs_t sensorTimestamp = request.sensorTimestamp;
- nsecs_t shutterTimestamp = request.shutterTimestamp;
-
- // Check if it's okay to remove the request from InFlightMap:
- // In the case of a successful request:
- // all input and output buffers, all result metadata, shutter callback
- // arrived.
- // In the case of an unsuccessful request:
- // all input and output buffers arrived.
- if (request.numBuffersLeft == 0 &&
- (request.skipResultMetadata ||
- (request.haveResultMetadata && shutterTimestamp != 0))) {
- if (request.stillCapture) {
- ATRACE_ASYNC_END("still capture", frameNumber);
- }
-
- ATRACE_ASYNC_END("frame capture", frameNumber);
-
- // Sanity check - the sensor timestamp must match the shutter timestamp
- // when the request has a callback.
- if (request.hasCallback && request.requestStatus == OK &&
- sensorTimestamp != shutterTimestamp) {
- SET_ERR("sensor timestamp (%" PRId64
- ") for frame %d doesn't match shutter timestamp (%" PRId64 ")",
- sensorTimestamp, frameNumber, shutterTimestamp);
- }
-
- // for an unsuccessful request, it may have pending output buffers to
- // return.
- assert(request.requestStatus != OK ||
- request.pendingOutputBuffers.size() == 0);
- returnOutputBuffers(request.pendingOutputBuffers.array(),
- request.pendingOutputBuffers.size(), 0, /*timestampIncreasing*/true,
- request.outputSurfaces, request.resultExtras);
-
- removeInFlightMapEntryLocked(idx);
- ALOGVV("%s: removed frame %d from InFlightMap", __FUNCTION__, frameNumber);
- }
-
+void Camera3Device::checkInflightMapLengthLocked() {
// Sanity check - if we have too many in-flight frames with long total inflight duration,
// something has likely gone wrong. This might still be legit only if the application sends in
// a long burst of long exposure requests.
@@ -3342,734 +2785,32 @@
}
}
+void Camera3Device::onInflightMapFlushedLocked() {
+ mExpectedInflightDuration = 0;
+}
+
+void Camera3Device::removeInFlightMapEntryLocked(int idx) {
+ ATRACE_CALL();
+ nsecs_t duration = mInFlightMap.valueAt(idx).maxExpectedDuration;
+ mInFlightMap.removeItemsAt(idx, 1);
+
+ onInflightEntryRemovedLocked(duration);
+}
+
+
void Camera3Device::flushInflightRequests() {
ATRACE_CALL();
- { // First return buffers cached in mInFlightMap
- Mutex::Autolock l(mInFlightLock);
- for (size_t idx = 0; idx < mInFlightMap.size(); idx++) {
- const InFlightRequest &request = mInFlightMap.valueAt(idx);
- returnOutputBuffers(request.pendingOutputBuffers.array(),
- request.pendingOutputBuffers.size(), 0,
- /*timestampIncreasing*/true, request.outputSurfaces,
- request.resultExtras);
- }
- mInFlightMap.clear();
- mExpectedInflightDuration = 0;
- }
-
- // Then return all inflight buffers not returned by HAL
- std::vector<std::pair<int32_t, int32_t>> inflightKeys;
- mInterface->getInflightBufferKeys(&inflightKeys);
-
- // Inflight buffers for HAL buffer manager
- std::vector<uint64_t> inflightRequestBufferKeys;
- mInterface->getInflightRequestBufferKeys(&inflightRequestBufferKeys);
-
- // (streamId, frameNumber, buffer_handle_t*) tuple for all inflight buffers.
- // frameNumber will be -1 for buffers from HAL buffer manager
- std::vector<std::tuple<int32_t, int32_t, buffer_handle_t*>> inflightBuffers;
- inflightBuffers.reserve(inflightKeys.size() + inflightRequestBufferKeys.size());
-
- for (auto& pair : inflightKeys) {
- int32_t frameNumber = pair.first;
- int32_t streamId = pair.second;
- buffer_handle_t* buffer;
- status_t res = mInterface->popInflightBuffer(frameNumber, streamId, &buffer);
- if (res != OK) {
- ALOGE("%s: Frame %d: No in-flight buffer for stream %d",
- __FUNCTION__, frameNumber, streamId);
- continue;
- }
- inflightBuffers.push_back(std::make_tuple(streamId, frameNumber, buffer));
- }
-
- for (auto& bufferId : inflightRequestBufferKeys) {
- int32_t streamId = -1;
- buffer_handle_t* buffer = nullptr;
- status_t res = mInterface->popInflightRequestBuffer(bufferId, &buffer, &streamId);
- if (res != OK) {
- ALOGE("%s: cannot find in-flight buffer %" PRIu64, __FUNCTION__, bufferId);
- continue;
- }
- inflightBuffers.push_back(std::make_tuple(streamId, /*frameNumber*/-1, buffer));
- }
-
- int32_t inputStreamId = (mInputStream != nullptr) ? mInputStream->getId() : -1;
- for (auto& tuple : inflightBuffers) {
- status_t res = OK;
- int32_t streamId = std::get<0>(tuple);
- int32_t frameNumber = std::get<1>(tuple);
- buffer_handle_t* buffer = std::get<2>(tuple);
-
- camera3_stream_buffer_t streamBuffer;
- streamBuffer.buffer = buffer;
- streamBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
- streamBuffer.acquire_fence = -1;
- streamBuffer.release_fence = -1;
-
- // First check if the buffer belongs to a deleted stream
- bool streamDeleted = false;
- for (auto& stream : mDeletedStreams) {
- if (streamId == stream->getId()) {
- streamDeleted = true;
- // Return buffer to deleted stream
- camera3_stream* halStream = stream->asHalStream();
- streamBuffer.stream = halStream;
- switch (halStream->stream_type) {
- case CAMERA3_STREAM_OUTPUT:
- res = stream->returnBuffer(streamBuffer, /*timestamp*/ 0,
- /*timestampIncreasing*/true, std::vector<size_t> (), frameNumber);
- if (res != OK) {
- ALOGE("%s: Can't return output buffer for frame %d to"
- " stream %d: %s (%d)", __FUNCTION__,
- frameNumber, streamId, strerror(-res), res);
- }
- break;
- case CAMERA3_STREAM_INPUT:
- res = stream->returnInputBuffer(streamBuffer);
- if (res != OK) {
- ALOGE("%s: Can't return input buffer for frame %d to"
- " stream %d: %s (%d)", __FUNCTION__,
- frameNumber, streamId, strerror(-res), res);
- }
- break;
- default: // Bi-directional stream is deprecated
- ALOGE("%s: stream %d has unknown stream type %d",
- __FUNCTION__, streamId, halStream->stream_type);
- break;
- }
- break;
- }
- }
- if (streamDeleted) {
- continue;
- }
-
- // Then check against configured streams
- if (streamId == inputStreamId) {
- streamBuffer.stream = mInputStream->asHalStream();
- res = mInputStream->returnInputBuffer(streamBuffer);
- if (res != OK) {
- ALOGE("%s: Can't return input buffer for frame %d to"
- " stream %d: %s (%d)", __FUNCTION__,
- frameNumber, streamId, strerror(-res), res);
- }
- } else {
- sp<Camera3StreamInterface> stream = mOutputStreams.get(streamId);
- if (stream == nullptr) {
- ALOGE("%s: Output stream id %d not found!", __FUNCTION__, streamId);
- continue;
- }
- streamBuffer.stream = stream->asHalStream();
- returnOutputBuffers(&streamBuffer, /*size*/1, /*timestamp*/ 0);
- }
- }
-}
-
-void Camera3Device::insertResultLocked(CaptureResult *result,
- uint32_t frameNumber) {
- if (result == nullptr) return;
-
- camera_metadata_t *meta = const_cast<camera_metadata_t *>(
- result->mMetadata.getAndLock());
- set_camera_metadata_vendor_id(meta, mVendorTagId);
- result->mMetadata.unlock(meta);
-
- if (result->mMetadata.update(ANDROID_REQUEST_FRAME_COUNT,
- (int32_t*)&frameNumber, 1) != OK) {
- SET_ERR("Failed to set frame number %d in metadata", frameNumber);
- return;
- }
-
- if (result->mMetadata.update(ANDROID_REQUEST_ID, &result->mResultExtras.requestId, 1) != OK) {
- SET_ERR("Failed to set request ID in metadata for frame %d", frameNumber);
- return;
- }
-
- // Update vendor tag id for physical metadata
- for (auto& physicalMetadata : result->mPhysicalMetadatas) {
- camera_metadata_t *pmeta = const_cast<camera_metadata_t *>(
- physicalMetadata.mPhysicalCameraMetadata.getAndLock());
- set_camera_metadata_vendor_id(pmeta, mVendorTagId);
- physicalMetadata.mPhysicalCameraMetadata.unlock(pmeta);
- }
-
- // Valid result, insert into queue
- List<CaptureResult>::iterator queuedResult =
- mResultQueue.insert(mResultQueue.end(), CaptureResult(*result));
- ALOGVV("%s: result requestId = %" PRId32 ", frameNumber = %" PRId64
- ", burstId = %" PRId32, __FUNCTION__,
- queuedResult->mResultExtras.requestId,
- queuedResult->mResultExtras.frameNumber,
- queuedResult->mResultExtras.burstId);
-
- mResultSignal.signal();
-}
-
-
-void Camera3Device::sendPartialCaptureResult(const camera_metadata_t * partialResult,
- const CaptureResultExtras &resultExtras, uint32_t frameNumber) {
- ATRACE_CALL();
- Mutex::Autolock l(mOutputLock);
-
- CaptureResult captureResult;
- captureResult.mResultExtras = resultExtras;
- captureResult.mMetadata = partialResult;
-
- // Fix up result metadata for monochrome camera.
- status_t res = fixupMonochromeTags(mDeviceInfo, captureResult.mMetadata);
- if (res != OK) {
- SET_ERR("Failed to override result metadata: %s (%d)", strerror(-res), res);
- return;
- }
-
- insertResultLocked(&captureResult, frameNumber);
-}
-
-
-void Camera3Device::sendCaptureResult(CameraMetadata &pendingMetadata,
- CaptureResultExtras &resultExtras,
- CameraMetadata &collectedPartialResult,
- uint32_t frameNumber,
- bool reprocess, bool zslStillCapture, const std::set<std::string>& cameraIdsWithZoom,
- const std::vector<PhysicalCaptureResultInfo>& physicalMetadatas) {
- ATRACE_CALL();
- if (pendingMetadata.isEmpty())
- return;
-
- Mutex::Autolock l(mOutputLock);
-
- // TODO: need to track errors for tighter bounds on expected frame number
- if (reprocess) {
- if (frameNumber < mNextReprocessResultFrameNumber) {
- SET_ERR("Out-of-order reprocess capture result metadata submitted! "
- "(got frame number %d, expecting %d)",
- frameNumber, mNextReprocessResultFrameNumber);
- return;
- }
- mNextReprocessResultFrameNumber = frameNumber + 1;
- } else if (zslStillCapture) {
- if (frameNumber < mNextZslStillResultFrameNumber) {
- SET_ERR("Out-of-order ZSL still capture result metadata submitted! "
- "(got frame number %d, expecting %d)",
- frameNumber, mNextZslStillResultFrameNumber);
- return;
- }
- mNextZslStillResultFrameNumber = frameNumber + 1;
- } else {
- if (frameNumber < mNextResultFrameNumber) {
- SET_ERR("Out-of-order capture result metadata submitted! "
- "(got frame number %d, expecting %d)",
- frameNumber, mNextResultFrameNumber);
- return;
- }
- mNextResultFrameNumber = frameNumber + 1;
- }
-
- CaptureResult captureResult;
- captureResult.mResultExtras = resultExtras;
- captureResult.mMetadata = pendingMetadata;
- captureResult.mPhysicalMetadatas = physicalMetadatas;
-
- // Append any previous partials to form a complete result
- if (mUsePartialResult && !collectedPartialResult.isEmpty()) {
- captureResult.mMetadata.append(collectedPartialResult);
- }
-
- captureResult.mMetadata.sort();
-
- // Check that there's a timestamp in the result metadata
- camera_metadata_entry timestamp = captureResult.mMetadata.find(ANDROID_SENSOR_TIMESTAMP);
- if (timestamp.count == 0) {
- SET_ERR("No timestamp provided by HAL for frame %d!",
- frameNumber);
- return;
- }
- for (auto& physicalMetadata : captureResult.mPhysicalMetadatas) {
- camera_metadata_entry timestamp =
- physicalMetadata.mPhysicalCameraMetadata.find(ANDROID_SENSOR_TIMESTAMP);
- if (timestamp.count == 0) {
- SET_ERR("No timestamp provided by HAL for physical camera %s frame %d!",
- String8(physicalMetadata.mPhysicalCameraId).c_str(), frameNumber);
- return;
- }
- }
-
- // Fix up some result metadata to account for HAL-level distortion correction
- status_t res =
- mDistortionMappers[mId.c_str()].correctCaptureResult(&captureResult.mMetadata);
- if (res != OK) {
- SET_ERR("Unable to correct capture result metadata for frame %d: %s (%d)",
- frameNumber, strerror(-res), res);
- return;
- }
-
- // Fix up result metadata to account for zoom ratio availabilities between
- // HAL and app.
- bool zoomRatioIs1 = cameraIdsWithZoom.find(mId.c_str()) == cameraIdsWithZoom.end();
- res = mZoomRatioMappers[mId.c_str()].updateCaptureResult(
- &captureResult.mMetadata, zoomRatioIs1);
- if (res != OK) {
- SET_ERR("Failed to update capture result zoom ratio metadata for frame %d: %s (%d)",
- frameNumber, strerror(-res), res);
- return;
- }
-
- for (auto& physicalMetadata : captureResult.mPhysicalMetadatas) {
- String8 cameraId8(physicalMetadata.mPhysicalCameraId);
- if (mDistortionMappers.find(cameraId8.c_str()) != mDistortionMappers.end()) {
- res = mDistortionMappers[cameraId8.c_str()].correctCaptureResult(
- &physicalMetadata.mPhysicalCameraMetadata);
- if (res != OK) {
- SET_ERR("Unable to correct physical capture result metadata for frame %d: %s (%d)",
- frameNumber, strerror(-res), res);
- return;
- }
- }
-
- zoomRatioIs1 = cameraIdsWithZoom.find(cameraId8.c_str()) == cameraIdsWithZoom.end();
- res = mZoomRatioMappers[cameraId8.c_str()].updateCaptureResult(
- &physicalMetadata.mPhysicalCameraMetadata, zoomRatioIs1);
- if (res != OK) {
- SET_ERR("Failed to update camera %s's physical zoom ratio metadata for "
- "frame %d: %s(%d)", cameraId8.c_str(), frameNumber, strerror(-res), res);
- return;
- }
- }
-
- // Fix up result metadata for monochrome camera.
- res = fixupMonochromeTags(mDeviceInfo, captureResult.mMetadata);
- if (res != OK) {
- SET_ERR("Failed to override result metadata: %s (%d)", strerror(-res), res);
- return;
- }
- for (auto& physicalMetadata : captureResult.mPhysicalMetadatas) {
- String8 cameraId8(physicalMetadata.mPhysicalCameraId);
- res = fixupMonochromeTags(mPhysicalDeviceInfoMap.at(cameraId8.c_str()),
- physicalMetadata.mPhysicalCameraMetadata);
- if (res != OK) {
- SET_ERR("Failed to override result metadata: %s (%d)", strerror(-res), res);
- return;
- }
- }
-
- std::unordered_map<std::string, CameraMetadata> monitoredPhysicalMetadata;
- for (auto& m : physicalMetadatas) {
- monitoredPhysicalMetadata.emplace(String8(m.mPhysicalCameraId).string(),
- CameraMetadata(m.mPhysicalCameraMetadata));
- }
- mTagMonitor.monitorMetadata(TagMonitor::RESULT,
- frameNumber, timestamp.data.i64[0], captureResult.mMetadata,
- monitoredPhysicalMetadata);
-
- insertResultLocked(&captureResult, frameNumber);
-}
-
-/**
- * Camera HAL device callback methods
- */
-
-void Camera3Device::processCaptureResult(const camera3_capture_result *result) {
- ATRACE_CALL();
-
- status_t res;
-
- uint32_t frameNumber = result->frame_number;
- if (result->result == NULL && result->num_output_buffers == 0 &&
- result->input_buffer == NULL) {
- SET_ERR("No result data provided by HAL for frame %d",
- frameNumber);
- return;
- }
-
- if (!mUsePartialResult &&
- result->result != NULL &&
- result->partial_result != 1) {
- SET_ERR("Result is malformed for frame %d: partial_result %u must be 1"
- " if partial result is not supported",
- frameNumber, result->partial_result);
- return;
- }
-
- bool isPartialResult = false;
- CameraMetadata collectedPartialResult;
- bool hasInputBufferInRequest = false;
-
- // Get shutter timestamp and resultExtras from list of in-flight requests,
- // where it was added by the shutter notification for this frame. If the
- // shutter timestamp isn't received yet, append the output buffers to the
- // in-flight request and they will be returned when the shutter timestamp
- // arrives. Update the in-flight status and remove the in-flight entry if
- // all result data and shutter timestamp have been received.
- nsecs_t shutterTimestamp = 0;
-
- {
- Mutex::Autolock l(mInFlightLock);
- ssize_t idx = mInFlightMap.indexOfKey(frameNumber);
- if (idx == NAME_NOT_FOUND) {
- SET_ERR("Unknown frame number for capture result: %d",
- frameNumber);
- return;
- }
- InFlightRequest &request = mInFlightMap.editValueAt(idx);
- ALOGVV("%s: got InFlightRequest requestId = %" PRId32
- ", frameNumber = %" PRId64 ", burstId = %" PRId32
- ", partialResultCount = %d, hasCallback = %d",
- __FUNCTION__, request.resultExtras.requestId,
- request.resultExtras.frameNumber, request.resultExtras.burstId,
- result->partial_result, request.hasCallback);
- // Always update the partial count to the latest one if it's not 0
- // (buffers only). When framework aggregates adjacent partial results
- // into one, the latest partial count will be used.
- if (result->partial_result != 0)
- request.resultExtras.partialResultCount = result->partial_result;
-
- // Check if this result carries only partial metadata
- if (mUsePartialResult && result->result != NULL) {
- if (result->partial_result > mNumPartialResults || result->partial_result < 1) {
- SET_ERR("Result is malformed for frame %d: partial_result %u must be in"
- " the range of [1, %d] when metadata is included in the result",
- frameNumber, result->partial_result, mNumPartialResults);
- return;
- }
- isPartialResult = (result->partial_result < mNumPartialResults);
- if (isPartialResult && result->num_physcam_metadata) {
- SET_ERR("Result is malformed for frame %d: partial_result not allowed for"
- " physical camera result", frameNumber);
- return;
- }
- if (isPartialResult) {
- request.collectedPartialResult.append(result->result);
- }
-
- if (isPartialResult && request.hasCallback) {
- // Send partial capture result
- sendPartialCaptureResult(result->result, request.resultExtras,
- frameNumber);
- }
- }
-
- shutterTimestamp = request.shutterTimestamp;
- hasInputBufferInRequest = request.hasInputBuffer;
-
- // Did we get the (final) result metadata for this capture?
- if (result->result != NULL && !isPartialResult) {
- if (request.physicalCameraIds.size() != result->num_physcam_metadata) {
- SET_ERR("Expected physical Camera metadata count %d not equal to actual count %d",
- request.physicalCameraIds.size(), result->num_physcam_metadata);
- return;
- }
- if (request.haveResultMetadata) {
- SET_ERR("Called multiple times with metadata for frame %d",
- frameNumber);
- return;
- }
- for (uint32_t i = 0; i < result->num_physcam_metadata; i++) {
- String8 physicalId(result->physcam_ids[i]);
- std::set<String8>::iterator cameraIdIter =
- request.physicalCameraIds.find(physicalId);
- if (cameraIdIter != request.physicalCameraIds.end()) {
- request.physicalCameraIds.erase(cameraIdIter);
- } else {
- SET_ERR("Total result for frame %d has already returned for camera %s",
- frameNumber, physicalId.c_str());
- return;
- }
- }
- if (mUsePartialResult &&
- !request.collectedPartialResult.isEmpty()) {
- collectedPartialResult.acquire(
- request.collectedPartialResult);
- }
- request.haveResultMetadata = true;
- }
-
- uint32_t numBuffersReturned = result->num_output_buffers;
- if (result->input_buffer != NULL) {
- if (hasInputBufferInRequest) {
- numBuffersReturned += 1;
- } else {
- ALOGW("%s: Input buffer should be NULL if there is no input"
- " buffer sent in the request",
- __FUNCTION__);
- }
- }
- request.numBuffersLeft -= numBuffersReturned;
- if (request.numBuffersLeft < 0) {
- SET_ERR("Too many buffers returned for frame %d",
- frameNumber);
- return;
- }
-
- camera_metadata_ro_entry_t entry;
- res = find_camera_metadata_ro_entry(result->result,
- ANDROID_SENSOR_TIMESTAMP, &entry);
- if (res == OK && entry.count == 1) {
- request.sensorTimestamp = entry.data.i64[0];
- }
-
- // If shutter event isn't received yet, append the output buffers to
- // the in-flight request. Otherwise, return the output buffers to
- // streams.
- if (shutterTimestamp == 0) {
- request.pendingOutputBuffers.appendArray(result->output_buffers,
- result->num_output_buffers);
- } else {
- bool timestampIncreasing = !(request.zslCapture || request.hasInputBuffer);
- returnOutputBuffers(result->output_buffers,
- result->num_output_buffers, shutterTimestamp, timestampIncreasing,
- request.outputSurfaces, request.resultExtras);
- }
-
- if (result->result != NULL && !isPartialResult) {
- for (uint32_t i = 0; i < result->num_physcam_metadata; i++) {
- CameraMetadata physicalMetadata;
- physicalMetadata.append(result->physcam_metadata[i]);
- request.physicalMetadatas.push_back({String16(result->physcam_ids[i]),
- physicalMetadata});
- }
- if (shutterTimestamp == 0) {
- request.pendingMetadata = result->result;
- request.collectedPartialResult = collectedPartialResult;
- } else if (request.hasCallback) {
- CameraMetadata metadata;
- metadata = result->result;
- sendCaptureResult(metadata, request.resultExtras,
- collectedPartialResult, frameNumber,
- hasInputBufferInRequest, request.zslCapture && request.stillCapture,
- request.cameraIdsWithZoom, request.physicalMetadatas);
- }
- }
-
- removeInFlightRequestIfReadyLocked(idx);
- } // scope for mInFlightLock
-
- if (result->input_buffer != NULL) {
- if (hasInputBufferInRequest) {
- Camera3Stream *stream =
- Camera3Stream::cast(result->input_buffer->stream);
- res = stream->returnInputBuffer(*(result->input_buffer));
- // Note: stream may be deallocated at this point, if this buffer was the
- // last reference to it.
- if (res != OK) {
- ALOGE("%s: RequestThread: Can't return input buffer for frame %d to"
- " its stream:%s (%d)", __FUNCTION__,
- frameNumber, strerror(-res), res);
- }
- } else {
- ALOGW("%s: Input buffer should be NULL if there is no input"
- " buffer sent in the request, skipping input buffer return.",
- __FUNCTION__);
- }
- }
-}
-
-void Camera3Device::notify(const camera3_notify_msg *msg) {
- ATRACE_CALL();
sp<NotificationListener> listener;
{
- Mutex::Autolock l(mOutputLock);
+ std::lock_guard<std::mutex> l(mOutputLock);
listener = mListener.promote();
}
- if (msg == NULL) {
- SET_ERR("HAL sent NULL notify message!");
- return;
- }
+ FlushInflightReqStates states {
+ mId, mInFlightLock, mInFlightMap, mUseHalBufManager,
+ listener, *this, *mInterface, *this};
- switch (msg->type) {
- case CAMERA3_MSG_ERROR: {
- notifyError(msg->message.error, listener);
- break;
- }
- case CAMERA3_MSG_SHUTTER: {
- notifyShutter(msg->message.shutter, listener);
- break;
- }
- default:
- SET_ERR("Unknown notify message from HAL: %d",
- msg->type);
- }
-}
-
-void Camera3Device::notifyError(const camera3_error_msg_t &msg,
- sp<NotificationListener> listener) {
- ATRACE_CALL();
- // Map camera HAL error codes to ICameraDeviceCallback error codes
- // Index into this with the HAL error code
- static const int32_t halErrorMap[CAMERA3_MSG_NUM_ERRORS] = {
- // 0 = Unused error code
- hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_INVALID_ERROR,
- // 1 = CAMERA3_MSG_ERROR_DEVICE
- hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_DEVICE,
- // 2 = CAMERA3_MSG_ERROR_REQUEST
- hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_REQUEST,
- // 3 = CAMERA3_MSG_ERROR_RESULT
- hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_RESULT,
- // 4 = CAMERA3_MSG_ERROR_BUFFER
- hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_BUFFER
- };
-
- int32_t errorCode =
- ((msg.error_code >= 0) &&
- (msg.error_code < CAMERA3_MSG_NUM_ERRORS)) ?
- halErrorMap[msg.error_code] :
- hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_INVALID_ERROR;
-
- int streamId = 0;
- String16 physicalCameraId;
- if (msg.error_stream != NULL) {
- Camera3Stream *stream =
- Camera3Stream::cast(msg.error_stream);
- streamId = stream->getId();
- physicalCameraId = String16(stream->physicalCameraId());
- }
- ALOGV("Camera %s: %s: HAL error, frame %d, stream %d: %d",
- mId.string(), __FUNCTION__, msg.frame_number,
- streamId, msg.error_code);
-
- CaptureResultExtras resultExtras;
- switch (errorCode) {
- case hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_DEVICE:
- // SET_ERR calls notifyError
- SET_ERR("Camera HAL reported serious device error");
- break;
- case hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_REQUEST:
- case hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_RESULT:
- case hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_BUFFER:
- {
- Mutex::Autolock l(mInFlightLock);
- ssize_t idx = mInFlightMap.indexOfKey(msg.frame_number);
- if (idx >= 0) {
- InFlightRequest &r = mInFlightMap.editValueAt(idx);
- r.requestStatus = msg.error_code;
- resultExtras = r.resultExtras;
- bool logicalDeviceResultError = false;
- if (hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_RESULT ==
- errorCode) {
- if (physicalCameraId.size() > 0) {
- String8 cameraId(physicalCameraId);
- auto iter = r.physicalCameraIds.find(cameraId);
- if (iter == r.physicalCameraIds.end()) {
- ALOGE("%s: Reported result failure for physical camera device: %s "
- " which is not part of the respective request!",
- __FUNCTION__, cameraId.string());
- break;
- }
- r.physicalCameraIds.erase(iter);
- resultExtras.errorPhysicalCameraId = physicalCameraId;
- } else {
- logicalDeviceResultError = true;
- }
- }
-
- if (logicalDeviceResultError
- || hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_REQUEST ==
- errorCode) {
- r.skipResultMetadata = true;
- }
- if (logicalDeviceResultError) {
- // In case of a missing result, check whether the buffers have
- // been returned. If they have, then remove the inflight
- // request.
- // TODO: should we call this for ERROR_CAMERA_REQUEST as well?
- // otherwise we are depending on HAL to send the buffers back after
- // calling notifyError. Not sure if that's in the spec.
- removeInFlightRequestIfReadyLocked(idx);
- }
- } else {
- resultExtras.frameNumber = msg.frame_number;
- ALOGE("Camera %s: %s: cannot find in-flight request on "
- "frame %" PRId64 " error", mId.string(), __FUNCTION__,
- resultExtras.frameNumber);
- }
- }
- resultExtras.errorStreamId = streamId;
- if (listener != NULL) {
- listener->notifyError(errorCode, resultExtras);
- } else {
- ALOGE("Camera %s: %s: no listener available", mId.string(), __FUNCTION__);
- }
- break;
- default:
- // SET_ERR calls notifyError
- SET_ERR("Unknown error message from HAL: %d", msg.error_code);
- break;
- }
-}
-
-void Camera3Device::notifyShutter(const camera3_shutter_msg_t &msg,
- sp<NotificationListener> listener) {
- ATRACE_CALL();
- ssize_t idx;
-
- // Set timestamp for the request in the in-flight tracking
- // and get the request ID to send upstream
- {
- Mutex::Autolock l(mInFlightLock);
- idx = mInFlightMap.indexOfKey(msg.frame_number);
- if (idx >= 0) {
- InFlightRequest &r = mInFlightMap.editValueAt(idx);
-
- // Verify ordering of shutter notifications
- {
- Mutex::Autolock l(mOutputLock);
- // TODO: need to track errors for tighter bounds on expected frame number.
- if (r.hasInputBuffer) {
- if (msg.frame_number < mNextReprocessShutterFrameNumber) {
- SET_ERR("Reprocess shutter notification out-of-order. Expected "
- "notification for frame %d, got frame %d",
- mNextReprocessShutterFrameNumber, msg.frame_number);
- return;
- }
- mNextReprocessShutterFrameNumber = msg.frame_number + 1;
- } else if (r.zslCapture && r.stillCapture) {
- if (msg.frame_number < mNextZslStillShutterFrameNumber) {
- SET_ERR("ZSL still capture shutter notification out-of-order. Expected "
- "notification for frame %d, got frame %d",
- mNextZslStillShutterFrameNumber, msg.frame_number);
- return;
- }
- mNextZslStillShutterFrameNumber = msg.frame_number + 1;
- } else {
- if (msg.frame_number < mNextShutterFrameNumber) {
- SET_ERR("Shutter notification out-of-order. Expected "
- "notification for frame %d, got frame %d",
- mNextShutterFrameNumber, msg.frame_number);
- return;
- }
- mNextShutterFrameNumber = msg.frame_number + 1;
- }
- }
-
- r.shutterTimestamp = msg.timestamp;
- if (r.hasCallback) {
- ALOGVV("Camera %s: %s: Shutter fired for frame %d (id %d) at %" PRId64,
- mId.string(), __FUNCTION__,
- msg.frame_number, r.resultExtras.requestId, msg.timestamp);
- // Call listener, if any
- if (listener != NULL) {
- listener->notifyShutter(r.resultExtras, msg.timestamp);
- }
- // send pending result and buffers
- sendCaptureResult(r.pendingMetadata, r.resultExtras,
- r.collectedPartialResult, msg.frame_number,
- r.hasInputBuffer, r.zslCapture && r.stillCapture,
- r.cameraIdsWithZoom, r.physicalMetadatas);
- }
- bool timestampIncreasing = !(r.zslCapture || r.hasInputBuffer);
- returnOutputBuffers(r.pendingOutputBuffers.array(),
- r.pendingOutputBuffers.size(), r.shutterTimestamp, timestampIncreasing,
- r.outputSurfaces, r.resultExtras);
- r.pendingOutputBuffers.clear();
-
- removeInFlightRequestIfReadyLocked(idx);
- }
- }
- if (idx < 0) {
- SET_ERR("Shutter notification for non-existent frame number %d",
- msg.frame_number);
- }
+ camera3::flushInflightRequests(states);
}
CameraMetadata Camera3Device::getLatestRequestLocked() {
@@ -4084,7 +2825,6 @@
return retVal;
}
-
void Camera3Device::monitorMetadata(TagMonitor::eventSource source,
int64_t frameNumber, nsecs_t timestamp, const CameraMetadata& metadata,
const std::unordered_map<std::string, CameraMetadata>& physicalMetadata) {
@@ -4319,20 +3059,10 @@
activeStreams.insert(streamId);
// Create Buffer ID map if necessary
- if (mBufferIdMaps.count(streamId) == 0) {
- mBufferIdMaps.emplace(streamId, BufferIdMap{});
- }
+ mBufferRecords.tryCreateBufferCache(streamId);
}
// remove BufferIdMap for deleted streams
- for(auto it = mBufferIdMaps.begin(); it != mBufferIdMaps.end();) {
- int streamId = it->first;
- bool active = activeStreams.count(streamId) > 0;
- if (!active) {
- it = mBufferIdMaps.erase(it);
- } else {
- ++it;
- }
- }
+ mBufferRecords.removeInactiveBufferCaches(activeStreams);
StreamConfigurationMode operationMode;
res = mapToStreamConfigurationMode(
@@ -4350,6 +3080,7 @@
// Invoke configureStreams
device::V3_3::HalStreamConfiguration finalConfiguration;
device::V3_4::HalStreamConfiguration finalConfiguration3_4;
+ device::V3_6::HalStreamConfiguration finalConfiguration3_6;
common::V1_0::Status status;
auto configStream34Cb = [&status, &finalConfiguration3_4]
@@ -4358,6 +3089,12 @@
status = s;
};
+ auto configStream36Cb = [&status, &finalConfiguration3_6]
+ (common::V1_0::Status s, const device::V3_6::HalStreamConfiguration& halConfiguration) {
+ finalConfiguration3_6 = halConfiguration;
+ status = s;
+ };
+
auto postprocConfigStream34 = [&finalConfiguration, &finalConfiguration3_4]
(hardware::Return<void>& err) -> status_t {
if (!err.isOk()) {
@@ -4371,8 +3108,32 @@
return OK;
};
+ auto postprocConfigStream36 = [&finalConfiguration, &finalConfiguration3_6]
+ (hardware::Return<void>& err) -> status_t {
+ if (!err.isOk()) {
+ ALOGE("%s: Transaction error: %s", __FUNCTION__, err.description().c_str());
+ return DEAD_OBJECT;
+ }
+ finalConfiguration.streams.resize(finalConfiguration3_6.streams.size());
+ for (size_t i = 0; i < finalConfiguration3_6.streams.size(); i++) {
+ finalConfiguration.streams[i] = finalConfiguration3_6.streams[i].v3_4.v3_3;
+ }
+ return OK;
+ };
+
// See which version of HAL we have
- if (mHidlSession_3_5 != nullptr) {
+ if (mHidlSession_3_6 != nullptr) {
+ ALOGV("%s: v3.6 device found", __FUNCTION__);
+ device::V3_5::StreamConfiguration requestedConfiguration3_5;
+ requestedConfiguration3_5.v3_4 = requestedConfiguration3_4;
+ requestedConfiguration3_5.streamConfigCounter = mNextStreamConfigCounter++;
+ auto err = mHidlSession_3_6->configureStreams_3_6(
+ requestedConfiguration3_5, configStream36Cb);
+ res = postprocConfigStream36(err);
+ if (res != OK) {
+ return res;
+ }
+ } else if (mHidlSession_3_5 != nullptr) {
ALOGV("%s: v3.5 device found", __FUNCTION__);
device::V3_5::StreamConfiguration requestedConfiguration3_5;
requestedConfiguration3_5.v3_4 = requestedConfiguration3_4;
@@ -4454,11 +3215,16 @@
return INVALID_OPERATION;
}
device::V3_3::HalStream &src = finalConfiguration.streams[realIdx];
+ device::V3_6::HalStream &src_36 = finalConfiguration3_6.streams[realIdx];
Camera3Stream* dstStream = Camera3Stream::cast(dst);
int overrideFormat = mapToFrameworkFormat(src.v3_2.overrideFormat);
android_dataspace overrideDataSpace = mapToFrameworkDataspace(src.overrideDataSpace);
+ if (mHidlSession_3_6 != nullptr) {
+ dstStream->setOfflineProcessingSupport(src_36.supportOffline);
+ }
+
if (dstStream->getOriginalFormat() != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
dstStream->setFormatOverride(false);
dstStream->setDataSpaceOverride(false);
@@ -4522,7 +3288,6 @@
captureRequest->fmqSettingsSize = 0;
{
- std::lock_guard<std::mutex> lock(mInflightLock);
if (request->input_buffer != nullptr) {
int32_t streamId = Camera3Stream::cast(request->input_buffer->stream)->getId();
buffer_handle_t buf = *(request->input_buffer->buffer);
@@ -4542,7 +3307,7 @@
captureRequest->inputBuffer.acquireFence = acquireFence;
captureRequest->inputBuffer.releaseFence = nullptr;
- pushInflightBufferLocked(captureRequest->frameNumber, streamId,
+ mBufferRecords.pushInflightBuffer(captureRequest->frameNumber, streamId,
request->input_buffer->buffer);
inflightBuffers->push_back(std::make_pair(captureRequest->frameNumber, streamId));
} else {
@@ -4583,7 +3348,8 @@
// Output buffers are empty when using HAL buffer manager
if (!mUseHalBufManager) {
- pushInflightBufferLocked(captureRequest->frameNumber, streamId, src->buffer);
+ mBufferRecords.pushInflightBuffer(
+ captureRequest->frameNumber, streamId, src->buffer);
inflightBuffers->push_back(std::make_pair(captureRequest->frameNumber, streamId));
}
}
@@ -4640,7 +3406,7 @@
/*out*/&handlesCreated, /*out*/&inflightBuffers);
}
if (res != OK) {
- popInflightBuffers(inflightBuffers);
+ mBufferRecords.popInflightBuffers(inflightBuffers);
cleanupNativeHandles(&handlesCreated);
return res;
}
@@ -4648,10 +3414,10 @@
std::vector<device::V3_2::BufferCache> cachesToRemove;
{
- std::lock_guard<std::mutex> lock(mBufferIdMapLock);
+ std::lock_guard<std::mutex> lock(mFreedBuffersLock);
for (auto& pair : mFreedBuffers) {
// The stream might have been removed since onBufferFreed
- if (mBufferIdMaps.find(pair.first) != mBufferIdMaps.end()) {
+ if (mBufferRecords.isStreamCached(pair.first)) {
cachesToRemove.push_back({pair.first, pair.second});
}
}
@@ -4758,7 +3524,7 @@
cleanupNativeHandles(&handlesCreated);
}
} else {
- popInflightBuffers(inflightBuffers);
+ mBufferRecords.popInflightBuffers(inflightBuffers);
cleanupNativeHandles(&handlesCreated);
}
return res;
@@ -4820,20 +3586,20 @@
status_t Camera3Device::HalInterface::switchToOffline(
const std::vector<int32_t>& streamsToKeep,
/*out*/hardware::camera::device::V3_6::CameraOfflineSessionInfo* offlineSessionInfo,
- /*out*/sp<hardware::camera::device::V3_6::ICameraOfflineSession>* offlineSession) {
+ /*out*/sp<hardware::camera::device::V3_6::ICameraOfflineSession>* offlineSession,
+ /*out*/camera3::BufferRecords* bufferRecords) {
ATRACE_NAME("CameraHal::switchToOffline");
if (!valid() || mHidlSession_3_6 == nullptr) {
ALOGE("%s called on invalid camera!", __FUNCTION__);
return INVALID_OPERATION;
}
- if (offlineSessionInfo == nullptr || offlineSession == nullptr) {
- ALOGE("%s: offlineSessionInfo and offlineSession must not be null!", __FUNCTION__);
+ if (offlineSessionInfo == nullptr || offlineSession == nullptr || bufferRecords == nullptr) {
+ ALOGE("%s: output arguments must not be null!", __FUNCTION__);
return INVALID_OPERATION;
}
common::V1_0::Status status = common::V1_0::Status::INTERNAL_ERROR;
-
auto resultCallback =
[&status, &offlineSessionInfo, &offlineSession] (auto s, auto info, auto session) {
status = s;
@@ -4846,75 +3612,65 @@
ALOGE("%s: Transaction error: %s", __FUNCTION__, err.description().c_str());
return DEAD_OBJECT;
}
- return CameraProviderManager::mapToStatusT(status);
+
+ status_t ret = CameraProviderManager::mapToStatusT(status);
+ if (ret != OK) {
+ return ret;
+ }
+
+ // TODO: assert no ongoing requestBuffer/returnBuffer call here
+ // TODO: update RequestBufferStateMachine to block requestBuffer/returnBuffer once HAL
+ // returns from switchToOffline.
+
+
+ // Validate buffer caches
+ std::vector<int32_t> streams;
+ streams.reserve(offlineSessionInfo->offlineStreams.size());
+ for (auto offlineStream : offlineSessionInfo->offlineStreams) {
+ int32_t id = offlineStream.id;
+ streams.push_back(id);
+ // Verify buffer caches
+ std::vector<uint64_t> bufIds(offlineStream.circulatingBufferIds.begin(),
+ offlineStream.circulatingBufferIds.end());
+ if (!verifyBufferIds(id, bufIds)) {
+ ALOGE("%s: stream ID %d buffer cache records mismatch!", __FUNCTION__, id);
+ return UNKNOWN_ERROR;
+ }
+ }
+
+ // Move buffer records
+ bufferRecords->takeBufferCaches(mBufferRecords, streams);
+ bufferRecords->takeInflightBufferMap(mBufferRecords);
+ bufferRecords->takeRequestedBufferMap(mBufferRecords);
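+ // From this point on the offline session owns the buffer caches and inflight
+ // maps for the kept streams; this HalInterface no longer tracks them.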
+ return ret;
}
void Camera3Device::HalInterface::getInflightBufferKeys(
std::vector<std::pair<int32_t, int32_t>>* out) {
- std::lock_guard<std::mutex> lock(mInflightLock);
- out->clear();
- out->reserve(mInflightBufferMap.size());
- for (auto& pair : mInflightBufferMap) {
- uint64_t key = pair.first;
- int32_t streamId = key & 0xFFFFFFFF;
- int32_t frameNumber = (key >> 32) & 0xFFFFFFFF;
- out->push_back(std::make_pair(frameNumber, streamId));
- }
+ mBufferRecords.getInflightBufferKeys(out);
return;
}
void Camera3Device::HalInterface::getInflightRequestBufferKeys(
std::vector<uint64_t>* out) {
- std::lock_guard<std::mutex> lock(mRequestedBuffersLock);
- out->clear();
- out->reserve(mRequestedBuffers.size());
- for (auto& pair : mRequestedBuffers) {
- out->push_back(pair.first);
- }
+ mBufferRecords.getInflightRequestBufferKeys(out);
return;
}
-status_t Camera3Device::HalInterface::pushInflightBufferLocked(
- int32_t frameNumber, int32_t streamId, buffer_handle_t *buffer) {
- uint64_t key = static_cast<uint64_t>(frameNumber) << 32 | static_cast<uint64_t>(streamId);
- mInflightBufferMap[key] = buffer;
- return OK;
+bool Camera3Device::HalInterface::verifyBufferIds(
+ int32_t streamId, std::vector<uint64_t>& bufIds) {
+ return mBufferRecords.verifyBufferIds(streamId, bufIds);
}
status_t Camera3Device::HalInterface::popInflightBuffer(
int32_t frameNumber, int32_t streamId,
/*out*/ buffer_handle_t **buffer) {
- std::lock_guard<std::mutex> lock(mInflightLock);
-
- uint64_t key = static_cast<uint64_t>(frameNumber) << 32 | static_cast<uint64_t>(streamId);
- auto it = mInflightBufferMap.find(key);
- if (it == mInflightBufferMap.end()) return NAME_NOT_FOUND;
- if (buffer != nullptr) {
- *buffer = it->second;
- }
- mInflightBufferMap.erase(it);
- return OK;
-}
-
-void Camera3Device::HalInterface::popInflightBuffers(
- const std::vector<std::pair<int32_t, int32_t>>& buffers) {
- for (const auto& pair : buffers) {
- int32_t frameNumber = pair.first;
- int32_t streamId = pair.second;
- popInflightBuffer(frameNumber, streamId, nullptr);
- }
+ return mBufferRecords.popInflightBuffer(frameNumber, streamId, buffer);
}
status_t Camera3Device::HalInterface::pushInflightRequestBuffer(
uint64_t bufferId, buffer_handle_t* buf, int32_t streamId) {
- std::lock_guard<std::mutex> lock(mRequestedBuffersLock);
- auto pair = mRequestedBuffers.insert({bufferId, {streamId, buf}});
- if (!pair.second) {
- ALOGE("%s: bufId %" PRIu64 " is already inflight!",
- __FUNCTION__, bufferId);
- return BAD_VALUE;
- }
- return OK;
+ return mBufferRecords.pushInflightRequestBuffer(bufferId, buf, streamId);
}
// Find and pop a buffer_handle_t based on bufferId
@@ -4922,81 +3678,29 @@
uint64_t bufferId,
/*out*/ buffer_handle_t** buffer,
/*optional out*/ int32_t* streamId) {
- if (buffer == nullptr) {
- ALOGE("%s: buffer (%p) must not be null", __FUNCTION__, buffer);
- return BAD_VALUE;
- }
- std::lock_guard<std::mutex> lock(mRequestedBuffersLock);
- auto it = mRequestedBuffers.find(bufferId);
- if (it == mRequestedBuffers.end()) {
- ALOGE("%s: bufId %" PRIu64 " is not inflight!",
- __FUNCTION__, bufferId);
- return BAD_VALUE;
- }
- *buffer = it->second.second;
- if (streamId != nullptr) {
- *streamId = it->second.first;
- }
- mRequestedBuffers.erase(it);
- return OK;
+ return mBufferRecords.popInflightRequestBuffer(bufferId, buffer, streamId);
}
std::pair<bool, uint64_t> Camera3Device::HalInterface::getBufferId(
const buffer_handle_t& buf, int streamId) {
- std::lock_guard<std::mutex> lock(mBufferIdMapLock);
-
- BufferIdMap& bIdMap = mBufferIdMaps.at(streamId);
- auto it = bIdMap.find(buf);
- if (it == bIdMap.end()) {
- bIdMap[buf] = mNextBufferId++;
- ALOGV("stream %d now have %zu buffer caches, buf %p",
- streamId, bIdMap.size(), buf);
- return std::make_pair(true, mNextBufferId - 1);
- } else {
- return std::make_pair(false, it->second);
- }
+ return mBufferRecords.getBufferId(buf, streamId);
}
void Camera3Device::HalInterface::onBufferFreed(
int streamId, const native_handle_t* handle) {
- std::lock_guard<std::mutex> lock(mBufferIdMapLock);
- uint64_t bufferId = BUFFER_ID_NO_BUFFER;
- auto mapIt = mBufferIdMaps.find(streamId);
- if (mapIt == mBufferIdMaps.end()) {
- // streamId might be from a deleted stream here
- ALOGI("%s: stream %d has been removed",
- __FUNCTION__, streamId);
- return;
+ uint64_t bufferId = mBufferRecords.removeOneBufferCache(streamId, handle);
+ std::lock_guard<std::mutex> lock(mFreedBuffersLock);
+ if (bufferId != BUFFER_ID_NO_BUFFER) {
+ mFreedBuffers.push_back(std::make_pair(streamId, bufferId));
}
- BufferIdMap& bIdMap = mapIt->second;
- auto it = bIdMap.find(handle);
- if (it == bIdMap.end()) {
- ALOGW("%s: cannot find buffer %p in stream %d",
- __FUNCTION__, handle, streamId);
- return;
- } else {
- bufferId = it->second;
- bIdMap.erase(it);
- ALOGV("%s: stream %d now have %zu buffer caches after removing buf %p",
- __FUNCTION__, streamId, bIdMap.size(), handle);
- }
- mFreedBuffers.push_back(std::make_pair(streamId, bufferId));
}
void Camera3Device::HalInterface::onStreamReConfigured(int streamId) {
- std::lock_guard<std::mutex> lock(mBufferIdMapLock);
- auto mapIt = mBufferIdMaps.find(streamId);
- if (mapIt == mBufferIdMaps.end()) {
- ALOGE("%s: streamId %d not found!", __FUNCTION__, streamId);
- return;
- }
-
- BufferIdMap& bIdMap = mapIt->second;
- for (const auto& it : bIdMap) {
- uint64_t bufferId = it.second;
+ std::vector<uint64_t> bufIds = mBufferRecords.clearBufferCaches(streamId);
+ std::lock_guard<std::mutex> lock(mFreedBuffersLock);
+ for (auto bufferId : bufIds) {
mFreedBuffers.push_back(std::make_pair(streamId, bufferId));
}
- bIdMap.clear();
}
/**
@@ -6030,17 +4734,22 @@
status_t Camera3Device::RequestThread::switchToOffline(
const std::vector<int32_t>& streamsToKeep,
/*out*/hardware::camera::device::V3_6::CameraOfflineSessionInfo* offlineSessionInfo,
- /*out*/sp<hardware::camera::device::V3_6::ICameraOfflineSession>* offlineSession) {
+ /*out*/sp<hardware::camera::device::V3_6::ICameraOfflineSession>* offlineSession,
+ /*out*/camera3::BufferRecords* bufferRecords) {
Mutex::Autolock l(mRequestLock);
clearRepeatingRequestsLocked(/*lastFrameNumber*/nullptr);
- // Theoretically we should also check for mRepeatingRequests.empty(), but the API interface
- // is serialized by mInterfaceLock so skip that check.
+ // Wait until request thread is fully stopped
+ // TBD: check if request thread is being paused by other APIs (shouldn't be)
+
+ // We could also check for mRepeatingRequests.empty(), but the API interface
+ // is serialized by Camera3Device::mInterfaceLock so no one should be able to submit any
+ // new requests during the call; hence skip that check.
bool queueEmpty = mNextRequests.empty() && mRequestQueue.empty();
while (!queueEmpty) {
status_t res = mRequestSubmittedSignal.waitRelative(mRequestLock, kRequestSubmitTimeout);
if (res == TIMED_OUT) {
- ALOGE("%s: request thread failed to submit a request within timeout!", __FUNCTION__);
+ ALOGE("%s: request thread failed to submit one request within timeout!", __FUNCTION__);
return res;
} else if (res != OK) {
ALOGE("%s: request thread failed to submit a request: %s (%d)!",
@@ -6049,13 +4758,14 @@
}
queueEmpty = mNextRequests.empty() && mRequestQueue.empty();
}
+
return mInterface->switchToOffline(
- streamsToKeep, offlineSessionInfo, offlineSession);
+ streamsToKeep, offlineSessionInfo, offlineSession, bufferRecords);
}
nsecs_t Camera3Device::getExpectedInFlightDuration() {
ATRACE_CALL();
- Mutex::Autolock al(mInFlightLock);
+ std::lock_guard<std::mutex> l(mInFlightLock);
return mExpectedInflightDuration > kMinInflightDuration ?
mExpectedInflightDuration : kMinInflightDuration;
}
@@ -6141,7 +4851,7 @@
{
sp<Camera3Device> parent = mParent.promote();
if (parent != NULL) {
- Mutex::Autolock l(parent->mInFlightLock);
+ std::lock_guard<std::mutex> l(parent->mInFlightLock);
ssize_t idx = parent->mInFlightMap.indexOfKey(captureRequest->mResultExtras.frameNumber);
if (idx >= 0) {
ALOGV("%s: Remove inflight request from queue: frameNumber %" PRId64,
@@ -6830,6 +5540,7 @@
void Camera3Device::RequestBufferStateMachine::onStreamsConfigured() {
std::lock_guard<std::mutex> lock(mLock);
+ mSwitchedToOffline = false;
mStatus = RB_STATUS_READY;
return;
}
@@ -6872,6 +5583,20 @@
return;
}
+bool Camera3Device::RequestBufferStateMachine::onSwitchToOfflineSuccess() {
+ std::lock_guard<std::mutex> lock(mLock);
+ if (mRequestBufferOngoing) {
+ ALOGE("%s: HAL must not be requesting buffer after HAL returns switchToOffline!",
+ __FUNCTION__);
+ return false;
+ }
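+ // The request thread has already drained (see RequestThread::switchToOffline)
+ // and in-flight tracking moves to the offline session, so the state machine
+ // can be marked as stopped here.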
+ mSwitchedToOffline = true;
+ mInflightMapEmpty = true;
+ mRequestThreadPaused = true;
+ mStatus = RB_STATUS_STOPPED;
+ return true;
+}
+
void Camera3Device::RequestBufferStateMachine::notifyTrackerLocked(bool active) {
sp<StatusTracker> statusTracker = mStatusTracker.promote();
if (statusTracker != nullptr) {
@@ -6891,75 +5616,40 @@
return false;
}
-status_t Camera3Device::fixupMonochromeTags(const CameraMetadata& deviceInfo,
- CameraMetadata& resultMetadata) {
- status_t res = OK;
- if (!mNeedFixupMonochromeTags) {
- return res;
- }
+bool Camera3Device::startRequestBuffer() {
+ return mRequestBufferSM.startRequestBuffer();
+}
- // Remove tags that are not applicable to monochrome camera.
- int32_t tagsToRemove[] = {
- ANDROID_SENSOR_GREEN_SPLIT,
- ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
- ANDROID_COLOR_CORRECTION_MODE,
- ANDROID_COLOR_CORRECTION_TRANSFORM,
- ANDROID_COLOR_CORRECTION_GAINS,
- };
- for (auto tag : tagsToRemove) {
- res = resultMetadata.erase(tag);
- if (res != OK) {
- ALOGE("%s: Failed to remove tag %d for monochrome camera", __FUNCTION__, tag);
- return res;
- }
- }
+void Camera3Device::endRequestBuffer() {
+ mRequestBufferSM.endRequestBuffer();
+}
- // ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL
- camera_metadata_entry blEntry = resultMetadata.find(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
- for (size_t i = 1; i < blEntry.count; i++) {
- blEntry.data.f[i] = blEntry.data.f[0];
- }
+nsecs_t Camera3Device::getWaitDuration() {
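+ // RequestBufferInterface: base wait plus the device's current expected
+ // in-flight duration (which grows with long exposures / frame durations).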
+ return kBaseGetBufferWait + getExpectedInFlightDuration();
+}
- // ANDROID_SENSOR_NOISE_PROFILE
- camera_metadata_entry npEntry = resultMetadata.find(ANDROID_SENSOR_NOISE_PROFILE);
- if (npEntry.count > 0 && npEntry.count % 2 == 0) {
- double np[] = {npEntry.data.d[0], npEntry.data.d[1]};
- res = resultMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, np, 2);
- if (res != OK) {
- ALOGE("%s: Failed to update SENSOR_NOISE_PROFILE: %s (%d)",
- __FUNCTION__, strerror(-res), res);
- return res;
- }
- }
+void Camera3Device::getInflightBufferKeys(std::vector<std::pair<int32_t, int32_t>>* out) {
+ mInterface->getInflightBufferKeys(out);
+}
- // ANDROID_STATISTICS_LENS_SHADING_MAP
- camera_metadata_ro_entry lsSizeEntry = deviceInfo.find(ANDROID_LENS_INFO_SHADING_MAP_SIZE);
- camera_metadata_entry lsEntry = resultMetadata.find(ANDROID_STATISTICS_LENS_SHADING_MAP);
- if (lsSizeEntry.count == 2 && lsEntry.count > 0
- && (int32_t)lsEntry.count == 4 * lsSizeEntry.data.i32[0] * lsSizeEntry.data.i32[1]) {
- for (int32_t i = 0; i < lsSizeEntry.data.i32[0] * lsSizeEntry.data.i32[1]; i++) {
- lsEntry.data.f[4*i+1] = lsEntry.data.f[4*i];
- lsEntry.data.f[4*i+2] = lsEntry.data.f[4*i];
- lsEntry.data.f[4*i+3] = lsEntry.data.f[4*i];
- }
- }
+void Camera3Device::getInflightRequestBufferKeys(std::vector<uint64_t>* out) {
+ mInterface->getInflightRequestBufferKeys(out);
+}
- // ANDROID_TONEMAP_CURVE_BLUE
- // ANDROID_TONEMAP_CURVE_GREEN
- // ANDROID_TONEMAP_CURVE_RED
- camera_metadata_entry tcbEntry = resultMetadata.find(ANDROID_TONEMAP_CURVE_BLUE);
- camera_metadata_entry tcgEntry = resultMetadata.find(ANDROID_TONEMAP_CURVE_GREEN);
- camera_metadata_entry tcrEntry = resultMetadata.find(ANDROID_TONEMAP_CURVE_RED);
- if (tcbEntry.count > 0
- && tcbEntry.count == tcgEntry.count
- && tcbEntry.count == tcrEntry.count) {
- for (size_t i = 0; i < tcbEntry.count; i++) {
- tcbEntry.data.f[i] = tcrEntry.data.f[i];
- tcgEntry.data.f[i] = tcrEntry.data.f[i];
- }
+std::vector<sp<Camera3StreamInterface>> Camera3Device::getAllStreams() {
+ std::vector<sp<Camera3StreamInterface>> ret;
+ bool hasInputStream = mInputStream != nullptr;
+ ret.reserve(mOutputStreams.size() + mDeletedStreams.size() + ((hasInputStream) ? 1 : 0));
+ if (hasInputStream) {
+ ret.push_back(mInputStream);
}
-
- return res;
+ for (size_t i = 0; i < mOutputStreams.size(); i++) {
+ ret.push_back(mOutputStreams[i]);
+ }
+ for (size_t i = 0; i < mDeletedStreams.size(); i++) {
+ ret.push_back(mDeletedStreams[i]);
+ }
+ return ret;
}
status_t Camera3Device::switchToOffline(
@@ -6972,24 +5662,231 @@
}
Mutex::Autolock il(mInterfaceLock);
- Mutex::Autolock l(mLock);
- // 1. Stop repeating request, wait until request thread submitted all requests, then
- // call HAL switchToOffline
+ bool hasInputStream = mInputStream != nullptr;
+ int32_t inputStreamId = hasInputStream ? mInputStream->getId() : -1;
+ bool inputStreamSupportsOffline = hasInputStream ?
+ mInputStream->getOfflineProcessingSupport() : false;
+ auto outputStreamIds = mOutputStreams.getStreamIds();
+ auto streamIds = outputStreamIds;
+ if (hasInputStream) {
+ streamIds.push_back(mInputStream->getId());
+ }
+
+ // Check that every stream in streamsToKeep exists and supports offline mode
+ for (auto id : streamsToKeep) {
+ if (std::find(streamIds.begin(), streamIds.end(), id) == streamIds.end()) {
+ ALOGE("%s: Unknown stream ID %d", __FUNCTION__, id);
+ return BAD_VALUE;
+ } else if (id == inputStreamId) {
+ if (!inputStreamSupportsOffline) {
+ ALOGE("%s: input stream %d cannot be switched to offline",
+ __FUNCTION__, id);
+ return BAD_VALUE;
+ }
+ } else {
+ sp<camera3::Camera3OutputStreamInterface> stream = mOutputStreams.get(id);
+ if (!stream->getOfflineProcessingSupport()) {
+ ALOGE("%s: output stream %d cannot be switched to offline",
+ __FUNCTION__, id);
+ return BAD_VALUE;
+ }
+ }
+ }
+
+ // TODO: block surface sharing and surface group streams until we can support them
+
+ // Stop repeating request, wait until all remaining requests are submitted, then call into
+ // HAL switchToOffline
hardware::camera::device::V3_6::CameraOfflineSessionInfo offlineSessionInfo;
sp<hardware::camera::device::V3_6::ICameraOfflineSession> offlineSession;
+ camera3::BufferRecords bufferRecords;
+ status_t ret = mRequestThread->switchToOffline(
+ streamsToKeep, &offlineSessionInfo, &offlineSession, &bufferRecords);
- mRequestThread->switchToOffline(streamsToKeep, &offlineSessionInfo, &offlineSession);
+ if (ret != OK) {
+ SET_ERR("Switch to offline failed: %s (%d)", strerror(-ret), ret);
+ return ret;
+ }
+ bool succ = mRequestBufferSM.onSwitchToOfflineSuccess();
+ if (!succ) {
+ SET_ERR("HAL must not be calling requestStreamBuffers call");
+ // TODO: block ALL callbacks from HAL till app configured new streams?
+ return UNKNOWN_ERROR;
+ }
- // 2. Verify offlineSessionInfo
- // 3. create Camera3OfflineSession and transfer object ownership
+ // Verify offlineSessionInfo
+ std::vector<int32_t> offlineStreamIds;
+ offlineStreamIds.reserve(offlineSessionInfo.offlineStreams.size());
+ for (auto offlineStream : offlineSessionInfo.offlineStreams) {
+ // verify stream IDs
+ int32_t id = offlineStream.id;
+ if (std::find(streamIds.begin(), streamIds.end(), id) == streamIds.end()) {
+ SET_ERR("stream ID %d not found!", id);
+ return UNKNOWN_ERROR;
+ }
+
+ // When not using HAL buf manager, only allow streams requested by app to be preserved
+ if (!mUseHalBufManager) {
+ if (std::find(streamsToKeep.begin(), streamsToKeep.end(), id) == streamsToKeep.end()) {
+ SET_ERR("stream ID %d must not be switched to offline!", id);
+ return UNKNOWN_ERROR;
+ }
+ }
+
+ offlineStreamIds.push_back(id);
+ sp<Camera3StreamInterface> stream = (id == inputStreamId) ?
+ static_cast<sp<Camera3StreamInterface>>(mInputStream) :
+ static_cast<sp<Camera3StreamInterface>>(mOutputStreams.get(id));
+ // Verify number of outstanding buffers
+ if (stream->getOutstandingBuffersCount() != offlineStream.numOutstandingBuffers) {
+ SET_ERR("Offline stream %d # of remaining buffer mismatch: (%zu,%d) (service/HAL)",
+ id, stream->getOutstandingBuffersCount(), offlineStream.numOutstandingBuffers);
+ return UNKNOWN_ERROR;
+ }
+ }
+
+ // Verify all streams to be deleted don't have any outstanding buffers
+ if (hasInputStream && std::find(offlineStreamIds.begin(), offlineStreamIds.end(),
+ inputStreamId) == offlineStreamIds.end()) {
+ if (mInputStream->hasOutstandingBuffers()) {
+ SET_ERR("Input stream %d still has %zu outstanding buffer!",
+ inputStreamId, mInputStream->getOutstandingBuffersCount());
+ return UNKNOWN_ERROR;
+ }
+ }
+
+ for (const auto& outStreamId : outputStreamIds) {
+ if (std::find(offlineStreamIds.begin(), offlineStreamIds.end(),
+ outStreamId) == offlineStreamIds.end()) {
+ auto outStream = mOutputStreams.get(outStreamId);
+ if (outStream->hasOutstandingBuffers()) {
+ SET_ERR("Output stream %d still has %zu outstanding buffer!",
+ outStreamId, outStream->getOutstandingBuffersCount());
+ return UNKNOWN_ERROR;
+ }
+ }
+ }
+
+ InFlightRequestMap offlineReqs;
+ // Verify inflight requests and their pending buffers
+ {
+ std::lock_guard<std::mutex> l(mInFlightLock);
+ for (auto offlineReq : offlineSessionInfo.offlineRequests) {
+ int idx = mInFlightMap.indexOfKey(offlineReq.frameNumber);
+ if (idx == NAME_NOT_FOUND) {
+ SET_ERR("Offline request frame number %d not found!", offlineReq.frameNumber);
+ return UNKNOWN_ERROR;
+ }
+
+ const auto& inflightReq = mInFlightMap.valueAt(idx);
+ // TODO: check specific stream IDs
+ size_t numBuffersLeft = static_cast<size_t>(inflightReq.numBuffersLeft);
+ if (numBuffersLeft != offlineReq.pendingStreams.size()) {
+ SET_ERR("Offline request # of remaining buffer mismatch: (%d,%d) (service/HAL)",
+ inflightReq.numBuffersLeft, offlineReq.pendingStreams.size());
+ return UNKNOWN_ERROR;
+ }
+ offlineReqs.add(offlineReq.frameNumber, inflightReq);
+ }
+ }
+
+ // Create Camera3OfflineSession and transfer object ownership
// (streams, inflight requests, buffer caches)
- *session = new Camera3OfflineSession(mId);
+ camera3::StreamSet offlineStreamSet;
+ sp<camera3::Camera3Stream> inputStream;
+ for (auto offlineStream : offlineSessionInfo.offlineStreams) {
+ int32_t id = offlineStream.id;
+ if (mInputStream != nullptr && id == mInputStream->getId()) {
+ inputStream = mInputStream;
+ } else {
+ offlineStreamSet.add(id, mOutputStreams.get(id));
+ }
+ }
- // 4. Delete unneeded streams
- // 5. Verify Camera3Device is in unconfigured state
+ // TODO: check if we need to lock before copying states
+ // though technically no other thread should be talking to Camera3Device at this point
+ Camera3OfflineStates offlineStates(
+ mTagMonitor, mVendorTagId, mUseHalBufManager, mNeedFixupMonochromeTags,
+ mUsePartialResult, mNumPartialResults, mNextResultFrameNumber,
+ mNextReprocessResultFrameNumber, mNextZslStillResultFrameNumber,
+ mNextShutterFrameNumber, mNextReprocessShutterFrameNumber,
+ mNextZslStillShutterFrameNumber, mDeviceInfo, mPhysicalDeviceInfoMap,
+ mDistortionMappers, mZoomRatioMappers);
+
+ *session = new Camera3OfflineSession(mId, inputStream, offlineStreamSet,
+ std::move(bufferRecords), offlineReqs, offlineStates, offlineSession);
+
+ // Delete all streams that have been transferred to the offline session
+ Mutex::Autolock l(mLock);
+ for (auto offlineStream : offlineSessionInfo.offlineStreams) {
+ int32_t id = offlineStream.id;
+ if (mInputStream != nullptr && id == mInputStream->getId()) {
+ mInputStream.clear();
+ } else {
+ mOutputStreams.remove(id);
+ }
+ }
+
+ // Disconnect all remaining streams and switch to UNCONFIGURED state
+ if (mInputStream != nullptr) {
+ ret = mInputStream->disconnect();
+ if (ret != OK) {
+ SET_ERR_L("disconnect input stream failed!");
+ return UNKNOWN_ERROR;
+ }
+ }
+
+ for (auto streamId : mOutputStreams.getStreamIds()) {
+ sp<Camera3StreamInterface> stream = mOutputStreams.get(streamId);
+ ret = stream->disconnect();
+ if (ret != OK) {
+ SET_ERR_L("disconnect output stream %d failed!", streamId);
+ return UNKNOWN_ERROR;
+ }
+ }
+
+ mInputStream.clear();
+ mOutputStreams.clear();
+ mNeedConfig = true;
+ internalUpdateStatusLocked(STATUS_UNCONFIGURED);
+ mOperatingMode = NO_MODE;
+ mIsConstrainedHighSpeedConfiguration = false;
+
return OK;
+ // To be done by CameraDeviceClient/Camera3OfflineSession:
+ // Register the offline client with the camera service
+ // Set up result passing threads etc
+ // Initialize offline session so HAL can start sending callbacks to it (result Fmq)
+ // TODO: check how many onIdle callbacks will be sent
+ // Java side to make sure the CameraCaptureSession is properly closed
+}
+
+void Camera3Device::getOfflineStreamIds(std::vector<int> *offlineStreamIds) {
+ ATRACE_CALL();
+
+ if (offlineStreamIds == nullptr) {
+ return;
+ }
+
+ Mutex::Autolock il(mInterfaceLock);
+
+ auto streamIds = mOutputStreams.getStreamIds();
+ bool hasInputStream = mInputStream != nullptr;
+ if (hasInputStream && mInputStream->getOfflineProcessingSupport()) {
+ offlineStreamIds->push_back(mInputStream->getId());
+ }
+
+ for (const auto & streamId : streamIds) {
+ sp<camera3::Camera3OutputStreamInterface> stream = mOutputStreams.get(streamId);
+ // Streams that use the camera buffer manager are currently not supported in
+ // offline mode
+ if (stream->getOfflineProcessingSupport() &&
+ (stream->getStreamSetId() == CAMERA3_STREAM_SET_ID_INVALID)) {
+ offlineStreamIds->push_back(streamId);
+ }
+ }
}
}; // namespace android
diff --git a/services/camera/libcameraservice/device3/Camera3Device.h b/services/camera/libcameraservice/device3/Camera3Device.h
index 5faabd1..7279baf 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.h
+++ b/services/camera/libcameraservice/device3/Camera3Device.h
@@ -43,10 +43,14 @@
#include <camera/CaptureResult.h>
#include "common/CameraDeviceBase.h"
+#include "device3/BufferUtils.h"
#include "device3/StatusTracker.h"
#include "device3/Camera3BufferManager.h"
#include "device3/DistortionMapper.h"
#include "device3/ZoomRatioMapper.h"
+#include "device3/InFlightRequest.h"
+#include "device3/Camera3OutputInterface.h"
+#include "device3/Camera3OfflineSession.h"
#include "utils/TagMonitor.h"
#include "utils/LatencyHistogram.h"
#include <camera_metadata_hidden.h>
@@ -69,7 +73,11 @@
*/
class Camera3Device :
public CameraDeviceBase,
- virtual public hardware::camera::device::V3_5::ICameraDeviceCallback {
+ virtual public hardware::camera::device::V3_5::ICameraDeviceCallback,
+ public camera3::SetErrorInterface,
+ public camera3::InflightRequestUpdateInterface,
+ public camera3::RequestBufferInterface,
+ public camera3::FlushBufferInterface {
public:
explicit Camera3Device(const String8& id);
@@ -142,6 +150,8 @@
status_t getInputBufferProducer(
sp<IGraphicBufferProducer> *producer) override;
+ void getOfflineStreamIds(std::vector<int> *offlineStreamIds) override;
+
status_t createDefaultRequest(int templateId, CameraMetadata *request) override;
// Transitions to the idle state on success
@@ -201,6 +211,16 @@
status_t switchToOffline(const std::vector<int32_t>& streamsToKeep,
/*out*/ sp<CameraOfflineSessionBase>* session) override;
+ // RequestBufferInterface
+ bool startRequestBuffer() override;
+ void endRequestBuffer() override;
+ nsecs_t getWaitDuration() override;
+
+ // FlushBufferInterface
+ void getInflightBufferKeys(std::vector<std::pair<int32_t, int32_t>>* out) override;
+ void getInflightRequestBufferKeys(std::vector<uint64_t>* out) override;
+ std::vector<sp<camera3::Camera3StreamInterface>> getAllStreams() override;
+
/**
* Helper functions to map between framework and HIDL values
*/
@@ -213,8 +233,6 @@
// Returns a negative error code if the passed-in operation mode is not valid.
static status_t mapToStreamConfigurationMode(camera3_stream_configuration_mode_t operationMode,
/*out*/ hardware::camera::device::V3_2::StreamConfigurationMode *mode);
- static camera3_buffer_status_t mapHidlBufferStatus(
- hardware::camera::device::V3_2::BufferStatus status);
static int mapToFrameworkFormat(hardware::graphics::common::V1_0::PixelFormat pixelFormat);
static android_dataspace mapToFrameworkDataspace(
hardware::camera::device::V3_2::DataspaceFlags);
@@ -229,7 +247,6 @@
// internal typedefs
using RequestMetadataQueue = hardware::MessageQueue<uint8_t, hardware::kSynchronizedReadWrite>;
- using ResultMetadataQueue = hardware::MessageQueue<uint8_t, hardware::kSynchronizedReadWrite>;
static const size_t kDumpLockAttempts = 10;
static const size_t kDumpSleepDuration = 100000; // 0.10 sec
@@ -283,7 +300,8 @@
* Adapter for legacy HAL / HIDL HAL interface calls; calls either into legacy HALv3 or the
* HIDL HALv3 interfaces.
*/
- class HalInterface : public camera3::Camera3StreamBufferFreedListener {
+ class HalInterface : public camera3::Camera3StreamBufferFreedListener,
+ public camera3::BufferRecordsInterface {
public:
HalInterface(sp<hardware::camera::device::V3_2::ICameraDeviceSession> &session,
std::shared_ptr<RequestMetadataQueue> queue,
@@ -321,27 +339,31 @@
bool isReconfigurationRequired(CameraMetadata& oldSessionParams,
CameraMetadata& newSessionParams);
+ // Upon successful return, HalInterface will return buffer maps needed for offline
+ // processing, and clear all its internal buffer maps.
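+ // The caller is expected to hand the returned BufferRecords over to the new
+ // Camera3OfflineSession (see Camera3Device::switchToOffline).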
status_t switchToOffline(
const std::vector<int32_t>& streamsToKeep,
/*out*/hardware::camera::device::V3_6::CameraOfflineSessionInfo* offlineSessionInfo,
- /*out*/sp<hardware::camera::device::V3_6::ICameraOfflineSession>* offlineSession);
+ /*out*/sp<hardware::camera::device::V3_6::ICameraOfflineSession>* offlineSession,
+ /*out*/camera3::BufferRecords* bufferRecords);
- // method to extract buffer's unique ID
- // return pair of (newlySeenBuffer?, bufferId)
- std::pair<bool, uint64_t> getBufferId(const buffer_handle_t& buf, int streamId);
+ /////////////////////////////////////////////////////////////////////
+ // Implements BufferRecordsInterface
- // Find a buffer_handle_t based on frame number and stream ID
+ std::pair<bool, uint64_t> getBufferId(
+ const buffer_handle_t& buf, int streamId) override;
+
status_t popInflightBuffer(int32_t frameNumber, int32_t streamId,
- /*out*/ buffer_handle_t **buffer);
+ /*out*/ buffer_handle_t **buffer) override;
- // Register a bufId (streamId, buffer_handle_t) to inflight request buffer
status_t pushInflightRequestBuffer(
- uint64_t bufferId, buffer_handle_t* buf, int32_t streamId);
+ uint64_t bufferId, buffer_handle_t* buf, int32_t streamId) override;
- // Find a buffer_handle_t based on bufferId
status_t popInflightRequestBuffer(uint64_t bufferId,
/*out*/ buffer_handle_t** buffer,
- /*optional out*/ int32_t* streamId = nullptr);
+ /*optional out*/ int32_t* streamId = nullptr) override;
+
+ /////////////////////////////////////////////////////////////////////
// Get a vector of (frameNumber, streamId) pair of currently inflight
// buffers
@@ -352,7 +374,6 @@
void onStreamReConfigured(int streamId);
- static const uint64_t BUFFER_ID_NO_BUFFER = 0;
private:
// Always valid
sp<hardware::camera::device::V3_2::ICameraDeviceSession> mHidlSession;
@@ -367,8 +388,6 @@
std::shared_ptr<RequestMetadataQueue> mRequestMetadataQueue;
- std::mutex mInflightLock;
-
// The output HIDL request still depends on input camera3_capture_request_t
// Do not free input camera3_capture_request_t before output HIDL request
status_t wrapAsHidlRequest(camera3_capture_request_t* in,
@@ -382,55 +401,20 @@
// Pop inflight buffers based on pairs of (frameNumber,streamId)
void popInflightBuffers(const std::vector<std::pair<int32_t, int32_t>>& buffers);
- // Cache of buffer handles keyed off (frameNumber << 32 | streamId)
- std::unordered_map<uint64_t, buffer_handle_t*> mInflightBufferMap;
+ // Return true if the input caches match what we have; otherwise false
+ bool verifyBufferIds(int32_t streamId, std::vector<uint64_t>& inBufIds);
// Delete and optionally close native handles and clear the input vector afterward
static void cleanupNativeHandles(
std::vector<native_handle_t*> *handles, bool closeFd = false);
- struct BufferHasher {
- size_t operator()(const buffer_handle_t& buf) const {
- if (buf == nullptr)
- return 0;
-
- size_t result = 1;
- result = 31 * result + buf->numFds;
- for (int i = 0; i < buf->numFds; i++) {
- result = 31 * result + buf->data[i];
- }
- return result;
- }
- };
-
- struct BufferComparator {
- bool operator()(const buffer_handle_t& buf1, const buffer_handle_t& buf2) const {
- if (buf1->numFds == buf2->numFds) {
- for (int i = 0; i < buf1->numFds; i++) {
- if (buf1->data[i] != buf2->data[i]) {
- return false;
- }
- }
- return true;
- }
- return false;
- }
- };
-
- std::mutex mBufferIdMapLock; // protecting mBufferIdMaps and mNextBufferId
- typedef std::unordered_map<const buffer_handle_t, uint64_t,
- BufferHasher, BufferComparator> BufferIdMap;
- // stream ID -> per stream buffer ID map
- std::unordered_map<int, BufferIdMap> mBufferIdMaps;
- uint64_t mNextBufferId = 1; // 0 means no buffer
-
virtual void onBufferFreed(int streamId, const native_handle_t* handle) override;
+ std::mutex mFreedBuffersLock;
std::vector<std::pair<int, uint64_t>> mFreedBuffers;
- // Buffers given to HAL through requestStreamBuffer API
- std::mutex mRequestedBuffersLock;
- std::unordered_map<uint64_t, std::pair<int32_t, buffer_handle_t*>> mRequestedBuffers;
+ // Keep track of buffer cache and inflight buffer records
+ camera3::BufferRecords mBufferRecords;
uint32_t mNextStreamConfigCounter = 1;
@@ -473,24 +457,7 @@
// Tracking cause of fatal errors when in STATUS_ERROR
String8 mErrorCause;
- // Synchronized mapping of stream IDs to stream instances
- class StreamSet {
- public:
- status_t add(int streamId, sp<camera3::Camera3OutputStreamInterface>);
- ssize_t remove(int streamId);
- sp<camera3::Camera3OutputStreamInterface> get(int streamId);
- // get by (underlying) vector index
- sp<camera3::Camera3OutputStreamInterface> operator[] (size_t index);
- size_t size() const;
- std::vector<int> getStreamIds();
- void clear();
-
- private:
- mutable std::mutex mLock;
- KeyedVector<int, sp<camera3::Camera3OutputStreamInterface>> mData;
- };
-
- StreamSet mOutputStreams;
+ camera3::StreamSet mOutputStreams;
sp<camera3::Camera3Stream> mInputStream;
int mNextStreamId;
bool mNeedConfig;
@@ -579,15 +546,6 @@
const hardware::hidl_vec<
hardware::camera::device::V3_2::StreamBuffer>& buffers) override;
- // Handle one capture result. Assume that mProcessCaptureResultLock is held.
- void processOneCaptureResultLocked(
- const hardware::camera::device::V3_2::CaptureResult& result,
- const hardware::hidl_vec<
- hardware::camera::device::V3_4::PhysicalCameraMetadata> physicalCameraMetadata);
- status_t readOneCameraMetadataLocked(uint64_t fmqResultSize,
- hardware::camera::device::V3_2::CameraMetadata& resultMetadata,
- const hardware::camera::device::V3_2::CameraMetadata& result);
-
// Handle one notify message
void notify(const hardware::camera::device::V3_2::NotifyMsg& msg);
@@ -710,11 +668,20 @@
* error message to indicate why. Only the first call's message will be
* used. The message is also sent to the log.
*/
- void setErrorState(const char *fmt, ...);
+ void setErrorState(const char *fmt, ...) override;
+ void setErrorStateLocked(const char *fmt, ...) override;
void setErrorStateV(const char *fmt, va_list args);
- void setErrorStateLocked(const char *fmt, ...);
void setErrorStateLockedV(const char *fmt, va_list args);
+ /////////////////////////////////////////////////////////////////////
+ // Implements InflightRequestUpdateInterface
+
+ void onInflightEntryRemovedLocked(nsecs_t duration) override;
+ void checkInflightMapLengthLocked() override;
+ void onInflightMapFlushedLocked() override;
+
+ /////////////////////////////////////////////////////////////////////
+
/**
* Debugging trylock/spin method
* Try to acquire a lock a few times with sleeps between before giving up.
@@ -722,12 +689,6 @@
bool tryLockSpinRightRound(Mutex& lock);
/**
- * Helper function to determine if an input size for implementation defined
- * format is supported.
- */
- bool isOpaqueInputSizeSupported(uint32_t width, uint32_t height);
-
- /**
* Helper function to get the largest Jpeg resolution (in area)
* Return Size(0, 0) if static metatdata is invalid
*/
@@ -860,7 +821,8 @@
status_t switchToOffline(
const std::vector<int32_t>& streamsToKeep,
/*out*/hardware::camera::device::V3_6::CameraOfflineSessionInfo* offlineSessionInfo,
- /*out*/sp<hardware::camera::device::V3_6::ICameraOfflineSession>* offlineSession);
+ /*out*/sp<hardware::camera::device::V3_6::ICameraOfflineSession>* offlineSession,
+ /*out*/camera3::BufferRecords* bufferRecords);
protected:
@@ -1021,119 +983,12 @@
/**
* In-flight queue for tracking completion of capture requests.
*/
+ std::mutex mInFlightLock;
+ camera3::InFlightRequestMap mInFlightMap;
+ nsecs_t mExpectedInflightDuration = 0;
+ // End of mInFlightLock protection scope
- struct InFlightRequest {
- // Set by notify() SHUTTER call.
- nsecs_t shutterTimestamp;
- // Set by process_capture_result().
- nsecs_t sensorTimestamp;
- int requestStatus;
- // Set by process_capture_result call with valid metadata
- bool haveResultMetadata;
- // Decremented by calls to process_capture_result with valid output
- // and input buffers
- int numBuffersLeft;
- CaptureResultExtras resultExtras;
- // If this request has any input buffer
- bool hasInputBuffer;
-
- // The last metadata that framework receives from HAL and
- // not yet send out because the shutter event hasn't arrived.
- // It's added by process_capture_result and sent when framework
- // receives the shutter event.
- CameraMetadata pendingMetadata;
-
- // The metadata of the partial results that framework receives from HAL so far
- // and has sent out.
- CameraMetadata collectedPartialResult;
-
- // Buffers are added by process_capture_result when output buffers
- // return from HAL but framework has not yet received the shutter
- // event. They will be returned to the streams when framework receives
- // the shutter event.
- Vector<camera3_stream_buffer_t> pendingOutputBuffers;
-
- // Whether this inflight request's shutter and result callback are to be
- // called. The policy is that if the request is the last one in the constrained
- // high speed recording request list, this flag will be true. If the request list
- // is not for constrained high speed recording, this flag will also be true.
- bool hasCallback;
-
- // Maximum expected frame duration for this request.
- // For manual captures, equal to the max of requested exposure time and frame duration
- // For auto-exposure modes, equal to 1/(lower end of target FPS range)
- nsecs_t maxExpectedDuration;
-
- // Whether the result metadata for this request is to be skipped. The
- // result metadata should be skipped in the case of
- // REQUEST/RESULT error.
- bool skipResultMetadata;
-
- // The physical camera ids being requested.
- std::set<String8> physicalCameraIds;
-
- // Map of physicalCameraId <-> Metadata
- std::vector<PhysicalCaptureResultInfo> physicalMetadatas;
-
- // Indicates a still capture request.
- bool stillCapture;
-
- // Indicates a ZSL capture request
- bool zslCapture;
-
- // Requested camera ids (both logical and physical) with zoomRatio != 1.0f
- std::set<std::string> cameraIdsWithZoom;
-
- // What shared surfaces an output should go to
- SurfaceMap outputSurfaces;
-
- // Default constructor needed by KeyedVector
- InFlightRequest() :
- shutterTimestamp(0),
- sensorTimestamp(0),
- requestStatus(OK),
- haveResultMetadata(false),
- numBuffersLeft(0),
- hasInputBuffer(false),
- hasCallback(true),
- maxExpectedDuration(kDefaultExpectedDuration),
- skipResultMetadata(false),
- stillCapture(false),
- zslCapture(false) {
- }
-
- InFlightRequest(int numBuffers, CaptureResultExtras extras, bool hasInput,
- bool hasAppCallback, nsecs_t maxDuration,
- const std::set<String8>& physicalCameraIdSet, bool isStillCapture,
- bool isZslCapture, const std::set<std::string>& idsWithZoom,
- const SurfaceMap& outSurfaces = SurfaceMap{}) :
- shutterTimestamp(0),
- sensorTimestamp(0),
- requestStatus(OK),
- haveResultMetadata(false),
- numBuffersLeft(numBuffers),
- resultExtras(extras),
- hasInputBuffer(hasInput),
- hasCallback(hasAppCallback),
- maxExpectedDuration(maxDuration),
- skipResultMetadata(false),
- physicalCameraIds(physicalCameraIdSet),
- stillCapture(isStillCapture),
- zslCapture(isZslCapture),
- cameraIdsWithZoom(idsWithZoom),
- outputSurfaces(outSurfaces) {
- }
- };
-
- // Map from frame number to the in-flight request state
- typedef KeyedVector<uint32_t, InFlightRequest> InFlightMap;
-
-
- Mutex mInFlightLock; // Protects mInFlightMap and
- // mExpectedInflightDuration
- InFlightMap mInFlightMap;
- nsecs_t mExpectedInflightDuration = 0;
- int mInFlightStatusId;
+ int mInFlightStatusId; // const after initialize
status_t registerInFlight(uint32_t frameNumber,
int32_t numBuffers, CaptureResultExtras resultExtras, bool hasInput,
@@ -1211,7 +1066,7 @@
*/
// Lock for output side of device
- Mutex mOutputLock;
+ std::mutex mOutputLock;
/**** Scope for mOutputLock ****/
// the minimal frame number of the next non-reprocess result
@@ -1226,61 +1081,18 @@
uint32_t mNextReprocessShutterFrameNumber;
// the minimal frame number of the next ZSL still capture shutter
uint32_t mNextZslStillShutterFrameNumber;
- List<CaptureResult> mResultQueue;
- Condition mResultSignal;
- wp<NotificationListener> mListener;
+ List<CaptureResult> mResultQueue;
+ std::condition_variable mResultSignal;
+ wp<NotificationListener> mListener;
/**** End scope for mOutputLock ****/
- /**
- * Callback functions from HAL device
- */
- void processCaptureResult(const camera3_capture_result *result);
-
- void notify(const camera3_notify_msg *msg);
-
- // Specific notify handlers
- void notifyError(const camera3_error_msg_t &msg,
- sp<NotificationListener> listener);
- void notifyShutter(const camera3_shutter_msg_t &msg,
- sp<NotificationListener> listener);
-
- // helper function to return the output buffers to the streams.
- void returnOutputBuffers(const camera3_stream_buffer_t *outputBuffers,
- size_t numBuffers, nsecs_t timestamp, bool timestampIncreasing = true,
- // The following arguments are only meant for surface sharing use case
- const SurfaceMap& outputSurfaces = SurfaceMap{},
- // Used to send buffer error callback when failing to return buffer
- const CaptureResultExtras &resultExtras = CaptureResultExtras{});
-
- // Send a partial capture result.
- void sendPartialCaptureResult(const camera_metadata_t * partialResult,
- const CaptureResultExtras &resultExtras, uint32_t frameNumber);
-
- // Send a total capture result given the pending metadata and result extras,
- // partial results, and the frame number to the result queue.
- void sendCaptureResult(CameraMetadata &pendingMetadata,
- CaptureResultExtras &resultExtras,
- CameraMetadata &collectedPartialResult, uint32_t frameNumber,
- bool reprocess, bool zslStillCapture,
- const std::set<std::string>& cameraIdsWithZoom,
- const std::vector<PhysicalCaptureResultInfo>& physicalMetadatas);
-
- bool isLastFullResult(const InFlightRequest& inFlightRequest);
-
- // Insert the result to the result queue after updating frame number and overriding AE
- // trigger cancel.
- // mOutputLock must be held when calling this function.
- void insertResultLocked(CaptureResult *result, uint32_t frameNumber);
-
/**** Scope for mInFlightLock ****/
// Remove the in-flight map entry of the given index from mInFlightMap.
// It must only be called with mInFlightLock held.
void removeInFlightMapEntryLocked(int idx);
- // Remove the in-flight request of the given index from mInFlightMap
- // if it's no longer needed. It must only be called with mInFlightLock held.
- void removeInFlightRequestIfReadyLocked(int idx);
+
// Remove all in-flight requests and return all buffers.
// This is used after HAL interface is closed to cleanup any request/buffers
// not returned by HAL.
@@ -1378,6 +1190,10 @@
void onSubmittingRequest();
void onRequestThreadPaused();
+ // Events triggered by successful switchToOffline call
+ // Return true is there is no ongoing requestBuffer call.
+ bool onSwitchToOfflineSuccess();
+
private:
void notifyTrackerLocked(bool active);
@@ -1391,6 +1207,7 @@
bool mRequestThreadPaused = true;
bool mInflightMapEmpty = true;
bool mRequestBufferOngoing = false;
+ bool mSwitchedToOffline = false;
wp<camera3::StatusTracker> mStatusTracker;
int mRequestBufferStatusId;
@@ -1398,7 +1215,6 @@
// Fix up result metadata for monochrome camera.
bool mNeedFixupMonochromeTags;
- status_t fixupMonochromeTags(const CameraMetadata& deviceInfo, CameraMetadata& resultMetadata);
// Whether HAL supports offline processing capability.
bool mSupportOfflineProcessing = false;
diff --git a/services/camera/libcameraservice/device3/Camera3OfflineSession.cpp b/services/camera/libcameraservice/device3/Camera3OfflineSession.cpp
index dc5cbb3..0f05632 100644
--- a/services/camera/libcameraservice/device3/Camera3OfflineSession.cpp
+++ b/services/camera/libcameraservice/device3/Camera3OfflineSession.cpp
@@ -29,90 +29,435 @@
#include <utils/Trace.h>
+#include <android/hardware/camera2/ICameraDeviceCallbacks.h>
+
#include "device3/Camera3OfflineSession.h"
#include "device3/Camera3OutputStream.h"
#include "device3/Camera3InputStream.h"
#include "device3/Camera3SharedOutputStream.h"
+#include "utils/CameraTraces.h"
using namespace android::camera3;
using namespace android::hardware::camera;
namespace android {
-Camera3OfflineSession::Camera3OfflineSession(const String8 &id):
- mId(id)
-{
+Camera3OfflineSession::Camera3OfflineSession(const String8 &id,
+ const sp<camera3::Camera3Stream>& inputStream,
+ const camera3::StreamSet& offlineStreamSet,
+ camera3::BufferRecords&& bufferRecords,
+ const camera3::InFlightRequestMap& offlineReqs,
+ const Camera3OfflineStates& offlineStates,
+ sp<hardware::camera::device::V3_6::ICameraOfflineSession> offlineSession) :
+ mId(id),
+ mInputStream(inputStream),
+ mOutputStreams(offlineStreamSet),
+ mBufferRecords(std::move(bufferRecords)),
+ mOfflineReqs(offlineReqs),
+ mSession(offlineSession),
+ mTagMonitor(offlineStates.mTagMonitor),
+ mVendorTagId(offlineStates.mVendorTagId),
+ mUseHalBufManager(offlineStates.mUseHalBufManager),
+ mNeedFixupMonochromeTags(offlineStates.mNeedFixupMonochromeTags),
+ mUsePartialResult(offlineStates.mUsePartialResult),
+ mNumPartialResults(offlineStates.mNumPartialResults),
+ mNextResultFrameNumber(offlineStates.mNextResultFrameNumber),
+ mNextReprocessResultFrameNumber(offlineStates.mNextReprocessResultFrameNumber),
+ mNextZslStillResultFrameNumber(offlineStates.mNextZslStillResultFrameNumber),
+ mNextShutterFrameNumber(offlineStates.mNextShutterFrameNumber),
+ mNextReprocessShutterFrameNumber(offlineStates.mNextReprocessShutterFrameNumber),
+ mNextZslStillShutterFrameNumber(offlineStates.mNextZslStillShutterFrameNumber),
+ mDeviceInfo(offlineStates.mDeviceInfo),
+ mPhysicalDeviceInfoMap(offlineStates.mPhysicalDeviceInfoMap),
+ mDistortionMappers(offlineStates.mDistortionMappers),
+ mZoomRatioMappers(offlineStates.mZoomRatioMappers),
+ mStatus(STATUS_UNINITIALIZED) {
ATRACE_CALL();
ALOGV("%s: Created offline session for camera %s", __FUNCTION__, mId.string());
}
-Camera3OfflineSession::~Camera3OfflineSession()
-{
+Camera3OfflineSession::~Camera3OfflineSession() {
ATRACE_CALL();
ALOGV("%s: Tearing down offline session for camera id %s", __FUNCTION__, mId.string());
+ disconnectImpl();
}
const String8& Camera3OfflineSession::getId() const {
return mId;
}
-status_t Camera3OfflineSession::initialize(
- sp<hardware::camera::device::V3_6::ICameraOfflineSession> /*hidlSession*/) {
+status_t Camera3OfflineSession::initialize(wp<NotificationListener> listener) {
ATRACE_CALL();
+
+ if (mSession == nullptr) {
+ ALOGE("%s: HIDL session is null!", __FUNCTION__);
+ return DEAD_OBJECT;
+ }
+
+ {
+ std::lock_guard<std::mutex> lock(mLock);
+
+ mListener = listener;
+
+ // setup result FMQ
+ std::unique_ptr<ResultMetadataQueue>& resQueue = mResultMetadataQueue;
+ auto resultQueueRet = mSession->getCaptureResultMetadataQueue(
+ [&resQueue](const auto& descriptor) {
+ resQueue = std::make_unique<ResultMetadataQueue>(descriptor);
+ if (!resQueue->isValid() || resQueue->availableToWrite() <= 0) {
+ ALOGE("HAL returns empty result metadata fmq, not use it");
+ resQueue = nullptr;
+ // Don't use resQueue onwards.
+ }
+ });
+ if (!resultQueueRet.isOk()) {
+ ALOGE("Transaction error when getting result metadata queue from camera session: %s",
+ resultQueueRet.description().c_str());
+ return DEAD_OBJECT;
+ }
+ mStatus = STATUS_ACTIVE;
+ }
+
+ mSession->setCallback(this);
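+ // Note: the callback is registered after releasing mLock; results may start
+ // arriving immediately, and those paths acquire mLock themselves.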
+
return OK;
}
status_t Camera3OfflineSession::dump(int /*fd*/) {
ATRACE_CALL();
- return OK;
-}
-
-status_t Camera3OfflineSession::abort() {
- ATRACE_CALL();
+ std::lock_guard<std::mutex> il(mInterfaceLock);
return OK;
}
status_t Camera3OfflineSession::disconnect() {
ATRACE_CALL();
+ return disconnectImpl();
+}
+
+status_t Camera3OfflineSession::disconnectImpl() {
+ ATRACE_CALL();
+ std::lock_guard<std::mutex> il(mInterfaceLock);
+
+ sp<NotificationListener> listener;
+ {
+ std::lock_guard<std::mutex> lock(mLock);
+ if (mStatus == STATUS_CLOSED) {
+ return OK; // don't close twice
+ } else if (mStatus == STATUS_ERROR) {
+ ALOGE("%s: offline session %s shutting down in error state",
+ __FUNCTION__, mId.string());
+ }
+ listener = mListener.promote();
+ }
+
+ ALOGV("%s: E", __FUNCTION__);
+
+ {
+ std::lock_guard<std::mutex> lock(mRequestBufferInterfaceLock);
+ mAllowRequestBuffer = false;
+ }
+
+ std::vector<wp<Camera3StreamInterface>> streams;
+ streams.reserve(mOutputStreams.size() + (mInputStream != nullptr ? 1 : 0));
+ for (size_t i = 0; i < mOutputStreams.size(); i++) {
+ streams.push_back(mOutputStreams[i]);
+ }
+ if (mInputStream != nullptr) {
+ streams.push_back(mInputStream);
+ }
+
+ mSession->close();
+
+ FlushInflightReqStates states {
+ mId, mOfflineReqsLock, mOfflineReqs, mUseHalBufManager,
+ listener, *this, mBufferRecords, *this};
+
+ camera3::flushInflightRequests(states);
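+ // With the HAL session closed, no further results can arrive; flushing returns
+ // any remaining in-flight requests and buffers to their streams.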
+
+ {
+ std::lock_guard<std::mutex> lock(mLock);
+ mSession.clear();
+ mOutputStreams.clear();
+ mInputStream.clear();
+ mStatus = STATUS_CLOSED;
+ }
+
+ for (auto& weakStream : streams) {
+ sp<Camera3StreamInterface> stream = weakStream.promote();
+ if (stream != nullptr) {
+ ALOGE("%s: Stream %d leaked! strong reference (%d)!",
+ __FUNCTION__, stream->getId(), stream->getStrongCount() - 1);
+ }
+ }
+
+ ALOGV("%s: X", __FUNCTION__);
return OK;
}
-status_t Camera3OfflineSession::waitForNextFrame(nsecs_t /*timeout*/) {
+status_t Camera3OfflineSession::waitForNextFrame(nsecs_t timeout) {
ATRACE_CALL();
+ std::unique_lock<std::mutex> lk(mOutputLock);
+
+ while (mResultQueue.empty()) {
+ auto st = mResultSignal.wait_for(lk, std::chrono::nanoseconds(timeout));
+ if (st == std::cv_status::timeout) {
+ return TIMED_OUT;
+ }
+ }
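+ // Note: each spurious wakeup restarts the full timeout, so 'timeout' is a
+ // lower bound on the wait rather than an exact deadline.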
return OK;
}
-status_t Camera3OfflineSession::getNextResult(CaptureResult* /*frame*/) {
+status_t Camera3OfflineSession::getNextResult(CaptureResult* frame) {
ATRACE_CALL();
+ std::lock_guard<std::mutex> l(mOutputLock);
+
+ if (mResultQueue.empty()) {
+ return NOT_ENOUGH_DATA;
+ }
+
+ if (frame == nullptr) {
+ ALOGE("%s: argument cannot be NULL", __FUNCTION__);
+ return BAD_VALUE;
+ }
+
+ CaptureResult &result = *(mResultQueue.begin());
+ frame->mResultExtras = result.mResultExtras;
+ frame->mMetadata.acquire(result.mMetadata);
+ frame->mPhysicalMetadatas = std::move(result.mPhysicalMetadatas);
+ mResultQueue.erase(mResultQueue.begin());
+
return OK;
}
hardware::Return<void> Camera3OfflineSession::processCaptureResult_3_4(
const hardware::hidl_vec<
- hardware::camera::device::V3_4::CaptureResult>& /*results*/) {
+ hardware::camera::device::V3_4::CaptureResult>& results) {
+ sp<NotificationListener> listener;
+ {
+ std::lock_guard<std::mutex> lock(mLock);
+ if (mStatus != STATUS_ACTIVE) {
+ ALOGE("%s called in wrong state %d", __FUNCTION__, mStatus);
+ return hardware::Void();
+ }
+ listener = mListener.promote();
+ }
+
+ CaptureOutputStates states {
+ mId,
+ mOfflineReqsLock, mOfflineReqs,
+ mOutputLock, mResultQueue, mResultSignal,
+ mNextShutterFrameNumber,
+ mNextReprocessShutterFrameNumber, mNextZslStillShutterFrameNumber,
+ mNextResultFrameNumber,
+ mNextReprocessResultFrameNumber, mNextZslStillResultFrameNumber,
+ mUseHalBufManager, mUsePartialResult, mNeedFixupMonochromeTags,
+ mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
+ mResultMetadataQueue, mDistortionMappers, mZoomRatioMappers, mTagMonitor,
+ mInputStream, mOutputStreams, listener, *this, *this, mBufferRecords
+ };
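+ // CaptureOutputStates bundles this session's state so the shared camera3
+ // output utilities can process results the same way they do for Camera3Device;
+ // the same aggregate is rebuilt in processCaptureResult() and notify().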
+
+ std::lock_guard<std::mutex> lock(mProcessCaptureResultLock);
+ for (const auto& result : results) {
+ processOneCaptureResultLocked(states, result.v3_2, result.physicalCameraMetadata);
+ }
return hardware::Void();
}
hardware::Return<void> Camera3OfflineSession::processCaptureResult(
const hardware::hidl_vec<
- hardware::camera::device::V3_2::CaptureResult>& /*results*/) {
+ hardware::camera::device::V3_2::CaptureResult>& results) {
+ // TODO: change impl to call into processCaptureResult_3_4 instead?
+ // might need to figure out how to reduce copying though.
+ sp<NotificationListener> listener;
+ {
+ std::lock_guard<std::mutex> lock(mLock);
+ if (mStatus != STATUS_ACTIVE) {
+ ALOGE("%s called in wrong state %d", __FUNCTION__, mStatus);
+ return hardware::Void();
+ }
+ listener = mListener.promote();
+ }
+
+ hardware::hidl_vec<hardware::camera::device::V3_4::PhysicalCameraMetadata> noPhysMetadata;
+
+ CaptureOutputStates states {
+ mId,
+ mOfflineReqsLock, mOfflineReqs,
+ mOutputLock, mResultQueue, mResultSignal,
+ mNextShutterFrameNumber,
+ mNextReprocessShutterFrameNumber, mNextZslStillShutterFrameNumber,
+ mNextResultFrameNumber,
+ mNextReprocessResultFrameNumber, mNextZslStillResultFrameNumber,
+ mUseHalBufManager, mUsePartialResult, mNeedFixupMonochromeTags,
+ mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
+ mResultMetadataQueue, mDistortionMappers, mZoomRatioMappers, mTagMonitor,
+ mInputStream, mOutputStreams, listener, *this, *this, mBufferRecords
+ };
+
+ std::lock_guard<std::mutex> lock(mProcessCaptureResultLock);
+ for (const auto& result : results) {
+ processOneCaptureResultLocked(states, result, noPhysMetadata);
+ }
return hardware::Void();
}
hardware::Return<void> Camera3OfflineSession::notify(
- const hardware::hidl_vec<hardware::camera::device::V3_2::NotifyMsg>& /*msgs*/) {
+ const hardware::hidl_vec<hardware::camera::device::V3_2::NotifyMsg>& msgs) {
+ sp<NotificationListener> listener;
+ {
+ std::lock_guard<std::mutex> lock(mLock);
+ if (mStatus != STATUS_ACTIVE) {
+ ALOGE("%s called in wrong state %d", __FUNCTION__, mStatus);
+ return hardware::Void();
+ }
+ listener = mListener.promote();
+ }
+
+ CaptureOutputStates states {
+ mId,
+ mOfflineReqsLock, mOfflineReqs,
+ mOutputLock, mResultQueue, mResultSignal,
+ mNextShutterFrameNumber,
+ mNextReprocessShutterFrameNumber, mNextZslStillShutterFrameNumber,
+ mNextResultFrameNumber,
+ mNextReprocessResultFrameNumber, mNextZslStillResultFrameNumber,
+ mUseHalBufManager, mUsePartialResult, mNeedFixupMonochromeTags,
+ mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
+ mResultMetadataQueue, mDistortionMappers, mZoomRatioMappers, mTagMonitor,
+ mInputStream, mOutputStreams, listener, *this, *this, mBufferRecords
+ };
+ for (const auto& msg : msgs) {
+ camera3::notify(states, msg);
+ }
return hardware::Void();
}
hardware::Return<void> Camera3OfflineSession::requestStreamBuffers(
- const hardware::hidl_vec<hardware::camera::device::V3_5::BufferRequest>& /*bufReqs*/,
- requestStreamBuffers_cb /*_hidl_cb*/) {
+ const hardware::hidl_vec<hardware::camera::device::V3_5::BufferRequest>& bufReqs,
+ requestStreamBuffers_cb _hidl_cb) {
+ {
+ std::lock_guard<std::mutex> lock(mLock);
+ if (mStatus != STATUS_ACTIVE) {
+ ALOGE("%s called in wrong state %d", __FUNCTION__, mStatus);
+ return hardware::Void();
+ }
+ }
+
+ RequestBufferStates states {
+ mId, mRequestBufferInterfaceLock, mUseHalBufManager, mOutputStreams,
+ *this, mBufferRecords, *this};
+ camera3::requestStreamBuffers(states, bufReqs, _hidl_cb);
return hardware::Void();
}
hardware::Return<void> Camera3OfflineSession::returnStreamBuffers(
- const hardware::hidl_vec<hardware::camera::device::V3_2::StreamBuffer>& /*buffers*/) {
+ const hardware::hidl_vec<hardware::camera::device::V3_2::StreamBuffer>& buffers) {
+ {
+ std::lock_guard<std::mutex> lock(mLock);
+ if (mStatus != STATUS_ACTIVE) {
+ ALOGE("%s called in wrong state %d", __FUNCTION__, mStatus);
+ return hardware::Void();
+ }
+ }
+
+ ReturnBufferStates states {
+ mId, mUseHalBufManager, mOutputStreams, mBufferRecords};
+ camera3::returnStreamBuffers(states, buffers);
return hardware::Void();
}
+void Camera3OfflineSession::setErrorState(const char *fmt, ...) {
+ ATRACE_CALL();
+ std::lock_guard<std::mutex> lock(mLock);
+ va_list args;
+ va_start(args, fmt);
+
+ setErrorStateLockedV(fmt, args);
+
+ va_end(args);
+
+ // FIXME: automatically disconnect here?
+}
+
+void Camera3OfflineSession::setErrorStateLocked(const char *fmt, ...) {
+ va_list args;
+ va_start(args, fmt);
+
+ setErrorStateLockedV(fmt, args);
+
+ va_end(args);
+}
+
+void Camera3OfflineSession::setErrorStateLockedV(const char *fmt, va_list args) {
+ // Print out all error messages to log
+ String8 errorCause = String8::formatV(fmt, args);
+ ALOGE("Camera %s: %s", mId.string(), errorCause.string());
+
+ // But only do error state transition steps for the first error
+ if (mStatus == STATUS_ERROR || mStatus == STATUS_UNINITIALIZED) return;
+
+ mErrorCause = errorCause;
+
+ mStatus = STATUS_ERROR;
+
+ // Notify upstream about a device error
+ sp<NotificationListener> listener = mListener.promote();
+ if (listener != NULL) {
+ listener->notifyError(hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_DEVICE,
+ CaptureResultExtras());
+ }
+
+ // Save stack trace. View by dumping it later.
+ CameraTraces::saveTrace();
+}
+
+void Camera3OfflineSession::onInflightEntryRemovedLocked(nsecs_t /*duration*/) {
+ if (mOfflineReqs.size() == 0) {
+ std::lock_guard<std::mutex> lock(mRequestBufferInterfaceLock);
+ mAllowRequestBuffer = false;
+ }
+}
+
+void Camera3OfflineSession::checkInflightMapLengthLocked() {
+ // Intentional empty impl.
+}
+
+void Camera3OfflineSession::onInflightMapFlushedLocked() {
+ // Intentional empty impl.
+}
+
+bool Camera3OfflineSession::startRequestBuffer() {
+ return mAllowRequestBuffer;
+}
+
+void Camera3OfflineSession::endRequestBuffer() {
+ // Intentional empty impl.
+}
+
+nsecs_t Camera3OfflineSession::getWaitDuration() {
+ const nsecs_t kBaseGetBufferWait = 3000000000; // 3 sec.
+ return kBaseGetBufferWait;
+}
+
+void Camera3OfflineSession::getInflightBufferKeys(std::vector<std::pair<int32_t, int32_t>>* out) {
+ mBufferRecords.getInflightBufferKeys(out);
+}
+
+void Camera3OfflineSession::getInflightRequestBufferKeys(std::vector<uint64_t>* out) {
+ mBufferRecords.getInflightRequestBufferKeys(out);
+}
+
+std::vector<sp<Camera3StreamInterface>> Camera3OfflineSession::getAllStreams() {
+ std::vector<sp<Camera3StreamInterface>> ret;
+ bool hasInputStream = mInputStream != nullptr;
+ ret.reserve(mOutputStreams.size() + ((hasInputStream) ? 1 : 0));
+ if (hasInputStream) {
+ ret.push_back(mInputStream);
+ }
+ for (size_t i = 0; i < mOutputStreams.size(); i++) {
+ ret.push_back(mOutputStreams[i]);
+ }
+ return ret;
+}
+
}; // namespace android
diff --git a/services/camera/libcameraservice/device3/Camera3OfflineSession.h b/services/camera/libcameraservice/device3/Camera3OfflineSession.h
index 30e8c4f..b6b8a7d 100644
--- a/services/camera/libcameraservice/device3/Camera3OfflineSession.h
+++ b/services/camera/libcameraservice/device3/Camera3OfflineSession.h
@@ -17,16 +17,23 @@
#ifndef ANDROID_SERVERS_CAMERA3OFFLINESESSION_H
#define ANDROID_SERVERS_CAMERA3OFFLINESESSION_H
+#include <memory>
+#include <mutex>
+
#include <utils/String8.h>
#include <utils/String16.h>
#include <android/hardware/camera/device/3.6/ICameraOfflineSession.h>
+
#include <fmq/MessageQueue.h>
#include "common/CameraOfflineSessionBase.h"
#include "device3/Camera3BufferManager.h"
#include "device3/DistortionMapper.h"
+#include "device3/InFlightRequest.h"
+#include "device3/Camera3OutputUtils.h"
+#include "device3/ZoomRatioMapper.h"
#include "utils/TagMonitor.h"
#include "utils/LatencyHistogram.h"
#include <camera_metadata_hidden.h>
@@ -41,26 +48,90 @@
} // namespace camera3
+
+// An immutable struct containing general states that will be copied from Camera3Device to
+// Camera3OfflineSession
+struct Camera3OfflineStates {
+ Camera3OfflineStates(
+ const TagMonitor& tagMonitor, const metadata_vendor_id_t vendorTagId,
+ const bool useHalBufManager, const bool needFixupMonochromeTags,
+ const bool usePartialResult, const uint32_t numPartialResults,
+ const uint32_t nextResultFN, const uint32_t nextReprocResultFN,
+ const uint32_t nextZslResultFN, const uint32_t nextShutterFN,
+ const uint32_t nextReprocShutterFN, const uint32_t nextZslShutterFN,
+ const CameraMetadata& deviceInfo,
+ const std::unordered_map<std::string, CameraMetadata>& physicalDeviceInfoMap,
+ const std::unordered_map<std::string, camera3::DistortionMapper>& distortionMappers,
+ const std::unordered_map<std::string, camera3::ZoomRatioMapper>& zoomRatioMappers) :
+ mTagMonitor(tagMonitor), mVendorTagId(vendorTagId),
+ mUseHalBufManager(useHalBufManager), mNeedFixupMonochromeTags(needFixupMonochromeTags),
+ mUsePartialResult(usePartialResult), mNumPartialResults(numPartialResults),
+ mNextResultFrameNumber(nextResultFN),
+ mNextReprocessResultFrameNumber(nextReprocResultFN),
+ mNextZslStillResultFrameNumber(nextZslResultFN),
+ mNextShutterFrameNumber(nextShutterFN),
+ mNextReprocessShutterFrameNumber(nextReprocShutterFN),
+ mNextZslStillShutterFrameNumber(nextZslShutterFN),
+ mDeviceInfo(deviceInfo),
+ mPhysicalDeviceInfoMap(physicalDeviceInfoMap),
+ mDistortionMappers(distortionMappers),
+ mZoomRatioMappers(zoomRatioMappers) {}
+
+ const TagMonitor& mTagMonitor;
+ const metadata_vendor_id_t mVendorTagId;
+
+ const bool mUseHalBufManager;
+ const bool mNeedFixupMonochromeTags;
+
+ const bool mUsePartialResult;
+ const uint32_t mNumPartialResults;
+
+ // the minimal frame number of the next non-reprocess result
+ const uint32_t mNextResultFrameNumber;
+ // the minimal frame number of the next reprocess result
+ const uint32_t mNextReprocessResultFrameNumber;
+ // the minimal frame number of the next ZSL still capture result
+ const uint32_t mNextZslStillResultFrameNumber;
+ // the minimal frame number of the next non-reprocess shutter
+ const uint32_t mNextShutterFrameNumber;
+ // the minimal frame number of the next reprocess shutter
+ const uint32_t mNextReprocessShutterFrameNumber;
+ // the minimal frame number of the next ZSL still capture shutter
+ const uint32_t mNextZslStillShutterFrameNumber;
+
+ const CameraMetadata& mDeviceInfo;
+
+ const std::unordered_map<std::string, CameraMetadata>& mPhysicalDeviceInfoMap;
+
+ const std::unordered_map<std::string, camera3::DistortionMapper>& mDistortionMappers;
+
+ const std::unordered_map<std::string, camera3::ZoomRatioMapper>& mZoomRatioMappers;
+};
+
/**
* Camera3OfflineSession for offline session defined in HIDL ICameraOfflineSession@3.6 or higher
*/
class Camera3OfflineSession :
public CameraOfflineSessionBase,
- virtual public hardware::camera::device::V3_5::ICameraDeviceCallback {
-
+ virtual public hardware::camera::device::V3_5::ICameraDeviceCallback,
+ public camera3::SetErrorInterface,
+ public camera3::InflightRequestUpdateInterface,
+ public camera3::RequestBufferInterface,
+ public camera3::FlushBufferInterface {
public:
- // initialize by Camera3Device. Camera3Device must send all info in separate argument.
- // monitored tags
- // mUseHalBufManager
- // mUsePartialResult
- // mNumPartialResults
- explicit Camera3OfflineSession(const String8& id);
+ // initialize by Camera3Device.
+ explicit Camera3OfflineSession(const String8& id,
+ const sp<camera3::Camera3Stream>& inputStream,
+ const camera3::StreamSet& offlineStreamSet,
+ camera3::BufferRecords&& bufferRecords,
+ const camera3::InFlightRequestMap& offlineReqs,
+ const Camera3OfflineStates& offlineStates,
+ sp<hardware::camera::device::V3_6::ICameraOfflineSession> offlineSession);
virtual ~Camera3OfflineSession();
- status_t initialize(
- sp<hardware::camera::device::V3_6::ICameraOfflineSession> hidlSession);
+ virtual status_t initialize(wp<NotificationListener> listener) override;
/**
* CameraOfflineSessionBase interface
@@ -71,8 +142,6 @@
status_t dump(int fd) override;
- status_t abort() override;
-
// methods for capture result passing
status_t waitForNextFrame(nsecs_t timeout) override;
status_t getNextResult(CaptureResult *frame) override;
@@ -115,10 +184,99 @@
*/
private:
-
// Camera device ID
const String8 mId;
+ sp<camera3::Camera3Stream> mInputStream;
+ camera3::StreamSet mOutputStreams;
+ camera3::BufferRecords mBufferRecords;
+ std::mutex mOfflineReqsLock;
+ camera3::InFlightRequestMap mOfflineReqs;
+
+ sp<hardware::camera::device::V3_6::ICameraOfflineSession> mSession;
+
+ TagMonitor mTagMonitor;
+ const metadata_vendor_id_t mVendorTagId;
+
+ const bool mUseHalBufManager;
+ const bool mNeedFixupMonochromeTags;
+
+ const bool mUsePartialResult;
+ const uint32_t mNumPartialResults;
+
+ std::mutex mOutputLock;
+ List<CaptureResult> mResultQueue;
+ std::condition_variable mResultSignal;
+ // the minimal frame number of the next non-reprocess result
+ uint32_t mNextResultFrameNumber;
+ // the minimal frame number of the next reprocess result
+ uint32_t mNextReprocessResultFrameNumber;
+ // the minimal frame number of the next ZSL still capture result
+ uint32_t mNextZslStillResultFrameNumber;
+ // the minimal frame number of the next non-reprocess shutter
+ uint32_t mNextShutterFrameNumber;
+ // the minimal frame number of the next reprocess shutter
+ uint32_t mNextReprocessShutterFrameNumber;
+ // the minimal frame number of the next ZSL still capture shutter
+ uint32_t mNextZslStillShutterFrameNumber;
+ // End of mOutputLock scope
+
+ const CameraMetadata mDeviceInfo;
+ std::unordered_map<std::string, CameraMetadata> mPhysicalDeviceInfoMap;
+
+ std::unordered_map<std::string, camera3::DistortionMapper> mDistortionMappers;
+
+ std::unordered_map<std::string, camera3::ZoomRatioMapper> mZoomRatioMappers;
+
+ mutable std::mutex mLock;
+
+ enum Status {
+ STATUS_UNINITIALIZED = 0,
+ STATUS_ACTIVE,
+ STATUS_ERROR,
+ STATUS_CLOSED
+ } mStatus;
+
+ wp<NotificationListener> mListener;
+ // End of mLock protect scope
+
+ std::mutex mProcessCaptureResultLock;
+ // FMQ to write result on. Must be guarded by mProcessCaptureResultLock.
+ std::unique_ptr<ResultMetadataQueue> mResultMetadataQueue;
+
+ // Tracking cause of fatal errors when in STATUS_ERROR
+ String8 mErrorCause;
+
+ // Lock to ensure requestStreamBuffers() callbacks are serialized
+ std::mutex mRequestBufferInterfaceLock;
+ // allow request buffer until all requests are processed or disconnectImpl is called
+ bool mAllowRequestBuffer = true;
+
+ // For client methods such as disconnect/dump
+ std::mutex mInterfaceLock;
+
+ // SetErrorInterface
+ void setErrorState(const char *fmt, ...) override;
+ void setErrorStateLocked(const char *fmt, ...) override;
+
+ // InflightRequestUpdateInterface
+ void onInflightEntryRemovedLocked(nsecs_t duration) override;
+ void checkInflightMapLengthLocked() override;
+ void onInflightMapFlushedLocked() override;
+
+ // RequestBufferInterface
+ bool startRequestBuffer() override;
+ void endRequestBuffer() override;
+ nsecs_t getWaitDuration() override;
+
+ // FlushBufferInterface
+ void getInflightBufferKeys(std::vector<std::pair<int32_t, int32_t>>* out) override;
+ void getInflightRequestBufferKeys(std::vector<uint64_t>* out) override;
+ std::vector<sp<camera3::Camera3StreamInterface>> getAllStreams() override;
+
+ void setErrorStateLockedV(const char *fmt, va_list args);
+
+ status_t disconnectImpl();
}; // class Camera3OfflineSession
}; // namespace android
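For orientation, here is a minimal lifecycle sketch of the offline-session handoff implied by the new constructor and initialize() signature above. It is not part of the change itself, and every local variable name below (tagMonitor, deviceInfo, cameraId, listener, and so on) is a stand-in for the caller's own state rather than an identifier taken from this patch:

    // Hedged sketch: package the device state into the immutable snapshot...
    Camera3OfflineStates offlineStates(
            tagMonitor, vendorTagId,
            useHalBufManager, needFixupMonochromeTags,
            usePartialResult, numPartialResults,
            nextResultFN, nextReprocResultFN, nextZslResultFN,
            nextShutterFN, nextReprocShutterFN, nextZslShutterFN,
            deviceInfo, physicalDeviceInfoMap,
            distortionMappers, zoomRatioMappers);

    // ...then hand over streams, buffer records and in-flight requests, and initialize.
    sp<Camera3OfflineSession> offlineSession = new Camera3OfflineSession(
            cameraId, inputStream, offlineStreamSet, std::move(bufferRecords),
            offlineReqs, offlineStates, hidlOfflineSession);
    status_t res = offlineSession->initialize(listener);
    if (res != OK) {
        ALOGE("Failed to initialize offline session: %s (%d)", strerror(-res), res);
    }

Because every member of Camera3OfflineStates is a const reference, the snapshot is only valid while the objects it refers to are alive; Camera3OfflineSession copies what it needs into its own members.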
diff --git a/services/camera/libcameraservice/device3/Camera3OutputInterface.h b/services/camera/libcameraservice/device3/Camera3OutputInterface.h
new file mode 100644
index 0000000..8817833
--- /dev/null
+++ b/services/camera/libcameraservice/device3/Camera3OutputInterface.h
@@ -0,0 +1,111 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_SERVERS_CAMERA3_OUTPUT_INTERFACE_H
+#define ANDROID_SERVERS_CAMERA3_OUTPUT_INTERFACE_H
+
+#include <memory>
+
+#include <cutils/native_handle.h>
+
+#include <utils/Timers.h>
+
+#include "device3/Camera3StreamInterface.h"
+
+namespace android {
+
+namespace camera3 {
+
+ /**
+ * Interfaces used by result/notification path shared between Camera3Device and
+ * Camera3OfflineSession
+ */
+ class SetErrorInterface {
+ public:
+ // Switch device into error state and send an ERROR_DEVICE notification
+ virtual void setErrorState(const char *fmt, ...) = 0;
+ // Same as setErrorState except this method assumes the caller holds the main object lock
+ virtual void setErrorStateLocked(const char *fmt, ...) = 0;
+
+ virtual ~SetErrorInterface() {}
+ };
+
+ // Interface used by callback path to update buffer records
+ class BufferRecordsInterface {
+ public:
+ // Method to extract a buffer's unique ID.
+ // Returns a pair of (newlySeenBuffer?, bufferId)
+ virtual std::pair<bool, uint64_t> getBufferId(const buffer_handle_t& buf, int streamId) = 0;
+
+ // Find a buffer_handle_t based on frame number and stream ID
+ virtual status_t popInflightBuffer(int32_t frameNumber, int32_t streamId,
+ /*out*/ buffer_handle_t **buffer) = 0;
+
+ // Register a bufId (streamId, buffer_handle_t) to inflight request buffer
+ virtual status_t pushInflightRequestBuffer(
+ uint64_t bufferId, buffer_handle_t* buf, int32_t streamId) = 0;
+
+ // Find a buffer_handle_t based on bufferId
+ virtual status_t popInflightRequestBuffer(uint64_t bufferId,
+ /*out*/ buffer_handle_t** buffer,
+ /*optional out*/ int32_t* streamId = nullptr) = 0;
+
+ virtual ~BufferRecordsInterface() {}
+ };
+
+ class InflightRequestUpdateInterface {
+ public:
+ // Caller must hold the lock protecting InflightRequestMap
+ // duration: the maxExpectedDuration of the removed entry
+ virtual void onInflightEntryRemovedLocked(nsecs_t duration) = 0;
+
+ virtual void checkInflightMapLengthLocked() = 0;
+
+ virtual void onInflightMapFlushedLocked() = 0;
+
+ virtual ~InflightRequestUpdateInterface() {}
+ };
+
+ class RequestBufferInterface {
+ public:
+ // Returns whether the state machine currently allows requestBuffers calls.
+ // If this returns true, the caller must call endRequestBuffer() later to signal the
+ // end of a request buffer transaction.
+ virtual bool startRequestBuffer() = 0;
+
+ virtual void endRequestBuffer() = 0;
+
+ // Returns how long the implementation should wait for a buffer to be returned
+ virtual nsecs_t getWaitDuration() = 0;
+
+ virtual ~RequestBufferInterface() {}
+ };
+
+ class FlushBufferInterface {
+ public:
+ virtual void getInflightBufferKeys(std::vector<std::pair<int32_t, int32_t>>* out) = 0;
+
+ virtual void getInflightRequestBufferKeys(std::vector<uint64_t>* out) = 0;
+
+ virtual std::vector<sp<Camera3StreamInterface>> getAllStreams() = 0;
+
+ virtual ~FlushBufferInterface() {}
+ };
+} // namespace camera3
+
+} // namespace android
+
+#endif
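The RequestBufferInterface above requires that a successful startRequestBuffer() is always balanced by a later endRequestBuffer(). The guard below is only an illustrative sketch of that contract (the class is hypothetical and not part of this change); the requestStreamBuffers() helper added later in Camera3OutputUtils.cpp pairs the two calls manually instead:

    // Hypothetical RAII helper, shown only to illustrate the start/end pairing.
    class ScopedRequestBufferTransaction {
      public:
        explicit ScopedRequestBufferTransaction(camera3::RequestBufferInterface& intf)
                : mIntf(intf), mStarted(intf.startRequestBuffer()) {}
        ~ScopedRequestBufferTransaction() {
            if (mStarted) mIntf.endRequestBuffer();
        }
        // True if the state machine currently allows buffer requests.
        bool started() const { return mStarted; }
      private:
        camera3::RequestBufferInterface& mIntf;
        bool mStarted;
    };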
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStreamInterface.cpp b/services/camera/libcameraservice/device3/Camera3OutputStreamInterface.cpp
new file mode 100644
index 0000000..64af91e
--- /dev/null
+++ b/services/camera/libcameraservice/device3/Camera3OutputStreamInterface.cpp
@@ -0,0 +1,84 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "Camera3-OutStrmIntf"
+#define ATRACE_TAG ATRACE_TAG_CAMERA
+//#define LOG_NDEBUG 0
+//#define LOG_NNDEBUG 0 // Per-frame verbose logging
+
+
+#include "Camera3OutputStreamInterface.h"
+
+namespace android {
+
+namespace camera3 {
+
+status_t StreamSet::add(
+ int streamId, sp<camera3::Camera3OutputStreamInterface> stream) {
+ if (stream == nullptr) {
+ ALOGE("%s: cannot add null stream", __FUNCTION__);
+ return BAD_VALUE;
+ }
+ std::lock_guard<std::mutex> lock(mLock);
+ return mData.add(streamId, stream);
+}
+
+ssize_t StreamSet::remove(int streamId) {
+ std::lock_guard<std::mutex> lock(mLock);
+ return mData.removeItem(streamId);
+}
+
+sp<camera3::Camera3OutputStreamInterface> StreamSet::get(int streamId) {
+ std::lock_guard<std::mutex> lock(mLock);
+ ssize_t idx = mData.indexOfKey(streamId);
+ if (idx == NAME_NOT_FOUND) {
+ return nullptr;
+ }
+ return mData.editValueAt(idx);
+}
+
+sp<camera3::Camera3OutputStreamInterface> StreamSet::operator[] (size_t index) {
+ std::lock_guard<std::mutex> lock(mLock);
+ return mData.editValueAt(index);
+}
+
+size_t StreamSet::size() const {
+ std::lock_guard<std::mutex> lock(mLock);
+ return mData.size();
+}
+
+void StreamSet::clear() {
+ std::lock_guard<std::mutex> lock(mLock);
+ return mData.clear();
+}
+
+std::vector<int> StreamSet::getStreamIds() {
+ std::lock_guard<std::mutex> lock(mLock);
+ std::vector<int> streamIds(mData.size());
+ for (size_t i = 0; i < mData.size(); i++) {
+ streamIds[i] = mData.keyAt(i);
+ }
+ return streamIds;
+}
+
+StreamSet::StreamSet(const StreamSet& other) {
+ std::lock_guard<std::mutex> lock(other.mLock);
+ mData = other.mData;
+}
+
+} // namespace camera3
+
+} // namespace android
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStreamInterface.h b/services/camera/libcameraservice/device3/Camera3OutputStreamInterface.h
index 2bde949..7f5c87a 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStreamInterface.h
+++ b/services/camera/libcameraservice/device3/Camera3OutputStreamInterface.h
@@ -97,6 +97,26 @@
virtual const String8& getPhysicalCameraId() const = 0;
};
+// Helper class to organize a synchronized mapping of stream IDs to stream instances
+class StreamSet {
+ public:
+ status_t add(int streamId, sp<camera3::Camera3OutputStreamInterface>);
+ ssize_t remove(int streamId);
+ sp<camera3::Camera3OutputStreamInterface> get(int streamId);
+ // get by (underlying) vector index
+ sp<camera3::Camera3OutputStreamInterface> operator[] (size_t index);
+ size_t size() const;
+ std::vector<int> getStreamIds();
+ void clear();
+
+ StreamSet() {};
+ StreamSet(const StreamSet& other);
+
+ private:
+ mutable std::mutex mLock;
+ KeyedVector<int, sp<camera3::Camera3OutputStreamInterface>> mData;
+};
+
} // namespace camera3
} // namespace android
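A short usage sketch of the new StreamSet helper follows. It is illustrative only; the `stream` variable is assumed to be an existing sp<camera3::Camera3OutputStreamInterface> and is not defined anywhere in this patch:

    camera3::StreamSet streams;
    streams.add(/*streamId*/ 0, stream);             // null streams are rejected with BAD_VALUE
    sp<camera3::Camera3OutputStreamInterface> out = streams.get(0);  // nullptr if not present
    std::vector<int> ids = streams.getStreamIds();   // snapshot of the registered stream IDs
    streams.remove(0);

All accessors take the internal mutex, so the set can be shared between the capture result path and client calls such as disconnect/dump.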
diff --git a/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp b/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp
new file mode 100644
index 0000000..eb7b666
--- /dev/null
+++ b/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp
@@ -0,0 +1,1414 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "Camera3-OutputUtils"
+#define ATRACE_TAG ATRACE_TAG_CAMERA
+//#define LOG_NDEBUG 0
+//#define LOG_NNDEBUG 0 // Per-frame verbose logging
+
+#ifdef LOG_NNDEBUG
+#define ALOGVV(...) ALOGV(__VA_ARGS__)
+#else
+#define ALOGVV(...) ((void)0)
+#endif
+
+// Convenience macros for transitioning to the error state
+#define SET_ERR(fmt, ...) states.setErrIntf.setErrorState( \
+ "%s: " fmt, __FUNCTION__, \
+ ##__VA_ARGS__)
+
+#include <inttypes.h>
+
+#include <utils/Log.h>
+#include <utils/SortedVector.h>
+#include <utils/Trace.h>
+
+#include <android/hardware/camera2/ICameraDeviceCallbacks.h>
+
+#include <android/hardware/camera/device/3.4/ICameraDeviceCallback.h>
+#include <android/hardware/camera/device/3.5/ICameraDeviceCallback.h>
+#include <android/hardware/camera/device/3.5/ICameraDeviceSession.h>
+
+#include <camera_metadata_hidden.h>
+
+#include "device3/Camera3OutputUtils.h"
+
+using namespace android::camera3;
+using namespace android::hardware::camera;
+
+namespace android {
+namespace camera3 {
+
+status_t fixupMonochromeTags(
+ CaptureOutputStates& states,
+ const CameraMetadata& deviceInfo,
+ CameraMetadata& resultMetadata) {
+ status_t res = OK;
+ if (!states.needFixupMonoChrome) {
+ return res;
+ }
+
+ // Remove tags that are not applicable to monochrome camera.
+ int32_t tagsToRemove[] = {
+ ANDROID_SENSOR_GREEN_SPLIT,
+ ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
+ ANDROID_COLOR_CORRECTION_MODE,
+ ANDROID_COLOR_CORRECTION_TRANSFORM,
+ ANDROID_COLOR_CORRECTION_GAINS,
+ };
+ for (auto tag : tagsToRemove) {
+ res = resultMetadata.erase(tag);
+ if (res != OK) {
+ ALOGE("%s: Failed to remove tag %d for monochrome camera", __FUNCTION__, tag);
+ return res;
+ }
+ }
+
+ // ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL
+ camera_metadata_entry blEntry = resultMetadata.find(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
+ for (size_t i = 1; i < blEntry.count; i++) {
+ blEntry.data.f[i] = blEntry.data.f[0];
+ }
+
+ // ANDROID_SENSOR_NOISE_PROFILE
+ camera_metadata_entry npEntry = resultMetadata.find(ANDROID_SENSOR_NOISE_PROFILE);
+ if (npEntry.count > 0 && npEntry.count % 2 == 0) {
+ double np[] = {npEntry.data.d[0], npEntry.data.d[1]};
+ res = resultMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, np, 2);
+ if (res != OK) {
+ ALOGE("%s: Failed to update SENSOR_NOISE_PROFILE: %s (%d)",
+ __FUNCTION__, strerror(-res), res);
+ return res;
+ }
+ }
+
+ // ANDROID_STATISTICS_LENS_SHADING_MAP
+ camera_metadata_ro_entry lsSizeEntry = deviceInfo.find(ANDROID_LENS_INFO_SHADING_MAP_SIZE);
+ camera_metadata_entry lsEntry = resultMetadata.find(ANDROID_STATISTICS_LENS_SHADING_MAP);
+ if (lsSizeEntry.count == 2 && lsEntry.count > 0
+ && (int32_t)lsEntry.count == 4 * lsSizeEntry.data.i32[0] * lsSizeEntry.data.i32[1]) {
+ for (int32_t i = 0; i < lsSizeEntry.data.i32[0] * lsSizeEntry.data.i32[1]; i++) {
+ lsEntry.data.f[4*i+1] = lsEntry.data.f[4*i];
+ lsEntry.data.f[4*i+2] = lsEntry.data.f[4*i];
+ lsEntry.data.f[4*i+3] = lsEntry.data.f[4*i];
+ }
+ }
+
+ // ANDROID_TONEMAP_CURVE_BLUE
+ // ANDROID_TONEMAP_CURVE_GREEN
+ // ANDROID_TONEMAP_CURVE_RED
+ camera_metadata_entry tcbEntry = resultMetadata.find(ANDROID_TONEMAP_CURVE_BLUE);
+ camera_metadata_entry tcgEntry = resultMetadata.find(ANDROID_TONEMAP_CURVE_GREEN);
+ camera_metadata_entry tcrEntry = resultMetadata.find(ANDROID_TONEMAP_CURVE_RED);
+ if (tcbEntry.count > 0
+ && tcbEntry.count == tcgEntry.count
+ && tcbEntry.count == tcrEntry.count) {
+ for (size_t i = 0; i < tcbEntry.count; i++) {
+ tcbEntry.data.f[i] = tcrEntry.data.f[i];
+ tcgEntry.data.f[i] = tcrEntry.data.f[i];
+ }
+ }
+
+ return res;
+}
+
+void insertResultLocked(CaptureOutputStates& states, CaptureResult *result, uint32_t frameNumber) {
+ if (result == nullptr) return;
+
+ camera_metadata_t *meta = const_cast<camera_metadata_t *>(
+ result->mMetadata.getAndLock());
+ set_camera_metadata_vendor_id(meta, states.vendorTagId);
+ result->mMetadata.unlock(meta);
+
+ if (result->mMetadata.update(ANDROID_REQUEST_FRAME_COUNT,
+ (int32_t*)&frameNumber, 1) != OK) {
+ SET_ERR("Failed to set frame number %d in metadata", frameNumber);
+ return;
+ }
+
+ if (result->mMetadata.update(ANDROID_REQUEST_ID, &result->mResultExtras.requestId, 1) != OK) {
+ SET_ERR("Failed to set request ID in metadata for frame %d", frameNumber);
+ return;
+ }
+
+ // Update vendor tag id for physical metadata
+ for (auto& physicalMetadata : result->mPhysicalMetadatas) {
+ camera_metadata_t *pmeta = const_cast<camera_metadata_t *>(
+ physicalMetadata.mPhysicalCameraMetadata.getAndLock());
+ set_camera_metadata_vendor_id(pmeta, states.vendorTagId);
+ physicalMetadata.mPhysicalCameraMetadata.unlock(pmeta);
+ }
+
+ // Valid result, insert into queue
+ List<CaptureResult>::iterator queuedResult =
+ states.resultQueue.insert(states.resultQueue.end(), CaptureResult(*result));
+ ALOGVV("%s: result requestId = %" PRId32 ", frameNumber = %" PRId64
+ ", burstId = %" PRId32, __FUNCTION__,
+ queuedResult->mResultExtras.requestId,
+ queuedResult->mResultExtras.frameNumber,
+ queuedResult->mResultExtras.burstId);
+
+ states.resultSignal.notify_one();
+}
+
+
+void sendPartialCaptureResult(CaptureOutputStates& states,
+ const camera_metadata_t * partialResult,
+ const CaptureResultExtras &resultExtras, uint32_t frameNumber) {
+ ATRACE_CALL();
+ std::lock_guard<std::mutex> l(states.outputLock);
+
+ CaptureResult captureResult;
+ captureResult.mResultExtras = resultExtras;
+ captureResult.mMetadata = partialResult;
+
+ // Fix up result metadata for monochrome camera.
+ status_t res = fixupMonochromeTags(states, states.deviceInfo, captureResult.mMetadata);
+ if (res != OK) {
+ SET_ERR("Failed to override result metadata: %s (%d)", strerror(-res), res);
+ return;
+ }
+
+ insertResultLocked(states, &captureResult, frameNumber);
+}
+
+void sendCaptureResult(
+ CaptureOutputStates& states,
+ CameraMetadata &pendingMetadata,
+ CaptureResultExtras &resultExtras,
+ CameraMetadata &collectedPartialResult,
+ uint32_t frameNumber,
+ bool reprocess, bool zslStillCapture,
+ const std::set<std::string>& cameraIdsWithZoom,
+ const std::vector<PhysicalCaptureResultInfo>& physicalMetadatas) {
+ ATRACE_CALL();
+ if (pendingMetadata.isEmpty())
+ return;
+
+ std::lock_guard<std::mutex> l(states.outputLock);
+
+ // TODO: need to track errors for tighter bounds on expected frame number
+ if (reprocess) {
+ if (frameNumber < states.nextReprocResultFrameNum) {
+ SET_ERR("Out-of-order reprocess capture result metadata submitted! "
+ "(got frame number %d, expecting %d)",
+ frameNumber, states.nextReprocResultFrameNum);
+ return;
+ }
+ states.nextReprocResultFrameNum = frameNumber + 1;
+ } else if (zslStillCapture) {
+ if (frameNumber < states.nextZslResultFrameNum) {
+ SET_ERR("Out-of-order ZSL still capture result metadata submitted! "
+ "(got frame number %d, expecting %d)",
+ frameNumber, states.nextZslResultFrameNum);
+ return;
+ }
+ states.nextZslResultFrameNum = frameNumber + 1;
+ } else {
+ if (frameNumber < states.nextResultFrameNum) {
+ SET_ERR("Out-of-order capture result metadata submitted! "
+ "(got frame number %d, expecting %d)",
+ frameNumber, states.nextResultFrameNum);
+ return;
+ }
+ states.nextResultFrameNum = frameNumber + 1;
+ }
+
+ CaptureResult captureResult;
+ captureResult.mResultExtras = resultExtras;
+ captureResult.mMetadata = pendingMetadata;
+ captureResult.mPhysicalMetadatas = physicalMetadatas;
+
+ // Append any previous partials to form a complete result
+ if (states.usePartialResult && !collectedPartialResult.isEmpty()) {
+ captureResult.mMetadata.append(collectedPartialResult);
+ }
+
+ captureResult.mMetadata.sort();
+
+ // Check that there's a timestamp in the result metadata
+ camera_metadata_entry timestamp = captureResult.mMetadata.find(ANDROID_SENSOR_TIMESTAMP);
+ if (timestamp.count == 0) {
+ SET_ERR("No timestamp provided by HAL for frame %d!",
+ frameNumber);
+ return;
+ }
+ for (auto& physicalMetadata : captureResult.mPhysicalMetadatas) {
+ camera_metadata_entry timestamp =
+ physicalMetadata.mPhysicalCameraMetadata.find(ANDROID_SENSOR_TIMESTAMP);
+ if (timestamp.count == 0) {
+ SET_ERR("No timestamp provided by HAL for physical camera %s frame %d!",
+ String8(physicalMetadata.mPhysicalCameraId).c_str(), frameNumber);
+ return;
+ }
+ }
+
+ // Fix up some result metadata to account for HAL-level distortion correction
+ status_t res =
+ states.distortionMappers[states.cameraId.c_str()].correctCaptureResult(
+ &captureResult.mMetadata);
+ if (res != OK) {
+ SET_ERR("Unable to correct capture result metadata for frame %d: %s (%d)",
+ frameNumber, strerror(-res), res);
+ return;
+ }
+
+ // Fix up result metadata to account for zoom ratio availabilities between
+ // HAL and app.
+ bool zoomRatioIs1 = cameraIdsWithZoom.find(states.cameraId.c_str()) == cameraIdsWithZoom.end();
+ res = states.zoomRatioMappers[states.cameraId.c_str()].updateCaptureResult(
+ &captureResult.mMetadata, zoomRatioIs1);
+ if (res != OK) {
+ SET_ERR("Failed to update capture result zoom ratio metadata for frame %d: %s (%d)",
+ frameNumber, strerror(-res), res);
+ return;
+ }
+
+ for (auto& physicalMetadata : captureResult.mPhysicalMetadatas) {
+ String8 cameraId8(physicalMetadata.mPhysicalCameraId);
+ if (states.distortionMappers.find(cameraId8.c_str()) != states.distortionMappers.end()) {
+ res = states.distortionMappers[cameraId8.c_str()].correctCaptureResult(
+ &physicalMetadata.mPhysicalCameraMetadata);
+ if (res != OK) {
+ SET_ERR("Unable to correct physical capture result metadata for frame %d: %s (%d)",
+ frameNumber, strerror(-res), res);
+ return;
+ }
+ }
+
+ zoomRatioIs1 = cameraIdsWithZoom.find(cameraId8.c_str()) == cameraIdsWithZoom.end();
+ res = states.zoomRatioMappers[cameraId8.c_str()].updateCaptureResult(
+ &physicalMetadata.mPhysicalCameraMetadata, zoomRatioIs1);
+ if (res != OK) {
+ SET_ERR("Failed to update camera %s's physical zoom ratio metadata for "
+ "frame %d: %s(%d)", cameraId8.c_str(), frameNumber, strerror(-res), res);
+ return;
+ }
+ }
+
+ // Fix up result metadata for monochrome camera.
+ res = fixupMonochromeTags(states, states.deviceInfo, captureResult.mMetadata);
+ if (res != OK) {
+ SET_ERR("Failed to override result metadata: %s (%d)", strerror(-res), res);
+ return;
+ }
+ for (auto& physicalMetadata : captureResult.mPhysicalMetadatas) {
+ String8 cameraId8(physicalMetadata.mPhysicalCameraId);
+ res = fixupMonochromeTags(states,
+ states.physicalDeviceInfoMap.at(cameraId8.c_str()),
+ physicalMetadata.mPhysicalCameraMetadata);
+ if (res != OK) {
+ SET_ERR("Failed to override result metadata: %s (%d)", strerror(-res), res);
+ return;
+ }
+ }
+
+ std::unordered_map<std::string, CameraMetadata> monitoredPhysicalMetadata;
+ for (auto& m : physicalMetadatas) {
+ monitoredPhysicalMetadata.emplace(String8(m.mPhysicalCameraId).string(),
+ CameraMetadata(m.mPhysicalCameraMetadata));
+ }
+ states.tagMonitor.monitorMetadata(TagMonitor::RESULT,
+ frameNumber, timestamp.data.i64[0], captureResult.mMetadata,
+ monitoredPhysicalMetadata);
+
+ insertResultLocked(states, &captureResult, frameNumber);
+}
+
+// Read one camera metadata blob, either from the FMQ or directly from the result argument.
+// Assumes the FMQ is already protected by a lock.
+status_t readOneCameraMetadataLocked(
+ std::unique_ptr<ResultMetadataQueue>& fmq,
+ uint64_t fmqResultSize,
+ hardware::camera::device::V3_2::CameraMetadata& resultMetadata,
+ const hardware::camera::device::V3_2::CameraMetadata& result) {
+ if (fmqResultSize > 0) {
+ resultMetadata.resize(fmqResultSize);
+ if (fmq == nullptr) {
+ return NO_MEMORY; // logged in initialize()
+ }
+ if (!fmq->read(resultMetadata.data(), fmqResultSize)) {
+ ALOGE("%s: Cannot read camera metadata from fmq, size = %" PRIu64,
+ __FUNCTION__, fmqResultSize);
+ return INVALID_OPERATION;
+ }
+ } else {
+ resultMetadata.setToExternal(const_cast<uint8_t *>(result.data()),
+ result.size());
+ }
+
+ if (resultMetadata.size() != 0) {
+ status_t res;
+ const camera_metadata_t* metadata =
+ reinterpret_cast<const camera_metadata_t*>(resultMetadata.data());
+ size_t expected_metadata_size = resultMetadata.size();
+ if ((res = validate_camera_metadata_structure(metadata, &expected_metadata_size)) != OK) {
+ ALOGE("%s: Invalid camera metadata received by camera service from HAL: %s (%d)",
+ __FUNCTION__, strerror(-res), res);
+ return INVALID_OPERATION;
+ }
+ }
+
+ return OK;
+}
+
+void removeInFlightMapEntryLocked(CaptureOutputStates& states, int idx) {
+ ATRACE_CALL();
+ InFlightRequestMap& inflightMap = states.inflightMap;
+ nsecs_t duration = inflightMap.valueAt(idx).maxExpectedDuration;
+ inflightMap.removeItemsAt(idx, 1);
+
+ states.inflightIntf.onInflightEntryRemovedLocked(duration);
+}
+
+void removeInFlightRequestIfReadyLocked(CaptureOutputStates& states, int idx) {
+ InFlightRequestMap& inflightMap = states.inflightMap;
+ const InFlightRequest &request = inflightMap.valueAt(idx);
+ const uint32_t frameNumber = inflightMap.keyAt(idx);
+
+ nsecs_t sensorTimestamp = request.sensorTimestamp;
+ nsecs_t shutterTimestamp = request.shutterTimestamp;
+
+ // Check if it's okay to remove the request from InFlightMap:
+ // In the case of a successful request:
+ // all input and output buffers, all result metadata, shutter callback
+ // arrived.
+ // In the case of an unsuccessful request:
+ // all input and output buffers arrived.
+ if (request.numBuffersLeft == 0 &&
+ (request.skipResultMetadata ||
+ (request.haveResultMetadata && shutterTimestamp != 0))) {
+ if (request.stillCapture) {
+ ATRACE_ASYNC_END("still capture", frameNumber);
+ }
+
+ ATRACE_ASYNC_END("frame capture", frameNumber);
+
+ // Sanity check: make sure the sensor timestamp matches the shutter timestamp
+ // when the request has a callback.
+ if (request.hasCallback && request.requestStatus == OK &&
+ sensorTimestamp != shutterTimestamp) {
+ SET_ERR("sensor timestamp (%" PRId64
+ ") for frame %d doesn't match shutter timestamp (%" PRId64 ")",
+ sensorTimestamp, frameNumber, shutterTimestamp);
+ }
+
+ // An unsuccessful request may still have pending output buffers to
+ // return.
+ assert(request.requestStatus != OK ||
+ request.pendingOutputBuffers.size() == 0);
+
+ returnOutputBuffers(
+ states.useHalBufManager, states.listener,
+ request.pendingOutputBuffers.array(),
+ request.pendingOutputBuffers.size(), 0, /*timestampIncreasing*/true,
+ request.outputSurfaces, request.resultExtras);
+
+ removeInFlightMapEntryLocked(states, idx);
+ ALOGVV("%s: removed frame %d from InFlightMap", __FUNCTION__, frameNumber);
+ }
+
+ states.inflightIntf.checkInflightMapLengthLocked();
+}
+
+void processCaptureResult(CaptureOutputStates& states, const camera3_capture_result *result) {
+ ATRACE_CALL();
+
+ status_t res;
+
+ uint32_t frameNumber = result->frame_number;
+ if (result->result == NULL && result->num_output_buffers == 0 &&
+ result->input_buffer == NULL) {
+ SET_ERR("No result data provided by HAL for frame %d",
+ frameNumber);
+ return;
+ }
+
+ if (!states.usePartialResult &&
+ result->result != NULL &&
+ result->partial_result != 1) {
+ SET_ERR("Result is malformed for frame %d: partial_result %u must be 1"
+ " if partial result is not supported",
+ frameNumber, result->partial_result);
+ return;
+ }
+
+ bool isPartialResult = false;
+ CameraMetadata collectedPartialResult;
+ bool hasInputBufferInRequest = false;
+
+ // Get shutter timestamp and resultExtras from list of in-flight requests,
+ // where it was added by the shutter notification for this frame. If the
+ // shutter timestamp isn't received yet, append the output buffers to the
+ // in-flight request and they will be returned when the shutter timestamp
+ // arrives. Update the in-flight status and remove the in-flight entry if
+ // all result data and shutter timestamp have been received.
+ nsecs_t shutterTimestamp = 0;
+ {
+ std::lock_guard<std::mutex> l(states.inflightLock);
+ ssize_t idx = states.inflightMap.indexOfKey(frameNumber);
+ if (idx == NAME_NOT_FOUND) {
+ SET_ERR("Unknown frame number for capture result: %d",
+ frameNumber);
+ return;
+ }
+ InFlightRequest &request = states.inflightMap.editValueAt(idx);
+ ALOGVV("%s: got InFlightRequest requestId = %" PRId32
+ ", frameNumber = %" PRId64 ", burstId = %" PRId32
+ ", partialResultCount = %d, hasCallback = %d",
+ __FUNCTION__, request.resultExtras.requestId,
+ request.resultExtras.frameNumber, request.resultExtras.burstId,
+ result->partial_result, request.hasCallback);
+ // Always update the partial count to the latest one if it's not 0
+ // (buffers only). When framework aggregates adjacent partial results
+ // into one, the latest partial count will be used.
+ if (result->partial_result != 0)
+ request.resultExtras.partialResultCount = result->partial_result;
+
+ // Check if this result carries only partial metadata
+ if (states.usePartialResult && result->result != NULL) {
+ if (result->partial_result > states.numPartialResults || result->partial_result < 1) {
+ SET_ERR("Result is malformed for frame %d: partial_result %u must be in"
+ " the range of [1, %d] when metadata is included in the result",
+ frameNumber, result->partial_result, states.numPartialResults);
+ return;
+ }
+ isPartialResult = (result->partial_result < states.numPartialResults);
+ if (isPartialResult && result->num_physcam_metadata) {
+ SET_ERR("Result is malformed for frame %d: partial_result not allowed for"
+ " physical camera result", frameNumber);
+ return;
+ }
+ if (isPartialResult) {
+ request.collectedPartialResult.append(result->result);
+ }
+
+ if (isPartialResult && request.hasCallback) {
+ // Send partial capture result
+ sendPartialCaptureResult(states, result->result, request.resultExtras,
+ frameNumber);
+ }
+ }
+
+ shutterTimestamp = request.shutterTimestamp;
+ hasInputBufferInRequest = request.hasInputBuffer;
+
+ // Did we get the (final) result metadata for this capture?
+ if (result->result != NULL && !isPartialResult) {
+ if (request.physicalCameraIds.size() != result->num_physcam_metadata) {
+ SET_ERR("Expected physical Camera metadata count %d not equal to actual count %d",
+ request.physicalCameraIds.size(), result->num_physcam_metadata);
+ return;
+ }
+ if (request.haveResultMetadata) {
+ SET_ERR("Called multiple times with metadata for frame %d",
+ frameNumber);
+ return;
+ }
+ for (uint32_t i = 0; i < result->num_physcam_metadata; i++) {
+ String8 physicalId(result->physcam_ids[i]);
+ std::set<String8>::iterator cameraIdIter =
+ request.physicalCameraIds.find(physicalId);
+ if (cameraIdIter != request.physicalCameraIds.end()) {
+ request.physicalCameraIds.erase(cameraIdIter);
+ } else {
+ SET_ERR("Total result for frame %d has already returned for camera %s",
+ frameNumber, physicalId.c_str());
+ return;
+ }
+ }
+ if (states.usePartialResult &&
+ !request.collectedPartialResult.isEmpty()) {
+ collectedPartialResult.acquire(
+ request.collectedPartialResult);
+ }
+ request.haveResultMetadata = true;
+ }
+
+ uint32_t numBuffersReturned = result->num_output_buffers;
+ if (result->input_buffer != NULL) {
+ if (hasInputBufferInRequest) {
+ numBuffersReturned += 1;
+ } else {
+ ALOGW("%s: Input buffer should be NULL if there is no input"
+ " buffer sent in the request",
+ __FUNCTION__);
+ }
+ }
+ request.numBuffersLeft -= numBuffersReturned;
+ if (request.numBuffersLeft < 0) {
+ SET_ERR("Too many buffers returned for frame %d",
+ frameNumber);
+ return;
+ }
+
+ camera_metadata_ro_entry_t entry;
+ res = find_camera_metadata_ro_entry(result->result,
+ ANDROID_SENSOR_TIMESTAMP, &entry);
+ if (res == OK && entry.count == 1) {
+ request.sensorTimestamp = entry.data.i64[0];
+ }
+
+ // If shutter event isn't received yet, append the output buffers to
+ // the in-flight request. Otherwise, return the output buffers to
+ // streams.
+ if (shutterTimestamp == 0) {
+ request.pendingOutputBuffers.appendArray(result->output_buffers,
+ result->num_output_buffers);
+ } else {
+ bool timestampIncreasing = !(request.zslCapture || request.hasInputBuffer);
+ returnOutputBuffers(states.useHalBufManager, states.listener,
+ result->output_buffers, result->num_output_buffers,
+ shutterTimestamp, timestampIncreasing,
+ request.outputSurfaces, request.resultExtras);
+ }
+
+ if (result->result != NULL && !isPartialResult) {
+ for (uint32_t i = 0; i < result->num_physcam_metadata; i++) {
+ CameraMetadata physicalMetadata;
+ physicalMetadata.append(result->physcam_metadata[i]);
+ request.physicalMetadatas.push_back({String16(result->physcam_ids[i]),
+ physicalMetadata});
+ }
+ if (shutterTimestamp == 0) {
+ request.pendingMetadata = result->result;
+ request.collectedPartialResult = collectedPartialResult;
+ } else if (request.hasCallback) {
+ CameraMetadata metadata;
+ metadata = result->result;
+ sendCaptureResult(states, metadata, request.resultExtras,
+ collectedPartialResult, frameNumber,
+ hasInputBufferInRequest, request.zslCapture && request.stillCapture,
+ request.cameraIdsWithZoom, request.physicalMetadatas);
+ }
+ }
+ removeInFlightRequestIfReadyLocked(states, idx);
+ } // scope for states.inflightLock
+
+ if (result->input_buffer != NULL) {
+ if (hasInputBufferInRequest) {
+ Camera3Stream *stream =
+ Camera3Stream::cast(result->input_buffer->stream);
+ res = stream->returnInputBuffer(*(result->input_buffer));
+ // Note: stream may be deallocated at this point, if this buffer was the
+ // last reference to it.
+ if (res != OK) {
+ ALOGE("%s: RequestThread: Can't return input buffer for frame %d to"
+ " its stream:%s (%d)", __FUNCTION__,
+ frameNumber, strerror(-res), res);
+ }
+ } else {
+ ALOGW("%s: Input buffer should be NULL if there is no input"
+ " buffer sent in the request, skipping input buffer return.",
+ __FUNCTION__);
+ }
+ }
+}
+
+void processOneCaptureResultLocked(
+ CaptureOutputStates& states,
+ const hardware::camera::device::V3_2::CaptureResult& result,
+ const hardware::hidl_vec<
+ hardware::camera::device::V3_4::PhysicalCameraMetadata> physicalCameraMetadata) {
+ using hardware::camera::device::V3_2::StreamBuffer;
+ using hardware::camera::device::V3_2::BufferStatus;
+ std::unique_ptr<ResultMetadataQueue>& fmq = states.fmq;
+ BufferRecordsInterface& bufferRecords = states.bufferRecordsIntf;
+ camera3_capture_result r;
+ status_t res;
+ r.frame_number = result.frameNumber;
+
+ // Read and validate the result metadata.
+ hardware::camera::device::V3_2::CameraMetadata resultMetadata;
+ res = readOneCameraMetadataLocked(
+ fmq, result.fmqResultSize,
+ resultMetadata, result.result);
+ if (res != OK) {
+ ALOGE("%s: Frame %d: Failed to read capture result metadata",
+ __FUNCTION__, result.frameNumber);
+ return;
+ }
+ r.result = reinterpret_cast<const camera_metadata_t*>(resultMetadata.data());
+
+ // Read and validate physical camera metadata
+ size_t physResultCount = physicalCameraMetadata.size();
+ std::vector<const char*> physCamIds(physResultCount);
+ std::vector<const camera_metadata_t *> phyCamMetadatas(physResultCount);
+ std::vector<hardware::camera::device::V3_2::CameraMetadata> physResultMetadata;
+ physResultMetadata.resize(physResultCount);
+ for (size_t i = 0; i < physicalCameraMetadata.size(); i++) {
+ res = readOneCameraMetadataLocked(fmq, physicalCameraMetadata[i].fmqMetadataSize,
+ physResultMetadata[i], physicalCameraMetadata[i].metadata);
+ if (res != OK) {
+ ALOGE("%s: Frame %d: Failed to read capture result metadata for camera %s",
+ __FUNCTION__, result.frameNumber,
+ physicalCameraMetadata[i].physicalCameraId.c_str());
+ return;
+ }
+ physCamIds[i] = physicalCameraMetadata[i].physicalCameraId.c_str();
+ phyCamMetadatas[i] = reinterpret_cast<const camera_metadata_t*>(
+ physResultMetadata[i].data());
+ }
+ r.num_physcam_metadata = physResultCount;
+ r.physcam_ids = physCamIds.data();
+ r.physcam_metadata = phyCamMetadatas.data();
+
+ std::vector<camera3_stream_buffer_t> outputBuffers(result.outputBuffers.size());
+ std::vector<buffer_handle_t> outputBufferHandles(result.outputBuffers.size());
+ for (size_t i = 0; i < result.outputBuffers.size(); i++) {
+ auto& bDst = outputBuffers[i];
+ const StreamBuffer &bSrc = result.outputBuffers[i];
+
+ sp<Camera3StreamInterface> stream = states.outputStreams.get(bSrc.streamId);
+ if (stream == nullptr) {
+ ALOGE("%s: Frame %d: Buffer %zu: Invalid output stream id %d",
+ __FUNCTION__, result.frameNumber, i, bSrc.streamId);
+ return;
+ }
+ bDst.stream = stream->asHalStream();
+
+ bool noBufferReturned = false;
+ buffer_handle_t *buffer = nullptr;
+ if (states.useHalBufManager) {
+ // This is usually suspicious, but can be legitimate during a flush, where the HAL
+ // may have to return a capture result before a buffer was ever requested
+ if (bSrc.bufferId == BUFFER_ID_NO_BUFFER) {
+ if (bSrc.status == BufferStatus::OK) {
+ ALOGE("%s: Frame %d: Buffer %zu: No bufferId for stream %d",
+ __FUNCTION__, result.frameNumber, i, bSrc.streamId);
+ // Still proceed so other buffers can be returned
+ }
+ noBufferReturned = true;
+ }
+ if (noBufferReturned) {
+ res = OK;
+ } else {
+ res = bufferRecords.popInflightRequestBuffer(bSrc.bufferId, &buffer);
+ }
+ } else {
+ res = bufferRecords.popInflightBuffer(result.frameNumber, bSrc.streamId, &buffer);
+ }
+
+ if (res != OK) {
+ ALOGE("%s: Frame %d: Buffer %zu: No in-flight buffer for stream %d",
+ __FUNCTION__, result.frameNumber, i, bSrc.streamId);
+ return;
+ }
+
+ bDst.buffer = buffer;
+ bDst.status = mapHidlBufferStatus(bSrc.status);
+ bDst.acquire_fence = -1;
+ if (bSrc.releaseFence == nullptr) {
+ bDst.release_fence = -1;
+ } else if (bSrc.releaseFence->numFds == 1) {
+ if (noBufferReturned) {
+ ALOGE("%s: got releaseFence without output buffer!", __FUNCTION__);
+ }
+ bDst.release_fence = dup(bSrc.releaseFence->data[0]);
+ } else {
+ ALOGE("%s: Frame %d: Invalid release fence for buffer %zu, fd count is %d, not 1",
+ __FUNCTION__, result.frameNumber, i, bSrc.releaseFence->numFds);
+ return;
+ }
+ }
+ r.num_output_buffers = outputBuffers.size();
+ r.output_buffers = outputBuffers.data();
+
+ camera3_stream_buffer_t inputBuffer;
+ if (result.inputBuffer.streamId == -1) {
+ r.input_buffer = nullptr;
+ } else {
+ if (states.inputStream->getId() != result.inputBuffer.streamId) {
+ ALOGE("%s: Frame %d: Invalid input stream id %d", __FUNCTION__,
+ result.frameNumber, result.inputBuffer.streamId);
+ return;
+ }
+ inputBuffer.stream = states.inputStream->asHalStream();
+ buffer_handle_t *buffer;
+ res = bufferRecords.popInflightBuffer(result.frameNumber, result.inputBuffer.streamId,
+ &buffer);
+ if (res != OK) {
+ ALOGE("%s: Frame %d: Input buffer: No in-flight buffer for stream %d",
+ __FUNCTION__, result.frameNumber, result.inputBuffer.streamId);
+ return;
+ }
+ inputBuffer.buffer = buffer;
+ inputBuffer.status = mapHidlBufferStatus(result.inputBuffer.status);
+ inputBuffer.acquire_fence = -1;
+ if (result.inputBuffer.releaseFence == nullptr) {
+ inputBuffer.release_fence = -1;
+ } else if (result.inputBuffer.releaseFence->numFds == 1) {
+ inputBuffer.release_fence = dup(result.inputBuffer.releaseFence->data[0]);
+ } else {
+ ALOGE("%s: Frame %d: Invalid release fence for input buffer, fd count is %d, not 1",
+ __FUNCTION__, result.frameNumber, result.inputBuffer.releaseFence->numFds);
+ return;
+ }
+ r.input_buffer = &inputBuffer;
+ }
+
+ r.partial_result = result.partialResult;
+
+ processCaptureResult(states, &r);
+}
+
+void returnOutputBuffers(
+ bool useHalBufManager,
+ sp<NotificationListener> listener,
+ const camera3_stream_buffer_t *outputBuffers, size_t numBuffers,
+ nsecs_t timestamp, bool timestampIncreasing,
+ const SurfaceMap& outputSurfaces,
+ const CaptureResultExtras &inResultExtras) {
+
+ for (size_t i = 0; i < numBuffers; i++)
+ {
+ if (outputBuffers[i].buffer == nullptr) {
+ if (!useHalBufManager) {
+ // With the HAL buffer management API, the HAL sometimes has to return buffers whose
+ // output buffer handles have not been filled yet. This is illegal, however, if the
+ // HAL buffer management API is not in use.
+ ALOGE("%s: cannot return a null buffer!", __FUNCTION__);
+ }
+ continue;
+ }
+
+ Camera3StreamInterface *stream = Camera3Stream::cast(outputBuffers[i].stream);
+ int streamId = stream->getId();
+ const auto& it = outputSurfaces.find(streamId);
+ status_t res = OK;
+ if (it != outputSurfaces.end()) {
+ res = stream->returnBuffer(
+ outputBuffers[i], timestamp, timestampIncreasing, it->second,
+ inResultExtras.frameNumber);
+ } else {
+ res = stream->returnBuffer(
+ outputBuffers[i], timestamp, timestampIncreasing, std::vector<size_t> (),
+ inResultExtras.frameNumber);
+ }
+
+ // Note: stream may be deallocated at this point, if this buffer was
+ // the last reference to it.
+ if (res == NO_INIT || res == DEAD_OBJECT) {
+ ALOGV("Can't return buffer to its stream: %s (%d)", strerror(-res), res);
+ } else if (res != OK) {
+ ALOGE("Can't return buffer to its stream: %s (%d)", strerror(-res), res);
+ }
+
+ // Consumers with long processing times can cause returnBuffer to time out for a shared
+ // stream. If that happens, cancel the buffer and send a buffer error to the client.
+ if (it != outputSurfaces.end() && res == TIMED_OUT &&
+ outputBuffers[i].status == CAMERA3_BUFFER_STATUS_OK) {
+ // cancel the buffer
+ camera3_stream_buffer_t sb = outputBuffers[i];
+ sb.status = CAMERA3_BUFFER_STATUS_ERROR;
+ stream->returnBuffer(sb, /*timestamp*/0, timestampIncreasing, std::vector<size_t> (),
+ inResultExtras.frameNumber);
+
+ if (listener != nullptr) {
+ CaptureResultExtras extras = inResultExtras;
+ extras.errorStreamId = streamId;
+ listener->notifyError(
+ hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_BUFFER,
+ extras);
+ }
+ }
+ }
+}
+
+void notifyShutter(CaptureOutputStates& states, const camera3_shutter_msg_t &msg) {
+ ATRACE_CALL();
+ ssize_t idx;
+
+ // Set timestamp for the request in the in-flight tracking
+ // and get the request ID to send upstream
+ {
+ std::lock_guard<std::mutex> l(states.inflightLock);
+ InFlightRequestMap& inflightMap = states.inflightMap;
+ idx = inflightMap.indexOfKey(msg.frame_number);
+ if (idx >= 0) {
+ InFlightRequest &r = inflightMap.editValueAt(idx);
+
+ // Verify ordering of shutter notifications
+ {
+ std::lock_guard<std::mutex> l(states.outputLock);
+ // TODO: need to track errors for tighter bounds on expected frame number.
+ if (r.hasInputBuffer) {
+ if (msg.frame_number < states.nextReprocShutterFrameNum) {
+ SET_ERR("Reprocess shutter notification out-of-order. Expected "
+ "notification for frame %d, got frame %d",
+ states.nextReprocShutterFrameNum, msg.frame_number);
+ return;
+ }
+ states.nextReprocShutterFrameNum = msg.frame_number + 1;
+ } else if (r.zslCapture && r.stillCapture) {
+ if (msg.frame_number < states.nextZslShutterFrameNum) {
+ SET_ERR("ZSL still capture shutter notification out-of-order. Expected "
+ "notification for frame %d, got frame %d",
+ states.nextZslShutterFrameNum, msg.frame_number);
+ return;
+ }
+ states.nextZslShutterFrameNum = msg.frame_number + 1;
+ } else {
+ if (msg.frame_number < states.nextShutterFrameNum) {
+ SET_ERR("Shutter notification out-of-order. Expected "
+ "notification for frame %d, got frame %d",
+ states.nextShutterFrameNum, msg.frame_number);
+ return;
+ }
+ states.nextShutterFrameNum = msg.frame_number + 1;
+ }
+ }
+
+ r.shutterTimestamp = msg.timestamp;
+ if (r.hasCallback) {
+ ALOGVV("Camera %s: %s: Shutter fired for frame %d (id %d) at %" PRId64,
+ states.cameraId.string(), __FUNCTION__,
+ msg.frame_number, r.resultExtras.requestId, msg.timestamp);
+ // Call listener, if any
+ if (states.listener != nullptr) {
+ states.listener->notifyShutter(r.resultExtras, msg.timestamp);
+ }
+ // send pending result and buffers
+ sendCaptureResult(states,
+ r.pendingMetadata, r.resultExtras,
+ r.collectedPartialResult, msg.frame_number,
+ r.hasInputBuffer, r.zslCapture && r.stillCapture,
+ r.cameraIdsWithZoom, r.physicalMetadatas);
+ }
+ bool timestampIncreasing = !(r.zslCapture || r.hasInputBuffer);
+ returnOutputBuffers(
+ states.useHalBufManager, states.listener,
+ r.pendingOutputBuffers.array(),
+ r.pendingOutputBuffers.size(), r.shutterTimestamp, timestampIncreasing,
+ r.outputSurfaces, r.resultExtras);
+ r.pendingOutputBuffers.clear();
+
+ removeInFlightRequestIfReadyLocked(states, idx);
+ }
+ }
+ if (idx < 0) {
+ SET_ERR("Shutter notification for non-existent frame number %d",
+ msg.frame_number);
+ }
+}
+
+void notifyError(CaptureOutputStates& states, const camera3_error_msg_t &msg) {
+ ATRACE_CALL();
+ // Map camera HAL error codes to ICameraDeviceCallback error codes
+ // Index into this with the HAL error code
+ static const int32_t halErrorMap[CAMERA3_MSG_NUM_ERRORS] = {
+ // 0 = Unused error code
+ hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_INVALID_ERROR,
+ // 1 = CAMERA3_MSG_ERROR_DEVICE
+ hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_DEVICE,
+ // 2 = CAMERA3_MSG_ERROR_REQUEST
+ hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_REQUEST,
+ // 3 = CAMERA3_MSG_ERROR_RESULT
+ hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_RESULT,
+ // 4 = CAMERA3_MSG_ERROR_BUFFER
+ hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_BUFFER
+ };
+
+ int32_t errorCode =
+ ((msg.error_code >= 0) &&
+ (msg.error_code < CAMERA3_MSG_NUM_ERRORS)) ?
+ halErrorMap[msg.error_code] :
+ hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_INVALID_ERROR;
+
+ int streamId = 0;
+ String16 physicalCameraId;
+ if (msg.error_stream != nullptr) {
+ Camera3Stream *stream =
+ Camera3Stream::cast(msg.error_stream);
+ streamId = stream->getId();
+ physicalCameraId = String16(stream->physicalCameraId());
+ }
+ ALOGV("Camera %s: %s: HAL error, frame %d, stream %d: %d",
+ states.cameraId.string(), __FUNCTION__, msg.frame_number,
+ streamId, msg.error_code);
+
+ CaptureResultExtras resultExtras;
+ switch (errorCode) {
+ case hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_DEVICE:
+ // SET_ERR calls into listener to notify application
+ SET_ERR("Camera HAL reported serious device error");
+ break;
+ case hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_REQUEST:
+ case hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_RESULT:
+ case hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_BUFFER:
+ {
+ std::lock_guard<std::mutex> l(states.inflightLock);
+ ssize_t idx = states.inflightMap.indexOfKey(msg.frame_number);
+ if (idx >= 0) {
+ InFlightRequest &r = states.inflightMap.editValueAt(idx);
+ r.requestStatus = msg.error_code;
+ resultExtras = r.resultExtras;
+ bool logicalDeviceResultError = false;
+ if (hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_RESULT ==
+ errorCode) {
+ if (physicalCameraId.size() > 0) {
+ String8 cameraId(physicalCameraId);
+ auto iter = r.physicalCameraIds.find(cameraId);
+ if (iter == r.physicalCameraIds.end()) {
+ ALOGE("%s: Reported result failure for physical camera device: %s "
+ " which is not part of the respective request!",
+ __FUNCTION__, cameraId.string());
+ break;
+ }
+ r.physicalCameraIds.erase(iter);
+ resultExtras.errorPhysicalCameraId = physicalCameraId;
+ } else {
+ logicalDeviceResultError = true;
+ }
+ }
+
+ if (logicalDeviceResultError
+ || hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_REQUEST ==
+ errorCode) {
+ r.skipResultMetadata = true;
+ }
+ if (logicalDeviceResultError) {
+ // In case of a missing result, check whether the buffers
+ // have been returned. If they have, remove the in-flight
+ // request.
+ // TODO: should we call this for ERROR_CAMERA_REQUEST as well?
+ // otherwise we are depending on HAL to send the buffers back after
+ // calling notifyError. Not sure if that's in the spec.
+ removeInFlightRequestIfReadyLocked(states, idx);
+ }
+ } else {
+ resultExtras.frameNumber = msg.frame_number;
+ ALOGE("Camera %s: %s: cannot find in-flight request on "
+ "frame %" PRId64 " error", states.cameraId.string(), __FUNCTION__,
+ resultExtras.frameNumber);
+ }
+ }
+ resultExtras.errorStreamId = streamId;
+ if (states.listener != nullptr) {
+ states.listener->notifyError(errorCode, resultExtras);
+ } else {
+ ALOGE("Camera %s: %s: no listener available",
+ states.cameraId.string(), __FUNCTION__);
+ }
+ break;
+ default:
+ // SET_ERR calls notifyError
+ SET_ERR("Unknown error message from HAL: %d", msg.error_code);
+ break;
+ }
+}
+
+void notify(CaptureOutputStates& states, const camera3_notify_msg *msg) {
+ switch (msg->type) {
+ case CAMERA3_MSG_ERROR: {
+ notifyError(states, msg->message.error);
+ break;
+ }
+ case CAMERA3_MSG_SHUTTER: {
+ notifyShutter(states, msg->message.shutter);
+ break;
+ }
+ default:
+ SET_ERR("Unknown notify message from HAL: %d",
+ msg->type);
+ }
+}
+
+void notify(CaptureOutputStates& states,
+ const hardware::camera::device::V3_2::NotifyMsg& msg) {
+ using android::hardware::camera::device::V3_2::MsgType;
+ using android::hardware::camera::device::V3_2::ErrorCode;
+
+ ATRACE_CALL();
+ camera3_notify_msg m;
+ switch (msg.type) {
+ case MsgType::ERROR:
+ m.type = CAMERA3_MSG_ERROR;
+ m.message.error.frame_number = msg.msg.error.frameNumber;
+ if (msg.msg.error.errorStreamId >= 0) {
+ sp<Camera3StreamInterface> stream =
+ states.outputStreams.get(msg.msg.error.errorStreamId);
+ if (stream == nullptr) {
+ ALOGE("%s: Frame %d: Invalid error stream id %d", __FUNCTION__,
+ m.message.error.frame_number, msg.msg.error.errorStreamId);
+ return;
+ }
+ m.message.error.error_stream = stream->asHalStream();
+ } else {
+ m.message.error.error_stream = nullptr;
+ }
+ switch (msg.msg.error.errorCode) {
+ case ErrorCode::ERROR_DEVICE:
+ m.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
+ break;
+ case ErrorCode::ERROR_REQUEST:
+ m.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
+ break;
+ case ErrorCode::ERROR_RESULT:
+ m.message.error.error_code = CAMERA3_MSG_ERROR_RESULT;
+ break;
+ case ErrorCode::ERROR_BUFFER:
+ m.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
+ break;
+ }
+ break;
+ case MsgType::SHUTTER:
+ m.type = CAMERA3_MSG_SHUTTER;
+ m.message.shutter.frame_number = msg.msg.shutter.frameNumber;
+ m.message.shutter.timestamp = msg.msg.shutter.timestamp;
+ break;
+ }
+ notify(states, &m);
+}
+
+void requestStreamBuffers(RequestBufferStates& states,
+ const hardware::hidl_vec<hardware::camera::device::V3_5::BufferRequest>& bufReqs,
+ hardware::camera::device::V3_5::ICameraDeviceCallback::requestStreamBuffers_cb _hidl_cb) {
+ using android::hardware::camera::device::V3_2::BufferStatus;
+ using android::hardware::camera::device::V3_2::StreamBuffer;
+ using android::hardware::camera::device::V3_5::BufferRequestStatus;
+ using android::hardware::camera::device::V3_5::StreamBufferRet;
+ using android::hardware::camera::device::V3_5::StreamBufferRequestError;
+
+ std::lock_guard<std::mutex> lock(states.reqBufferLock);
+
+ hardware::hidl_vec<StreamBufferRet> bufRets;
+ if (!states.useHalBufManager) {
+ ALOGE("%s: Camera %s does not support HAL buffer management",
+ __FUNCTION__, states.cameraId.string());
+ _hidl_cb(BufferRequestStatus::FAILED_ILLEGAL_ARGUMENTS, bufRets);
+ return;
+ }
+
+ SortedVector<int32_t> streamIds;
+ ssize_t sz = streamIds.setCapacity(bufReqs.size());
+ if (sz < 0 || static_cast<size_t>(sz) != bufReqs.size()) {
+ ALOGE("%s: failed to allocate memory for %zu buffer requests",
+ __FUNCTION__, bufReqs.size());
+ _hidl_cb(BufferRequestStatus::FAILED_ILLEGAL_ARGUMENTS, bufRets);
+ return;
+ }
+
+ if (bufReqs.size() > states.outputStreams.size()) {
+ ALOGE("%s: too many buffer requests (%zu > # of output streams %zu)",
+ __FUNCTION__, bufReqs.size(), states.outputStreams.size());
+ _hidl_cb(BufferRequestStatus::FAILED_ILLEGAL_ARGUMENTS, bufRets);
+ return;
+ }
+
+ // Check for repeated streamId
+ for (const auto& bufReq : bufReqs) {
+ if (streamIds.indexOf(bufReq.streamId) != NAME_NOT_FOUND) {
+ ALOGE("%s: Stream %d appears multiple times in buffer requests",
+ __FUNCTION__, bufReq.streamId);
+ _hidl_cb(BufferRequestStatus::FAILED_ILLEGAL_ARGUMENTS, bufRets);
+ return;
+ }
+ streamIds.add(bufReq.streamId);
+ }
+
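+ // Flag that a buffer request is in progress; this fails while the camera
+ // service is (re)configuring streams, in which case FAILED_CONFIGURING is
+ // reported back to the HAL.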
+ if (!states.reqBufferIntf.startRequestBuffer()) {
+ ALOGE("%s: request buffer disallowed while camera service is configuring",
+ __FUNCTION__);
+ _hidl_cb(BufferRequestStatus::FAILED_CONFIGURING, bufRets);
+ return;
+ }
+
+ bufRets.resize(bufReqs.size());
+
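+ // Track per-request outcomes so the final callback status can distinguish
+ // full success (OK), partial success (FAILED_PARTIAL), and complete
+ // failure (FAILED_UNKNOWN).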
+ bool allReqsSucceeds = true;
+ bool oneReqSucceeds = false;
+ for (size_t i = 0; i < bufReqs.size(); i++) {
+ const auto& bufReq = bufReqs[i];
+ auto& bufRet = bufRets[i];
+ int32_t streamId = bufReq.streamId;
+ sp<Camera3OutputStreamInterface> outputStream = states.outputStreams.get(streamId);
+ if (outputStream == nullptr) {
+ ALOGE("%s: Output stream id %d not found!", __FUNCTION__, streamId);
+ hardware::hidl_vec<StreamBufferRet> emptyBufRets;
+ _hidl_cb(BufferRequestStatus::FAILED_ILLEGAL_ARGUMENTS, emptyBufRets);
+ states.reqBufferIntf.endRequestBuffer();
+ return;
+ }
+
+ if (outputStream->isAbandoned()) {
+ bufRet.val.error(StreamBufferRequestError::STREAM_DISCONNECTED);
+ allReqsSucceeds = false;
+ continue;
+ }
+
+ bufRet.streamId = streamId;
+ size_t handOutBufferCount = outputStream->getOutstandingBuffersCount();
+ uint32_t numBuffersRequested = bufReq.numBuffersRequested;
+ size_t totalHandout = handOutBufferCount + numBuffersRequested;
+ uint32_t maxBuffers = outputStream->asHalStream()->max_buffers;
+ if (totalHandout > maxBuffers) {
+ // Not able to allocate enough buffers. Exit early for this stream
+ ALOGE("%s: requesting too many buffers for stream %d: at HAL: %zu + requesting: %d"
+ " > max: %d", __FUNCTION__, streamId, handOutBufferCount,
+ numBuffersRequested, maxBuffers);
+ bufRet.val.error(StreamBufferRequestError::MAX_BUFFER_EXCEEDED);
+ allReqsSucceeds = false;
+ continue;
+ }
+
+ hardware::hidl_vec<StreamBuffer> tmpRetBuffers(numBuffersRequested);
+ bool currentReqSucceeds = true;
+ std::vector<camera3_stream_buffer_t> streamBuffers(numBuffersRequested);
+ size_t numAllocatedBuffers = 0;
+ size_t numPushedInflightBuffers = 0;
+ for (size_t b = 0; b < numBuffersRequested; b++) {
+ camera3_stream_buffer_t& sb = streamBuffers[b];
+ // Since this method can run concurrently with the request thread,
+ // we need to update the wait duration every time we call getBuffer
+ nsecs_t waitDuration = states.reqBufferIntf.getWaitDuration();
+ status_t res = outputStream->getBuffer(&sb, waitDuration);
+ if (res != OK) {
+ if (res == NO_INIT || res == DEAD_OBJECT) {
+ ALOGV("%s: Can't get output buffer for stream %d: %s (%d)",
+ __FUNCTION__, streamId, strerror(-res), res);
+ bufRet.val.error(StreamBufferRequestError::STREAM_DISCONNECTED);
+ } else {
+ ALOGE("%s: Can't get output buffer for stream %d: %s (%d)",
+ __FUNCTION__, streamId, strerror(-res), res);
+ if (res == TIMED_OUT || res == NO_MEMORY) {
+ bufRet.val.error(StreamBufferRequestError::NO_BUFFER_AVAILABLE);
+ } else {
+ bufRet.val.error(StreamBufferRequestError::UNKNOWN_ERROR);
+ }
+ }
+ currentReqSucceeds = false;
+ break;
+ }
+ numAllocatedBuffers++;
+
+ buffer_handle_t *buffer = sb.buffer;
+ auto pair = states.bufferRecordsIntf.getBufferId(*buffer, streamId);
+ bool isNewBuffer = pair.first;
+ uint64_t bufferId = pair.second;
+ StreamBuffer& hBuf = tmpRetBuffers[b];
+
+ hBuf.streamId = streamId;
+ hBuf.bufferId = bufferId;
+ hBuf.buffer = (isNewBuffer) ? *buffer : nullptr;
+ hBuf.status = BufferStatus::OK;
+ hBuf.releaseFence = nullptr;
+
+ native_handle_t *acquireFence = nullptr;
+ if (sb.acquire_fence != -1) {
+ acquireFence = native_handle_create(1,0);
+ acquireFence->data[0] = sb.acquire_fence;
+ }
+ hBuf.acquireFence.setTo(acquireFence, /*shouldOwn*/true);
+ hBuf.releaseFence = nullptr;
+
+ res = states.bufferRecordsIntf.pushInflightRequestBuffer(bufferId, buffer, streamId);
+ if (res != OK) {
+ ALOGE("%s: Can't register request buffer for stream %d: %s (%d)",
+ __FUNCTION__, streamId, strerror(-res), res);
+ bufRet.val.error(StreamBufferRequestError::UNKNOWN_ERROR);
+ currentReqSucceeds = false;
+ break;
+ }
+ numPushedInflightBuffers++;
+ }
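+ // On success, hand the acquired buffers back to the HAL; on failure, roll
+ // back by popping the in-flight records and returning the already acquired
+ // buffers to their stream in error state.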
+ if (currentReqSucceeds) {
+ bufRet.val.buffers(std::move(tmpRetBuffers));
+ oneReqSucceeds = true;
+ } else {
+ allReqsSucceeds = false;
+ for (size_t b = 0; b < numPushedInflightBuffers; b++) {
+ StreamBuffer& hBuf = tmpRetBuffers[b];
+ buffer_handle_t* buffer;
+ status_t res = states.bufferRecordsIntf.popInflightRequestBuffer(
+ hBuf.bufferId, &buffer);
+ if (res != OK) {
+ SET_ERR("%s: popInflightRequestBuffer failed for stream %d: %s (%d)",
+ __FUNCTION__, streamId, strerror(-res), res);
+ }
+ }
+ for (size_t b = 0; b < numAllocatedBuffers; b++) {
+ camera3_stream_buffer_t& sb = streamBuffers[b];
+ sb.acquire_fence = -1;
+ sb.status = CAMERA3_BUFFER_STATUS_ERROR;
+ }
+ returnOutputBuffers(states.useHalBufManager, /*listener*/nullptr,
+ streamBuffers.data(), numAllocatedBuffers, 0);
+ }
+ }
+
+ _hidl_cb(allReqsSucceeds ? BufferRequestStatus::OK :
+ oneReqSucceeds ? BufferRequestStatus::FAILED_PARTIAL :
+ BufferRequestStatus::FAILED_UNKNOWN,
+ bufRets);
+ states.reqBufferIntf.endRequestBuffer();
+}
+
+void returnStreamBuffers(ReturnBufferStates& states,
+ const hardware::hidl_vec<hardware::camera::device::V3_2::StreamBuffer>& buffers) {
+ if (!states.useHalBufManager) {
+ ALOGE("%s: Camera %s does not support HAL buffer management",
+ __FUNCTION__, states.cameraId.string());
+ return;
+ }
+
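+ // Look up each returned buffer by bufferId in the in-flight records and
+ // hand it back to its stream with ERROR status.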
+ for (const auto& buf : buffers) {
+ if (buf.bufferId == BUFFER_ID_NO_BUFFER) {
+ ALOGE("%s: cannot return a buffer without bufferId", __FUNCTION__);
+ continue;
+ }
+
+ buffer_handle_t* buffer;
+ status_t res = states.bufferRecordsIntf.popInflightRequestBuffer(buf.bufferId, &buffer);
+
+ if (res != OK) {
+ ALOGE("%s: cannot find in-flight buffer %" PRIu64 " for stream %d",
+ __FUNCTION__, buf.bufferId, buf.streamId);
+ continue;
+ }
+
+ camera3_stream_buffer_t streamBuffer;
+ streamBuffer.buffer = buffer;
+ streamBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
+ streamBuffer.acquire_fence = -1;
+ streamBuffer.release_fence = -1;
+
+ if (buf.releaseFence == nullptr) {
+ streamBuffer.release_fence = -1;
+ } else if (buf.releaseFence->numFds == 1) {
+ streamBuffer.release_fence = dup(buf.releaseFence->data[0]);
+ } else {
+ ALOGE("%s: Invalid release fence, fd count is %d, not 1",
+ __FUNCTION__, buf.releaseFence->numFds);
+ continue;
+ }
+
+ sp<Camera3StreamInterface> stream = states.outputStreams.get(buf.streamId);
+ if (stream == nullptr) {
+ ALOGE("%s: Output stream id %d not found!", __FUNCTION__, buf.streamId);
+ continue;
+ }
+ streamBuffer.stream = stream->asHalStream();
+ returnOutputBuffers(states.useHalBufManager, /*listener*/nullptr,
+ &streamBuffer, /*size*/1, /*timestamp*/ 0);
+ }
+}
+
+void flushInflightRequests(FlushInflightReqStates& states) {
+ ATRACE_CALL();
+ { // First return buffers cached in mInFlightMap
+ std::lock_guard<std::mutex> l(states.inflightLock);
+ for (size_t idx = 0; idx < states.inflightMap.size(); idx++) {
+ const InFlightRequest &request = states.inflightMap.valueAt(idx);
+ returnOutputBuffers(
+ states.useHalBufManager, states.listener,
+ request.pendingOutputBuffers.array(),
+ request.pendingOutputBuffers.size(), 0,
+ /*timestampIncreasing*/true, request.outputSurfaces,
+ request.resultExtras);
+ }
+ states.inflightMap.clear();
+ states.inflightIntf.onInflightMapFlushedLocked();
+ }
+
+ // Then return all inflight buffers not returned by HAL
+ std::vector<std::pair<int32_t, int32_t>> inflightKeys;
+ states.flushBufferIntf.getInflightBufferKeys(&inflightKeys);
+
+ // Inflight buffers for HAL buffer manager
+ std::vector<uint64_t> inflightRequestBufferKeys;
+ states.flushBufferIntf.getInflightRequestBufferKeys(&inflightRequestBufferKeys);
+
+ // (streamId, frameNumber, buffer_handle_t*) tuple for all inflight buffers.
+ // frameNumber will be -1 for buffers from HAL buffer manager
+ std::vector<std::tuple<int32_t, int32_t, buffer_handle_t*>> inflightBuffers;
+ inflightBuffers.reserve(inflightKeys.size() + inflightRequestBufferKeys.size());
+
+ for (auto& pair : inflightKeys) {
+ int32_t frameNumber = pair.first;
+ int32_t streamId = pair.second;
+ buffer_handle_t* buffer;
+ status_t res = states.bufferRecordsIntf.popInflightBuffer(frameNumber, streamId, &buffer);
+ if (res != OK) {
+ ALOGE("%s: Frame %d: No in-flight buffer for stream %d",
+ __FUNCTION__, frameNumber, streamId);
+ continue;
+ }
+ inflightBuffers.push_back(std::make_tuple(streamId, frameNumber, buffer));
+ }
+
+ for (auto& bufferId : inflightRequestBufferKeys) {
+ int32_t streamId = -1;
+ buffer_handle_t* buffer = nullptr;
+ status_t res = states.bufferRecordsIntf.popInflightRequestBuffer(
+ bufferId, &buffer, &streamId);
+ if (res != OK) {
+ ALOGE("%s: cannot find in-flight buffer %" PRIu64, __FUNCTION__, bufferId);
+ continue;
+ }
+ inflightBuffers.push_back(std::make_tuple(streamId, /*frameNumber*/-1, buffer));
+ }
+
+ std::vector<sp<Camera3StreamInterface>> streams = states.flushBufferIntf.getAllStreams();
+
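+ // Match every leftover in-flight buffer to its stream and return it with
+ // ERROR status; output and input streams use different return paths.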
+ for (auto& tuple : inflightBuffers) {
+ status_t res = OK;
+ int32_t streamId = std::get<0>(tuple);
+ int32_t frameNumber = std::get<1>(tuple);
+ buffer_handle_t* buffer = std::get<2>(tuple);
+
+ camera3_stream_buffer_t streamBuffer;
+ streamBuffer.buffer = buffer;
+ streamBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
+ streamBuffer.acquire_fence = -1;
+ streamBuffer.release_fence = -1;
+
+ for (auto& stream : streams) {
+ if (streamId == stream->getId()) {
+ // Return buffer to deleted stream
+ camera3_stream* halStream = stream->asHalStream();
+ streamBuffer.stream = halStream;
+ switch (halStream->stream_type) {
+ case CAMERA3_STREAM_OUTPUT:
+ res = stream->returnBuffer(streamBuffer, /*timestamp*/ 0,
+ /*timestampIncreasing*/true, std::vector<size_t> (), frameNumber);
+ if (res != OK) {
+ ALOGE("%s: Can't return output buffer for frame %d to"
+ " stream %d: %s (%d)", __FUNCTION__,
+ frameNumber, streamId, strerror(-res), res);
+ }
+ break;
+ case CAMERA3_STREAM_INPUT:
+ res = stream->returnInputBuffer(streamBuffer);
+ if (res != OK) {
+ ALOGE("%s: Can't return input buffer for frame %d to"
+ " stream %d: %s (%d)", __FUNCTION__,
+ frameNumber, streamId, strerror(-res), res);
+ }
+ break;
+ default: // Bi-directional stream is deprecated
+ ALOGE("%s: stream %d has unknown stream type %d",
+ __FUNCTION__, streamId, halStream->stream_type);
+ break;
+ }
+ break;
+ }
+ }
+ }
+}
+
+} // camera3
+} // namespace android
diff --git a/services/camera/libcameraservice/device3/Camera3OutputUtils.h b/services/camera/libcameraservice/device3/Camera3OutputUtils.h
new file mode 100644
index 0000000..47d8095
--- /dev/null
+++ b/services/camera/libcameraservice/device3/Camera3OutputUtils.h
@@ -0,0 +1,143 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_SERVERS_CAMERA3_OUTPUT_UTILS_H
+#define ANDROID_SERVERS_CAMERA3_OUTPUT_UTILS_H
+
+#include <memory>
+#include <mutex>
+
+#include <cutils/native_handle.h>
+
+#include <fmq/MessageQueue.h>
+
+#include <common/CameraDeviceBase.h>
+
+#include "device3/BufferUtils.h"
+#include "device3/DistortionMapper.h"
+#include "device3/ZoomRatioMapper.h"
+#include "device3/InFlightRequest.h"
+#include "device3/Camera3Stream.h"
+#include "device3/Camera3OutputStreamInterface.h"
+#include "utils/TagMonitor.h"
+
+namespace android {
+
+using ResultMetadataQueue = hardware::MessageQueue<uint8_t, hardware::kSynchronizedReadWrite>;
+
+namespace camera3 {
+
+ /**
+ * Helper methods shared between Camera3Device/Camera3OfflineSession for HAL callbacks
+ */
+ // Helper function to return output buffers to their output streams.
+ void returnOutputBuffers(
+ bool useHalBufManager,
+ sp<NotificationListener> listener, // Only needed when outputSurfaces is not empty
+ const camera3_stream_buffer_t *outputBuffers,
+ size_t numBuffers, nsecs_t timestamp, bool timestampIncreasing = true,
+ // The following arguments are only meant for surface sharing use case
+ const SurfaceMap& outputSurfaces = SurfaceMap{},
+ // Used to send buffer error callback when failing to return buffer
+ const CaptureResultExtras &resultExtras = CaptureResultExtras{});
+
+ // Camera3Device/Camera3OfflineSession internal states used in notify/processCaptureResult
+ // callbacks
+ struct CaptureOutputStates {
+ const String8& cameraId;
+ std::mutex& inflightLock;
+ InFlightRequestMap& inflightMap; // end of inflightLock scope
+ std::mutex& outputLock;
+ List<CaptureResult>& resultQueue;
+ std::condition_variable& resultSignal;
+ uint32_t& nextShutterFrameNum;
+ uint32_t& nextReprocShutterFrameNum;
+ uint32_t& nextZslShutterFrameNum;
+ uint32_t& nextResultFrameNum;
+ uint32_t& nextReprocResultFrameNum;
+ uint32_t& nextZslResultFrameNum; // end of outputLock scope
+ const bool useHalBufManager;
+ const bool usePartialResult;
+ const bool needFixupMonoChrome;
+ const uint32_t numPartialResults;
+ const metadata_vendor_id_t vendorTagId;
+ const CameraMetadata& deviceInfo;
+ const std::unordered_map<std::string, CameraMetadata>& physicalDeviceInfoMap;
+ std::unique_ptr<ResultMetadataQueue>& fmq;
+ std::unordered_map<std::string, camera3::DistortionMapper>& distortionMappers;
+ std::unordered_map<std::string, camera3::ZoomRatioMapper>& zoomRatioMappers;
+ TagMonitor& tagMonitor;
+ sp<Camera3Stream> inputStream;
+ StreamSet& outputStreams;
+ sp<NotificationListener> listener;
+ SetErrorInterface& setErrIntf;
+ InflightRequestUpdateInterface& inflightIntf;
+ BufferRecordsInterface& bufferRecordsIntf;
+ };
+
+ // Handle one capture result. Assume callers hold the lock to serialize all
+ // processCaptureResult calls
+ void processOneCaptureResultLocked(
+ CaptureOutputStates& states,
+ const hardware::camera::device::V3_2::CaptureResult& result,
+ const hardware::hidl_vec<
+ hardware::camera::device::V3_4::PhysicalCameraMetadata> physicalCameraMetadata);
+
+ // Handle one notify message
+ void notify(CaptureOutputStates& states,
+ const hardware::camera::device::V3_2::NotifyMsg& msg);
+
+ struct RequestBufferStates {
+ const String8& cameraId;
+ std::mutex& reqBufferLock; // lock to serialize request buffer calls
+ const bool useHalBufManager;
+ StreamSet& outputStreams;
+ SetErrorInterface& setErrIntf;
+ BufferRecordsInterface& bufferRecordsIntf;
+ RequestBufferInterface& reqBufferIntf;
+ };
+
+ void requestStreamBuffers(RequestBufferStates& states,
+ const hardware::hidl_vec<hardware::camera::device::V3_5::BufferRequest>& bufReqs,
+ hardware::camera::device::V3_5::ICameraDeviceCallback::requestStreamBuffers_cb _hidl_cb);
+
+ struct ReturnBufferStates {
+ const String8& cameraId;
+ const bool useHalBufManager;
+ StreamSet& outputStreams;
+ BufferRecordsInterface& bufferRecordsIntf;
+ };
+
+ void returnStreamBuffers(ReturnBufferStates& states,
+ const hardware::hidl_vec<hardware::camera::device::V3_2::StreamBuffer>& buffers);
+
+ struct FlushInflightReqStates {
+ const String8& cameraId;
+ std::mutex& inflightLock;
+ InFlightRequestMap& inflightMap; // end of inflightLock scope
+ const bool useHalBufManager;
+ sp<NotificationListener> listener;
+ InflightRequestUpdateInterface& inflightIntf;
+ BufferRecordsInterface& bufferRecordsIntf;
+ FlushBufferInterface& flushBufferIntf;
+ };
+
+ void flushInflightRequests(FlushInflightReqStates& states);
+} // namespace camera3
+
+} // namespace android
+
+#endif
diff --git a/services/camera/libcameraservice/device3/Camera3SharedOutputStream.h b/services/camera/libcameraservice/device3/Camera3SharedOutputStream.h
index b5e37c2..e645e05 100644
--- a/services/camera/libcameraservice/device3/Camera3SharedOutputStream.h
+++ b/services/camera/libcameraservice/device3/Camera3SharedOutputStream.h
@@ -65,6 +65,12 @@
const std::vector<size_t> &removedSurfaceIds,
KeyedVector<sp<Surface>, size_t> *outputMap/*out*/);
+ virtual bool getOfflineProcessingSupport() const {
+ // As per the camera spec, shared streams currently do not support
+ // offline mode.
+ return false;
+ }
+
private:
static const size_t kMaxOutputs = 4;
diff --git a/services/camera/libcameraservice/device3/Camera3Stream.cpp b/services/camera/libcameraservice/device3/Camera3Stream.cpp
index f707ef8..7916ddb 100644
--- a/services/camera/libcameraservice/device3/Camera3Stream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Stream.cpp
@@ -151,6 +151,14 @@
return mPhysicalCameraId;
}
+void Camera3Stream::setOfflineProcessingSupport(bool support) {
+ mSupportOfflineProcessing = support;
+}
+
+bool Camera3Stream::getOfflineProcessingSupport() const {
+ return mSupportOfflineProcessing;
+}
+
status_t Camera3Stream::forceToIdle() {
ATRACE_CALL();
Mutex::Autolock l(mLock);
diff --git a/services/camera/libcameraservice/device3/Camera3Stream.h b/services/camera/libcameraservice/device3/Camera3Stream.h
index 805df82..d768d3d 100644
--- a/services/camera/libcameraservice/device3/Camera3Stream.h
+++ b/services/camera/libcameraservice/device3/Camera3Stream.h
@@ -167,6 +167,9 @@
android_dataspace getOriginalDataSpace() const;
const String8& physicalCameraId() const;
+ void setOfflineProcessingSupport(bool) override;
+ bool getOfflineProcessingSupport() const override;
+
camera3_stream* asHalStream() override {
return this;
}
@@ -592,6 +595,8 @@
String8 mPhysicalCameraId;
nsecs_t mLastTimestamp;
+
+ bool mSupportOfflineProcessing = false;
}; // class Camera3Stream
}; // namespace camera3
diff --git a/services/camera/libcameraservice/device3/Camera3StreamInterface.h b/services/camera/libcameraservice/device3/Camera3StreamInterface.h
index 73f501a..667e3bb 100644
--- a/services/camera/libcameraservice/device3/Camera3StreamInterface.h
+++ b/services/camera/libcameraservice/device3/Camera3StreamInterface.h
@@ -23,6 +23,7 @@
#include "Camera3StreamBufferListener.h"
#include "Camera3StreamBufferFreedListener.h"
+struct camera3_stream;
struct camera3_stream_buffer;
namespace android {
@@ -54,6 +55,7 @@
android_dataspace dataSpace;
uint64_t consumerUsage;
bool finalized = false;
+ bool supportsOffline = false;
OutputStreamInfo() :
width(-1), height(-1), format(-1), dataSpace(HAL_DATASPACE_UNKNOWN),
consumerUsage(0) {}
@@ -99,6 +101,12 @@
virtual android_dataspace getOriginalDataSpace() const = 0;
/**
+ * Offline processing
+ */
+ virtual void setOfflineProcessingSupport(bool support) = 0;
+ virtual bool getOfflineProcessingSupport() const = 0;
+
+ /**
* Get a HAL3 handle for the stream, without starting stream configuration.
*/
virtual camera3_stream* asHalStream() = 0;
diff --git a/services/camera/libcameraservice/device3/DistortionMapper.h b/services/camera/libcameraservice/device3/DistortionMapper.h
index a255003..7dcb67b 100644
--- a/services/camera/libcameraservice/device3/DistortionMapper.h
+++ b/services/camera/libcameraservice/device3/DistortionMapper.h
@@ -36,6 +36,15 @@
public:
DistortionMapper();
+ DistortionMapper(const DistortionMapper& other) :
+ mValidMapping(other.mValidMapping), mValidGrids(other.mValidGrids),
+ mFx(other.mFx), mFy(other.mFy), mCx(other.mCx), mCy(other.mCy), mS(other.mS),
+ mInvFx(other.mInvFx), mInvFy(other.mInvFy), mK(other.mK),
+ mArrayWidth(other.mArrayWidth), mArrayHeight(other.mArrayHeight),
+ mActiveWidth(other.mActiveWidth), mActiveHeight(other.mActiveHeight),
+ mArrayDiffX(other.mArrayDiffX), mArrayDiffY(other.mArrayDiffY),
+ mCorrectedGrid(other.mCorrectedGrid), mDistortedGrid(other.mDistortedGrid) {}
+
/**
* Check whether distortion correction is supported by the camera HAL
*/
@@ -173,7 +182,7 @@
// pre-calculated inverses for speed
float mInvFx, mInvFy;
// radial/tangential distortion parameters
- float mK[5];
+ std::array<float, 5> mK;
// pre-correction active array dimensions
float mArrayWidth, mArrayHeight;
diff --git a/services/camera/libcameraservice/device3/InFlightRequest.h b/services/camera/libcameraservice/device3/InFlightRequest.h
new file mode 100644
index 0000000..ceeaa71
--- /dev/null
+++ b/services/camera/libcameraservice/device3/InFlightRequest.h
@@ -0,0 +1,148 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_SERVERS_CAMERA3_INFLIGHT_REQUEST_H
+#define ANDROID_SERVERS_CAMERA3_INFLIGHT_REQUEST_H
+
+#include <set>
+
+#include <camera/CaptureResult.h>
+#include <camera/CameraMetadata.h>
+#include <utils/String8.h>
+#include <utils/Timers.h>
+
+#include "hardware/camera3.h"
+
+#include "common/CameraDeviceBase.h"
+
+namespace android {
+
+namespace camera3 {
+
+struct InFlightRequest {
+ // Set by notify() SHUTTER call.
+ nsecs_t shutterTimestamp;
+ // Set by process_capture_result().
+ nsecs_t sensorTimestamp;
+ int requestStatus;
+ // Set by process_capture_result call with valid metadata
+ bool haveResultMetadata;
+ // Decremented by calls to process_capture_result with valid output
+ // and input buffers
+ int numBuffersLeft;
+ CaptureResultExtras resultExtras;
+ // If this request has any input buffer
+ bool hasInputBuffer;
+
+ // The last metadata that the framework receives from the HAL and
+ // has not yet sent out because the shutter event hasn't arrived.
+ // It's added by process_capture_result and sent when the framework
+ // receives the shutter event.
+ CameraMetadata pendingMetadata;
+
+ // The metadata of the partial results that the framework has received
+ // from the HAL so far and has sent out.
+ CameraMetadata collectedPartialResult;
+
+ // Buffers are added by process_capture_result when output buffers
+ // return from the HAL but the framework has not yet received the shutter
+ // event. They will be returned to the streams when the framework receives
+ // the shutter event.
+ Vector<camera3_stream_buffer_t> pendingOutputBuffers;
+
+ // Whether this inflight request's shutter and result callbacks are to be
+ // called. The policy is that if the request is the last one in the constrained
+ // high speed recording request list, this flag will be true. If the request list
+ // is not for constrained high speed recording, this flag will also be true.
+ bool hasCallback;
+
+ // Maximum expected frame duration for this request.
+ // For manual captures, equal to the max of requested exposure time and frame duration
+ // For auto-exposure modes, equal to 1/(lower end of target FPS range)
+ nsecs_t maxExpectedDuration;
+
+ // Whether the result metadata for this request is to be skipped. The
+ // result metadata should be skipped in the case of
+ // REQUEST/RESULT error.
+ bool skipResultMetadata;
+
+ // The physical camera ids being requested.
+ std::set<String8> physicalCameraIds;
+
+ // Map of physicalCameraId <-> Metadata
+ std::vector<PhysicalCaptureResultInfo> physicalMetadatas;
+
+ // Indicates a still capture request.
+ bool stillCapture;
+
+ // Indicates a ZSL capture request
+ bool zslCapture;
+
+ // Requested camera ids (both logical and physical) with zoomRatio != 1.0f
+ std::set<std::string> cameraIdsWithZoom;
+
+ // What shared surfaces an output should go to
+ SurfaceMap outputSurfaces;
+
+ // TODO: dedupe
+ static const nsecs_t kDefaultExpectedDuration = 100000000; // 100 ms
+
+ // Default constructor needed by KeyedVector
+ InFlightRequest() :
+ shutterTimestamp(0),
+ sensorTimestamp(0),
+ requestStatus(OK),
+ haveResultMetadata(false),
+ numBuffersLeft(0),
+ hasInputBuffer(false),
+ hasCallback(true),
+ maxExpectedDuration(kDefaultExpectedDuration),
+ skipResultMetadata(false),
+ stillCapture(false),
+ zslCapture(false) {
+ }
+
+ InFlightRequest(int numBuffers, CaptureResultExtras extras, bool hasInput,
+ bool hasAppCallback, nsecs_t maxDuration,
+ const std::set<String8>& physicalCameraIdSet, bool isStillCapture,
+ bool isZslCapture, const std::set<std::string>& idsWithZoom,
+ const SurfaceMap& outSurfaces = SurfaceMap{}) :
+ shutterTimestamp(0),
+ sensorTimestamp(0),
+ requestStatus(OK),
+ haveResultMetadata(false),
+ numBuffersLeft(numBuffers),
+ resultExtras(extras),
+ hasInputBuffer(hasInput),
+ hasCallback(hasAppCallback),
+ maxExpectedDuration(maxDuration),
+ skipResultMetadata(false),
+ physicalCameraIds(physicalCameraIdSet),
+ stillCapture(isStillCapture),
+ zslCapture(isZslCapture),
+ cameraIdsWithZoom(idsWithZoom),
+ outputSurfaces(outSurfaces) {
+ }
+};
+
+// Map from frame number to the in-flight request state
+typedef KeyedVector<uint32_t, InFlightRequest> InFlightRequestMap;
+
+} // namespace camera3
+
+} // namespace android
+
+#endif
diff --git a/services/camera/libcameraservice/hidl/AidlCameraServiceListener.h b/services/camera/libcameraservice/hidl/AidlCameraServiceListener.h
index 0f6be79..175eb8e 100644
--- a/services/camera/libcameraservice/hidl/AidlCameraServiceListener.h
+++ b/services/camera/libcameraservice/hidl/AidlCameraServiceListener.h
@@ -46,12 +46,18 @@
~H2BCameraServiceListener() { }
virtual ::android::binder::Status onStatusChanged(int32_t status,
- const ::android::String16& cameraId) override;
+ const ::android::String16& cameraId) override;
+ virtual ::android::binder::Status onPhysicalCameraStatusChanged(int32_t /*status*/,
+ const ::android::String16& /*cameraId*/,
+ const ::android::String16& /*physicalCameraId*/) override {
+ // no implementation yet.
+ return binder::Status::ok();
+ }
virtual ::android::binder::Status onTorchStatusChanged(
- int32_t status, const ::android::String16& cameraId) override;
+ int32_t status, const ::android::String16& cameraId) override;
virtual binder::Status onCameraAccessPrioritiesChanged() {
- // TODO: no implementation yet.
+ // TODO: no implementation yet. b/148146086
return binder::Status::ok();
}
};
diff --git a/services/camera/libcameraservice/hidl/HidlCameraDeviceUser.cpp b/services/camera/libcameraservice/hidl/HidlCameraDeviceUser.cpp
index 675ad24..bf89ca5 100644
--- a/services/camera/libcameraservice/hidl/HidlCameraDeviceUser.cpp
+++ b/services/camera/libcameraservice/hidl/HidlCameraDeviceUser.cpp
@@ -201,8 +201,9 @@
return HStatus::ILLEGAL_ARGUMENT;
}
+ std::vector<int> offlineStreamIds;
binder::Status ret = mDeviceRemote->endConfigure(convertFromHidl(operatingMode),
- cameraMetadata);
+ cameraMetadata, &offlineStreamIds);
return B2HStatus(ret);
}
diff --git a/services/camera/libcameraservice/tests/Android.mk b/services/camera/libcameraservice/tests/Android.mk
index ec5e876..ea4eb3b 100644
--- a/services/camera/libcameraservice/tests/Android.mk
+++ b/services/camera/libcameraservice/tests/Android.mk
@@ -31,6 +31,7 @@
android.hardware.camera.common@1.0 \
android.hardware.camera.provider@2.4 \
android.hardware.camera.provider@2.5 \
+ android.hardware.camera.provider@2.6 \
android.hardware.camera.device@1.0 \
android.hardware.camera.device@3.2 \
android.hardware.camera.device@3.4
diff --git a/services/camera/libcameraservice/tests/CameraProviderManagerTest.cpp b/services/camera/libcameraservice/tests/CameraProviderManagerTest.cpp
index 084dc62..a8f6889 100644
--- a/services/camera/libcameraservice/tests/CameraProviderManagerTest.cpp
+++ b/services/camera/libcameraservice/tests/CameraProviderManagerTest.cpp
@@ -236,6 +236,8 @@
void onDeviceStatusChanged(const String8 &,
hardware::camera::common::V1_0::CameraDeviceStatus) override {}
+ void onDeviceStatusChanged(const String8 &, const String8 &,
+ hardware::camera::common::V1_0::CameraDeviceStatus) override {}
void onTorchStatusChanged(const String8 &,
hardware::camera::common::V1_0::TorchModeStatus) override {}
void onNewProviderRegistered() override {}
diff --git a/services/camera/libcameraservice/utils/TagMonitor.cpp b/services/camera/libcameraservice/utils/TagMonitor.cpp
index 4037a66..262f962 100644
--- a/services/camera/libcameraservice/utils/TagMonitor.cpp
+++ b/services/camera/libcameraservice/utils/TagMonitor.cpp
@@ -33,6 +33,16 @@
mVendorTagId(CAMERA_METADATA_INVALID_VENDOR_ID)
{}
+TagMonitor::TagMonitor(const TagMonitor& other):
+ mMonitoringEnabled(other.mMonitoringEnabled.load()),
+ mMonitoredTagList(other.mMonitoredTagList),
+ mLastMonitoredRequestValues(other.mLastMonitoredRequestValues),
+ mLastMonitoredResultValues(other.mLastMonitoredResultValues),
+ mLastMonitoredPhysicalRequestKeys(other.mLastMonitoredPhysicalRequestKeys),
+ mLastMonitoredPhysicalResultKeys(other.mLastMonitoredPhysicalResultKeys),
+ mMonitoringEvents(other.mMonitoringEvents),
+ mVendorTagId(other.mVendorTagId) {}
+
const String16 TagMonitor::kMonitorOption = String16("-m");
const char* TagMonitor::k3aTags =
diff --git a/services/camera/libcameraservice/utils/TagMonitor.h b/services/camera/libcameraservice/utils/TagMonitor.h
index 1b7b033..413f502 100644
--- a/services/camera/libcameraservice/utils/TagMonitor.h
+++ b/services/camera/libcameraservice/utils/TagMonitor.h
@@ -50,6 +50,8 @@
TagMonitor();
+ TagMonitor(const TagMonitor& other);
+
void initialize(metadata_vendor_id_t id) { mVendorTagId = id; }
// Parse tag name list (comma-separated) and if valid, enable monitoring
diff --git a/services/mediatranscoding/MediaTranscodingService.cpp b/services/mediatranscoding/MediaTranscodingService.cpp
index 2680cee..0269896 100644
--- a/services/mediatranscoding/MediaTranscodingService.cpp
+++ b/services/mediatranscoding/MediaTranscodingService.cpp
@@ -25,10 +25,6 @@
namespace android {
-namespace media {
-
-using android::media::MediaTranscodingService;
-
MediaTranscodingService::MediaTranscodingService() {
ALOGV("MediaTranscodingService is created");
}
@@ -56,7 +52,8 @@
Status MediaTranscodingService::registerClient(
const std::shared_ptr<ITranscodingServiceClient>& /*in_client*/,
- int32_t* /*_aidl_return*/) {
+ const std::string& /* in_opPackageName */, int32_t /* in_clientUid */,
+ int32_t /* in_clientPid */, int32_t* /*_aidl_return*/) {
// TODO(hkuang): Add implementation.
return Status::ok();
}
@@ -67,8 +64,9 @@
}
Status MediaTranscodingService::submitRequest(int32_t /*clientId*/,
- const TranscodingRequest& /*request*/,
- TranscodingJob* /*job*/, int32_t* /*_aidl_return*/) {
+ const TranscodingRequestParcel& /*request*/,
+ TranscodingJobParcel* /*job*/,
+ int32_t* /*_aidl_return*/) {
// TODO(hkuang): Add implementation.
return Status::ok();
}
@@ -79,11 +77,11 @@
return Status::ok();
}
-Status MediaTranscodingService::getJobWithId(int32_t /*in_jobId*/, TranscodingJob* /*out_job*/,
+Status MediaTranscodingService::getJobWithId(int32_t /*in_jobId*/,
+ TranscodingJobParcel* /*out_job*/,
bool* /*_aidl_return*/) {
// TODO(hkuang): Add implementation.
return Status::ok();
}
-} // namespace media
} // namespace android
diff --git a/services/mediatranscoding/MediaTranscodingService.h b/services/mediatranscoding/MediaTranscodingService.h
index c2c30b9..d225f9a 100644
--- a/services/mediatranscoding/MediaTranscodingService.h
+++ b/services/mediatranscoding/MediaTranscodingService.h
@@ -22,13 +22,11 @@
namespace android {
-namespace media {
-
using Status = ::ndk::ScopedAStatus;
using ::aidl::android::media::BnMediaTranscodingService;
using ::aidl::android::media::ITranscodingServiceClient;
-using ::aidl::android::media::TranscodingJob;
-using ::aidl::android::media::TranscodingRequest;
+using ::aidl::android::media::TranscodingJobParcel;
+using ::aidl::android::media::TranscodingRequestParcel;
class MediaTranscodingService : public BnMediaTranscodingService {
public:
@@ -40,23 +38,24 @@
static const char* getServiceName() { return "media.transcoding"; }
Status registerClient(const std::shared_ptr<ITranscodingServiceClient>& in_client,
- int32_t* _aidl_return) override;
+ const std::string& in_opPackageName, int32_t in_clientUid,
+ int32_t in_clientPid, int32_t* _aidl_return) override;
Status unregisterClient(int32_t clientId, bool* _aidl_return) override;
- Status submitRequest(int32_t in_clientId, const TranscodingRequest& in_request,
- TranscodingJob* out_job, int32_t* _aidl_return) override;
+ Status submitRequest(int32_t in_clientId, const TranscodingRequestParcel& in_request,
+ TranscodingJobParcel* out_job, int32_t* _aidl_return) override;
Status cancelJob(int32_t in_clientId, int32_t in_jobId, bool* _aidl_return) override;
- Status getJobWithId(int32_t in_jobId, TranscodingJob* out_job, bool* _aidl_return) override;
+ Status getJobWithId(int32_t in_jobId, TranscodingJobParcel* out_job,
+ bool* _aidl_return) override;
virtual inline binder_status_t dump(int /*fd*/, const char** /*args*/, uint32_t /*numArgs*/);
private:
};
-} // namespace media
} // namespace android
#endif // ANDROID_MEDIA_TRANSCODING_SERVICE_H
diff --git a/services/mediatranscoding/main_mediatranscodingservice.cpp b/services/mediatranscoding/main_mediatranscodingservice.cpp
index 1978eb4..7d862e6 100644
--- a/services/mediatranscoding/main_mediatranscodingservice.cpp
+++ b/services/mediatranscoding/main_mediatranscodingservice.cpp
@@ -29,7 +29,7 @@
strcpy(argv[0], "media.transcoding");
sp<ProcessState> proc(ProcessState::self());
sp<IServiceManager> sm = defaultServiceManager();
- android::media::MediaTranscodingService::instantiate();
+ android::MediaTranscodingService::instantiate();
ProcessState::self()->startThreadPool();
IPCThreadState::self()->joinThreadPool();
diff --git a/services/oboeservice/AAudioEndpointManager.cpp b/services/oboeservice/AAudioEndpointManager.cpp
index a1fc0ea..82cc90e 100644
--- a/services/oboeservice/AAudioEndpointManager.cpp
+++ b/services/oboeservice/AAudioEndpointManager.cpp
@@ -284,7 +284,7 @@
serviceEndpoint->close();
mSharedCloseCount++;
- ALOGV("%s() %p for device %d",
+ ALOGV("%s(%p) closed for device %d",
__func__, serviceEndpoint.get(), serviceEndpoint->getDeviceId());
}
}
diff --git a/services/oboeservice/AAudioServiceEndpointShared.cpp b/services/oboeservice/AAudioServiceEndpointShared.cpp
index 0a415fd..9b3b3b8 100644
--- a/services/oboeservice/AAudioServiceEndpointShared.cpp
+++ b/services/oboeservice/AAudioServiceEndpointShared.cpp
@@ -85,7 +85,7 @@
}
aaudio_result_t AAudioServiceEndpointShared::close() {
- return getStreamInternal()->close();
+ return getStreamInternal()->releaseCloseFinal();
}
// Glue between C and C++ callbacks.
diff --git a/services/oboeservice/AAudioServiceStreamBase.cpp b/services/oboeservice/AAudioServiceStreamBase.cpp
index 880a3d7..39e90b1 100644
--- a/services/oboeservice/AAudioServiceStreamBase.cpp
+++ b/services/oboeservice/AAudioServiceStreamBase.cpp
@@ -56,7 +56,8 @@
LOG_ALWAYS_FATAL_IF(!(getState() == AAUDIO_STREAM_STATE_CLOSED
|| getState() == AAUDIO_STREAM_STATE_UNINITIALIZED
|| getState() == AAUDIO_STREAM_STATE_DISCONNECTED),
- "service stream still open, state = %d", getState());
+ "service stream %p still open, state = %d",
+ this, getState());
}
std::string AAudioServiceStreamBase::dumpHeader() {
@@ -126,13 +127,13 @@
}
aaudio_result_t AAudioServiceStreamBase::close() {
- aaudio_result_t result = AAUDIO_OK;
if (getState() == AAUDIO_STREAM_STATE_CLOSED) {
return AAUDIO_OK;
}
stop();
+ aaudio_result_t result = AAUDIO_OK;
sp<AAudioServiceEndpoint> endpoint = mServiceEndpointWeak.promote();
if (endpoint == nullptr) {
result = AAUDIO_ERROR_INVALID_STATE;
diff --git a/services/oboeservice/AAudioServiceStreamMMAP.cpp b/services/oboeservice/AAudioServiceStreamMMAP.cpp
index 837b080..f4e72b7 100644
--- a/services/oboeservice/AAudioServiceStreamMMAP.cpp
+++ b/services/oboeservice/AAudioServiceStreamMMAP.cpp
@@ -49,16 +49,6 @@
, mInService(inService) {
}
-aaudio_result_t AAudioServiceStreamMMAP::close() {
- if (getState() == AAUDIO_STREAM_STATE_CLOSED) {
- return AAUDIO_OK;
- }
-
- stop();
-
- return AAudioServiceStreamBase::close();
-}
-
// Open stream on HAL and pass information about the shared memory buffer back to the client.
aaudio_result_t AAudioServiceStreamMMAP::open(const aaudio::AAudioStreamRequest &request) {
diff --git a/services/oboeservice/AAudioServiceStreamMMAP.h b/services/oboeservice/AAudioServiceStreamMMAP.h
index 1509f7d..3d56623 100644
--- a/services/oboeservice/AAudioServiceStreamMMAP.h
+++ b/services/oboeservice/AAudioServiceStreamMMAP.h
@@ -67,8 +67,6 @@
aaudio_result_t stopClient(audio_port_handle_t clientHandle) override;
- aaudio_result_t close() override;
-
const char *getTypeText() const override { return "MMAP"; }
protected: